repo_name
stringlengths
6
100
path
stringlengths
4
294
copies
stringlengths
1
5
size
stringlengths
4
6
content
stringlengths
606
896k
license
stringclasses
15 values
felixfontein/ansible
test/units/cli/galaxy/test_execute_list_collection.py
19
10617
# -*- coding: utf-8 -*- # Copyright (c) 2020 Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type import pytest from ansible import context from ansible.cli.galaxy import GalaxyCLI from ansible.errors import AnsibleError, AnsibleOptionsError from ansible.galaxy import collection from ansible.galaxy.dependency_resolution.dataclasses import Requirement from ansible.module_utils._text import to_native def path_exists(path): if to_native(path) == '/root/.ansible/collections/ansible_collections/sandwiches/ham': return False elif to_native(path) == '/usr/share/ansible/collections/ansible_collections/sandwiches/reuben': return False elif to_native(path) == 'nope': return False else: return True def isdir(path): if to_native(path) == 'nope': return False else: return True def cliargs(collections_paths=None, collection_name=None): if collections_paths is None: collections_paths = ['~/root/.ansible/collections', '/usr/share/ansible/collections'] context.CLIARGS._store = { 'collections_path': collections_paths, 'collection': collection_name, 'type': 'collection', 'output_format': 'human' } @pytest.fixture def mock_collection_objects(mocker): mocker.patch('ansible.cli.galaxy.GalaxyCLI._resolve_path', side_effect=['/root/.ansible/collections', '/usr/share/ansible/collections']) mocker.patch('ansible.cli.galaxy.validate_collection_path', side_effect=['/root/.ansible/collections/ansible_collections', '/usr/share/ansible/collections/ansible_collections']) collection_args_1 = ( ( 'sandwiches.pbj', '1.5.0', None, 'dir', ), ( 'sandwiches.reuben', '2.5.0', None, 'dir', ), ) collection_args_2 = ( ( 'sandwiches.pbj', '1.0.0', None, 'dir', ), ( 'sandwiches.ham', '1.0.0', None, 'dir', ), ) collections_path_1 = [Requirement(*cargs) for cargs in collection_args_1] collections_path_2 = [Requirement(*cargs) for cargs in collection_args_2] 
mocker.patch('ansible.cli.galaxy.find_existing_collections', side_effect=[collections_path_1, collections_path_2]) @pytest.fixture def mock_from_path(mocker): def _from_path(collection_name='pbj'): collection_args = { 'sandwiches.pbj': ( ( 'sandwiches.pbj', '1.5.0', None, 'dir', ), ( 'sandwiches.pbj', '1.0.0', None, 'dir', ), ), 'sandwiches.ham': ( ( 'sandwiches.ham', '1.0.0', None, 'dir', ), ), } from_path_objects = [Requirement(*args) for args in collection_args[collection_name]] mocker.patch('ansible.cli.galaxy.Requirement.from_dir_path_as_unknown', side_effect=from_path_objects) return _from_path def test_execute_list_collection_all(mocker, capsys, mock_collection_objects, tmp_path_factory): """Test listing all collections from multiple paths""" cliargs() mocker.patch('os.path.exists', return_value=True) mocker.patch('os.path.isdir', return_value=True) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list']) tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections') concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False) gc.execute_list_collection(artifacts_manager=concrete_artifact_cm) out, err = capsys.readouterr() out_lines = out.splitlines() assert len(out_lines) == 12 assert out_lines[0] == '' assert out_lines[1] == '# /root/.ansible/collections/ansible_collections' assert out_lines[2] == 'Collection Version' assert out_lines[3] == '----------------- -------' assert out_lines[4] == 'sandwiches.pbj 1.5.0 ' assert out_lines[5] == 'sandwiches.reuben 2.5.0 ' assert out_lines[6] == '' assert out_lines[7] == '# /usr/share/ansible/collections/ansible_collections' assert out_lines[8] == 'Collection Version' assert out_lines[9] == '-------------- -------' assert out_lines[10] == 'sandwiches.ham 1.0.0 ' assert out_lines[11] == 'sandwiches.pbj 1.0.0 ' def test_execute_list_collection_specific(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory): """Test listing a specific 
collection""" collection_name = 'sandwiches.ham' mock_from_path(collection_name) cliargs(collection_name=collection_name) mocker.patch('os.path.exists', path_exists) mocker.patch('os.path.isdir', return_value=True) mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name) mocker.patch('ansible.cli.galaxy._get_collection_widths', return_value=(14, 5)) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name]) tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections') concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False) gc.execute_list_collection(artifacts_manager=concrete_artifact_cm) out, err = capsys.readouterr() out_lines = out.splitlines() assert len(out_lines) == 5 assert out_lines[0] == '' assert out_lines[1] == '# /usr/share/ansible/collections/ansible_collections' assert out_lines[2] == 'Collection Version' assert out_lines[3] == '-------------- -------' assert out_lines[4] == 'sandwiches.ham 1.0.0 ' def test_execute_list_collection_specific_duplicate(mocker, capsys, mock_collection_objects, mock_from_path, tmp_path_factory): """Test listing a specific collection that exists at multiple paths""" collection_name = 'sandwiches.pbj' mock_from_path(collection_name) cliargs(collection_name=collection_name) mocker.patch('os.path.exists', path_exists) mocker.patch('os.path.isdir', return_value=True) mocker.patch('ansible.galaxy.collection.validate_collection_name', collection_name) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name]) tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections') concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False) gc.execute_list_collection(artifacts_manager=concrete_artifact_cm) out, err = capsys.readouterr() out_lines = out.splitlines() assert len(out_lines) == 10 assert out_lines[0] == '' assert out_lines[1] == '# 
/root/.ansible/collections/ansible_collections' assert out_lines[2] == 'Collection Version' assert out_lines[3] == '-------------- -------' assert out_lines[4] == 'sandwiches.pbj 1.5.0 ' assert out_lines[5] == '' assert out_lines[6] == '# /usr/share/ansible/collections/ansible_collections' assert out_lines[7] == 'Collection Version' assert out_lines[8] == '-------------- -------' assert out_lines[9] == 'sandwiches.pbj 1.0.0 ' def test_execute_list_collection_specific_invalid_fqcn(mocker, tmp_path_factory): """Test an invalid fully qualified collection name (FQCN)""" collection_name = 'no.good.name' cliargs(collection_name=collection_name) mocker.patch('os.path.exists', return_value=True) mocker.patch('os.path.isdir', return_value=True) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', collection_name]) tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections') concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False) with pytest.raises(AnsibleError, match='Invalid collection name'): gc.execute_list_collection(artifacts_manager=concrete_artifact_cm) def test_execute_list_collection_no_valid_paths(mocker, capsys, tmp_path_factory): """Test listing collections when no valid paths are given""" cliargs() mocker.patch('os.path.exists', return_value=True) mocker.patch('os.path.isdir', return_value=False) mocker.patch('ansible.utils.color.ANSIBLE_COLOR', False) mocker.patch('ansible.cli.galaxy.display.columns', 79) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list']) tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections') concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False) with pytest.raises(AnsibleOptionsError, match=r'None of the provided paths were usable.'): gc.execute_list_collection(artifacts_manager=concrete_artifact_cm) out, err = capsys.readouterr() assert '[WARNING]: - the configured path' in err assert 'exists, but 
it\nis not a directory.' in err def test_execute_list_collection_one_invalid_path(mocker, capsys, mock_collection_objects, tmp_path_factory): """Test listing all collections when one invalid path is given""" cliargs() mocker.patch('os.path.exists', return_value=True) mocker.patch('os.path.isdir', isdir) mocker.patch('ansible.cli.galaxy.GalaxyCLI._resolve_path', side_effect=['/root/.ansible/collections', 'nope']) mocker.patch('ansible.utils.color.ANSIBLE_COLOR', False) gc = GalaxyCLI(['ansible-galaxy', 'collection', 'list', '-p', 'nope']) tmp_path = tmp_path_factory.mktemp('test-ÅÑŚÌβŁÈ Collections') concrete_artifact_cm = collection.concrete_artifact_manager.ConcreteArtifactsManager(tmp_path, validate_certs=False) gc.execute_list_collection(artifacts_manager=concrete_artifact_cm) out, err = capsys.readouterr() out_lines = out.splitlines() assert out_lines[0] == '' assert out_lines[1] == '# /root/.ansible/collections/ansible_collections' assert out_lines[2] == 'Collection Version' assert out_lines[3] == '----------------- -------' assert out_lines[4] == 'sandwiches.pbj 1.5.0 ' # Only a partial test of the output assert err == '[WARNING]: - the configured path nope, exists, but it is not a directory.\n'
gpl-3.0
dementrock/nbgrader
nbgrader/tests/nbextensions/test_assignment_list.py
1
13208
import pytest import os from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC from selenium.common.exceptions import TimeoutException, NoSuchElementException from .. import run_python_module def _wait(browser): return WebDriverWait(browser, 30) def _load_assignments_list(browser, retries=5): # go to the correct page browser.get("http://localhost:9000/tree") def page_loaded(browser): return browser.execute_script( 'return typeof IPython !== "undefined" && IPython.page !== undefined;') # wait for the page to load try: _wait(browser).until(page_loaded) except TimeoutException: if retries > 0: print("Retrying page load...") # page timeout, but sometimes this happens, so try refreshing? _load_assignments_list(browser, retries=retries - 1) else: print("Failed to load the page too many times") raise # wait for the extension to load _wait(browser).until(EC.presence_of_element_located((By.CSS_SELECTOR, "#assignments"))) # switch to the assignments list element = browser.find_element_by_link_text("Assignments") element.click() # make sure released, downloaded, and submitted assignments are visible _wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list"))) _wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list"))) _wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#submitted_assignments_list"))) def _expand(browser, list_id, assignment): browser.find_element_by_link_text(assignment).click() rows = browser.find_elements_by_css_selector("{} .list_item".format(list_id)) for i in range(1, len(rows)): _wait(browser).until(lambda browser: browser.find_elements_by_css_selector("{} .list_item".format(list_id))[i].is_displayed()) return rows def _unexpand(browser, list_id, assignment): browser.find_element_by_link_text(assignment).click() rows = 
browser.find_elements_by_css_selector("{} .list_item".format(list_id)) for i in range(1, len(rows)): _wait(browser).until(lambda browser: not browser.find_elements_by_css_selector("{} .list_item".format(list_id))[i].is_displayed()) def _wait_for_modal(browser): _wait(browser).until(EC.presence_of_all_elements_located((By.CSS_SELECTOR, ".modal-dialog"))) def _dismiss_modal(browser): button = browser.find_element_by_css_selector(".modal-footer .btn-primary") button.click() def modal_gone(browser): try: browser.find_element_by_css_selector(".modal-dialog") except NoSuchElementException: return True return False _wait(browser).until(modal_gone) def _sort_rows(x): try: item_name = x.find_element_by_class_name("item_name").text except NoSuchElementException: item_name = "" return item_name @pytest.mark.js def test_show_assignments_list(browser, class_files): _load_assignments_list(browser) # make sure all the placeholders ar initially showing _wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list_placeholder"))) _wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list_placeholder"))) _wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#submitted_assignments_list_placeholder"))) # release an assignment run_python_module(["nbgrader", "assign", "Problem Set 1"]) run_python_module(["nbgrader", "release", "Problem Set 1", "--course", "abc101"]) # click the refresh button browser.find_element_by_css_selector("#refresh_assignments_list").click() # wait for the released assignments to update _wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list_placeholder"))) rows = browser.find_elements_by_css_selector("#released_assignments_list > .list_item") assert len(rows) == 1 assert rows[0].find_element_by_class_name("item_name").text == "Problem Set 1" assert rows[0].find_element_by_class_name("item_course").text == "abc101" 
@pytest.mark.js def test_multiple_released_assignments(browser, class_files): _load_assignments_list(browser) # release another assignment run_python_module(["nbgrader", "assign", "ps1"]) run_python_module(["nbgrader", "release", "ps1", "--course", "xyz 200"]) # click the refresh button browser.find_element_by_css_selector("#refresh_assignments_list").click() # wait for the released assignments to update _wait(browser).until(lambda browser: len(browser.find_elements_by_css_selector("#released_assignments_list > .list_item")) == 2) rows = browser.find_elements_by_css_selector("#released_assignments_list > .list_item") rows.sort(key=_sort_rows) assert rows[0].find_element_by_class_name("item_name").text == "Problem Set 1" assert rows[0].find_element_by_class_name("item_course").text == "abc101" assert rows[1].find_element_by_class_name("item_name").text == "ps1" assert rows[1].find_element_by_class_name("item_course").text == "xyz 200" @pytest.mark.js def test_fetch_assignment(browser, class_files): _load_assignments_list(browser) # click the "fetch" button _wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list_placeholder"))) rows = browser.find_elements_by_css_selector("#released_assignments_list > .list_item") rows[1].find_element_by_css_selector(".item_status button").click() # wait for the downloaded assignments list to update _wait(browser).until(lambda browser: len(browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item")) == 1) rows = browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item") assert rows[0].find_element_by_class_name("item_name").text == "ps1" assert rows[0].find_element_by_class_name("item_course").text == "xyz 200" assert os.path.exists(os.path.join(class_files, "ps1")) # expand the assignment to show the notebooks rows = _expand(browser, "#xyz_200-ps1", "ps1") rows.sort(key=_sort_rows) assert len(rows) == 2 assert 
rows[1].find_element_by_class_name("item_name").text == "problem 1" # unexpand the assignment _unexpand(browser, "#xyz_200-ps1", "ps1") @pytest.mark.js def test_submit_assignment(browser, class_files): _load_assignments_list(browser) # submit it _wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list_placeholder"))) rows = browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item") rows[0].find_element_by_css_selector(".item_status button").click() # wait for the submitted assignments list to update _wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#submitted_assignments_list_placeholder"))) rows = browser.find_elements_by_css_selector("#submitted_assignments_list > .list_item") assert len(rows) == 1 assert rows[0].find_element_by_class_name("item_name").text == "ps1" assert rows[0].find_element_by_class_name("item_course").text == "xyz 200" # submit it again rows = browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item") rows[0].find_element_by_css_selector(".item_status button").click() # wait for the submitted assignments list to update _wait(browser).until(lambda browser: len(browser.find_elements_by_css_selector("#submitted_assignments_list > .list_item")) == 2) rows = browser.find_elements_by_css_selector("#submitted_assignments_list > .list_item") rows.sort(key=_sort_rows) assert rows[0].find_element_by_class_name("item_name").text == "ps1" assert rows[0].find_element_by_class_name("item_course").text == "xyz 200" assert rows[1].find_element_by_class_name("item_name").text == "ps1" assert rows[1].find_element_by_class_name("item_course").text == "xyz 200" assert rows[0].find_element_by_class_name("item_status").text != rows[1].find_element_by_class_name("item_status").text @pytest.mark.js def test_fetch_second_assignment(browser, class_files): _load_assignments_list(browser) # click the "fetch" button 
_wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list_placeholder"))) rows = browser.find_elements_by_css_selector("#released_assignments_list > .list_item") rows[0].find_element_by_css_selector(".item_status button").click() # wait for the downloaded assignments list to update _wait(browser).until(EC.visibility_of_element_located((By.CSS_SELECTOR, "#released_assignments_list_placeholder"))) _wait(browser).until(lambda browser: len(browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item")) == 2) rows = browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item") rows.sort(key=_sort_rows) assert rows[0].find_element_by_class_name("item_name").text == "Problem Set 1" assert rows[0].find_element_by_class_name("item_course").text == "abc101" assert rows[1].find_element_by_class_name("item_name").text == "ps1" assert rows[1].find_element_by_class_name("item_course").text == "xyz 200" assert os.path.exists(os.path.join(class_files, "Problem Set 1")) # expand the assignment to show the notebooks rows = _expand(browser, "#abc101-Problem_Set_1", "Problem Set 1") rows.sort(key=_sort_rows) assert len(rows) == 3 assert rows[1].find_element_by_class_name("item_name").text == "Problem 1" assert rows[2].find_element_by_class_name("item_name").text == "Problem 2" # unexpand the assignment _unexpand(browser, "abc101-Problem_Set_1", "Problem Set 1") @pytest.mark.js def test_submit_other_assignment(browser, class_files): _load_assignments_list(browser) # submit it _wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list_placeholder"))) rows = browser.find_elements_by_css_selector("#fetched_assignments_list > .list_item") rows[0].find_element_by_css_selector(".item_status button").click() # wait for the submitted assignments list to update _wait(browser).until(lambda browser: len(browser.find_elements_by_css_selector("#submitted_assignments_list > 
.list_item")) == 3) rows = browser.find_elements_by_css_selector("#submitted_assignments_list > .list_item") rows.sort(key=_sort_rows) assert rows[0].find_element_by_class_name("item_name").text == "Problem Set 1" assert rows[0].find_element_by_class_name("item_course").text == "abc101" assert rows[1].find_element_by_class_name("item_name").text == "ps1" assert rows[1].find_element_by_class_name("item_course").text == "xyz 200" assert rows[2].find_element_by_class_name("item_name").text == "ps1" assert rows[2].find_element_by_class_name("item_course").text == "xyz 200" assert rows[0].find_element_by_class_name("item_status").text != rows[1].find_element_by_class_name("item_status").text assert rows[0].find_element_by_class_name("item_status").text != rows[2].find_element_by_class_name("item_status").text @pytest.mark.js def test_validate_ok(browser, class_files): _load_assignments_list(browser) # expand the assignment to show the notebooks _wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list_placeholder"))) rows = _expand(browser, "#xyz_200-ps1", "ps1") rows.sort(key=_sort_rows) assert len(rows) == 2 assert rows[1].find_element_by_class_name("item_name").text == "problem 1" # click the "validate" button rows[1].find_element_by_css_selector(".item_status button").click() # wait for the modal dialog to appear _wait_for_modal(browser) # check that it succeeded browser.find_element_by_css_selector(".modal-dialog .validation-success") # close the modal dialog _dismiss_modal(browser) @pytest.mark.js def test_validate_failure(browser, class_files): _load_assignments_list(browser) # expand the assignment to show the notebooks _wait(browser).until(EC.invisibility_of_element_located((By.CSS_SELECTOR, "#fetched_assignments_list_placeholder"))) rows = _expand(browser, "#abc101-Problem_Set_1", "Problem Set 1") rows.sort(key=_sort_rows) assert len(rows) == 3 assert rows[1].find_element_by_class_name("item_name").text == "Problem 1" 
assert rows[2].find_element_by_class_name("item_name").text == "Problem 2" # click the "validate" button rows[2].find_element_by_css_selector(".item_status button").click() # wait for the modal dialog to appear _wait_for_modal(browser) # check that it succeeded browser.find_element_by_css_selector(".modal-dialog .validation-failed") # close the modal dialog _dismiss_modal(browser)
bsd-3-clause
liangz0707/scikit-learn
sklearn/externals/joblib/disk.py
320
3280
""" Disk management utilities. """ # Authors: Gael Varoquaux <gael dot varoquaux at normalesup dot org> # Lars Buitinck <L.J.Buitinck@uva.nl> # Copyright (c) 2010 Gael Varoquaux # License: BSD Style, 3 clauses. import errno import os import shutil import sys import time def disk_used(path): """ Return the disk usage in a directory.""" size = 0 for file in os.listdir(path) + ['.']: stat = os.stat(os.path.join(path, file)) if hasattr(stat, 'st_blocks'): size += stat.st_blocks * 512 else: # on some platform st_blocks is not available (e.g., Windows) # approximate by rounding to next multiple of 512 size += (stat.st_size // 512 + 1) * 512 # We need to convert to int to avoid having longs on some systems (we # don't want longs to avoid problems we SQLite) return int(size / 1024.) def memstr_to_kbytes(text): """ Convert a memory text to it's value in kilobytes. """ kilo = 1024 units = dict(K=1, M=kilo, G=kilo ** 2) try: size = int(units[text[-1]] * float(text[:-1])) except (KeyError, ValueError): raise ValueError( "Invalid literal for size give: %s (type %s) should be " "alike '10G', '500M', '50K'." % (text, type(text)) ) return size def mkdirp(d): """Ensure directory d exists (like mkdir -p on Unix) No guarantee that the directory is writable. """ try: os.makedirs(d) except OSError as e: if e.errno != errno.EEXIST: raise # if a rmtree operation fails in rm_subdirs, wait for this much time (in secs), # then retry once. if it still fails, raise the exception RM_SUBDIRS_RETRY_TIME = 0.1 def rm_subdirs(path, onerror=None): """Remove all subdirectories in this path. The directory indicated by `path` is left in place, and its subdirectories are erased. If onerror is set, it is called to handle the error with arguments (func, path, exc_info) where func is os.listdir, os.remove, or os.rmdir; path is the argument to that function that caused it to fail; and exc_info is a tuple returned by sys.exc_info(). If onerror is None, an exception is raised. 
""" # NOTE this code is adapted from the one in shutil.rmtree, and is # just as fast names = [] try: names = os.listdir(path) except os.error as err: if onerror is not None: onerror(os.listdir, path, sys.exc_info()) else: raise for name in names: fullname = os.path.join(path, name) if os.path.isdir(fullname): if onerror is not None: shutil.rmtree(fullname, False, onerror) else: # allow the rmtree to fail once, wait and re-try. # if the error is raised again, fail err_count = 0 while True: try: shutil.rmtree(fullname, False, None) break except os.error: if err_count > 0: raise err_count += 1 time.sleep(RM_SUBDIRS_RETRY_TIME)
bsd-3-clause
mollstam/UnrealPy
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/pip-7.1.0/pip/_vendor/cachecontrol/caches/file_cache.py
762
3532
import hashlib import os from pip._vendor.lockfile import LockFile from pip._vendor.lockfile.mkdirlockfile import MkdirLockFile from ..cache import BaseCache from ..controller import CacheController def _secure_open_write(filename, fmode): # We only want to write to this file, so open it in write only mode flags = os.O_WRONLY # os.O_CREAT | os.O_EXCL will fail if the file already exists, so we only # will open *new* files. # We specify this because we want to ensure that the mode we pass is the # mode of the file. flags |= os.O_CREAT | os.O_EXCL # Do not follow symlinks to prevent someone from making a symlink that # we follow and insecurely open a cache file. if hasattr(os, "O_NOFOLLOW"): flags |= os.O_NOFOLLOW # On Windows we'll mark this file as binary if hasattr(os, "O_BINARY"): flags |= os.O_BINARY # Before we open our file, we want to delete any existing file that is # there try: os.remove(filename) except (IOError, OSError): # The file must not exist already, so we can just skip ahead to opening pass # Open our file, the use of os.O_CREAT | os.O_EXCL will ensure that if a # race condition happens between the os.remove and this line, that an # error will be raised. Because we utilize a lockfile this should only # happen if someone is attempting to attack us. 
fd = os.open(filename, flags, fmode) try: return os.fdopen(fd, "wb") except: # An error occurred wrapping our FD in a file object os.close(fd) raise class FileCache(BaseCache): def __init__(self, directory, forever=False, filemode=0o0600, dirmode=0o0700, use_dir_lock=None, lock_class=None): if use_dir_lock is not None and lock_class is not None: raise ValueError("Cannot use use_dir_lock and lock_class together") if use_dir_lock: lock_class = MkdirLockFile if lock_class is None: lock_class = LockFile self.directory = directory self.forever = forever self.filemode = filemode self.dirmode = dirmode self.lock_class = lock_class @staticmethod def encode(x): return hashlib.sha224(x.encode()).hexdigest() def _fn(self, name): # NOTE: This method should not change as some may depend on it. # See: https://github.com/ionrock/cachecontrol/issues/63 hashed = self.encode(name) parts = list(hashed[:5]) + [hashed] return os.path.join(self.directory, *parts) def get(self, key): name = self._fn(key) if not os.path.exists(name): return None with open(name, 'rb') as fh: return fh.read() def set(self, key, value): name = self._fn(key) # Make sure the directory exists try: os.makedirs(os.path.dirname(name), self.dirmode) except (IOError, OSError): pass with self.lock_class(name) as lock: # Write our actual file with _secure_open_write(lock.path, self.filemode) as fh: fh.write(value) def delete(self, key): name = self._fn(key) if not self.forever: os.remove(name) def url_to_file_path(url, filecache): """Return the file cache path based on the URL. This does not ensure the file exists! """ key = CacheController.cache_url(url) return filecache._fn(key)
mit
zhhf/charging
charging/db/migration/alembic_migrations/versions/40dffbf4b549_nvp_dist_router.py
11
1928
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright 2013 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """nvp_dist_router Revision ID: 40dffbf4b549 Revises: 63afba73813 Create Date: 2013-08-21 18:00:26.214923 """ # revision identifiers, used by Alembic. revision = '40dffbf4b549' down_revision = '63afba73813' # Change to ['*'] if this migration applies to all plugins migration_for_plugins = [ 'neutron.plugins.nicira.NeutronPlugin.NvpPluginV2', 'neutron.plugins.nicira.NeutronServicePlugin.NvpAdvancedPlugin', 'neutron.plugins.vmware.plugin.NsxPlugin', 'neutron.plugins.vmware.plugin.NsxServicePlugin' ] from alembic import op import sqlalchemy as sa from neutron.db import migration def upgrade(active_plugins=None, options=None): if not migration.should_run(active_plugins, migration_for_plugins): return op.create_table( 'nsxrouterextattributess', sa.Column('router_id', sa.String(length=36), nullable=False), sa.Column('distributed', sa.Boolean(), nullable=False), sa.ForeignKeyConstraint( ['router_id'], ['routers.id'], ondelete='CASCADE'), sa.PrimaryKeyConstraint('router_id') ) def downgrade(active_plugins=None, options=None): if not migration.should_run(active_plugins, migration_for_plugins): return op.drop_table('nsxrouterextattributess')
apache-2.0
souzainf3/namebench
nb_third_party/dns/tokenizer.py
246
17962
# Copyright (C) 2003-2007, 2009, 2010 Nominum, Inc. # # Permission to use, copy, modify, and distribute this software and its # documentation for any purpose with or without fee is hereby granted, # provided that the above copyright notice and this permission notice # appear in all copies. # # THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES # WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF # MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR # ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES # WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN # ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT # OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. """Tokenize DNS master file format""" import cStringIO import sys import dns.exception import dns.name import dns.ttl _DELIMITERS = { ' ' : True, '\t' : True, '\n' : True, ';' : True, '(' : True, ')' : True, '"' : True } _QUOTING_DELIMITERS = { '"' : True } EOF = 0 EOL = 1 WHITESPACE = 2 IDENTIFIER = 3 QUOTED_STRING = 4 COMMENT = 5 DELIMITER = 6 class UngetBufferFull(dns.exception.DNSException): """Raised when an attempt is made to unget a token when the unget buffer is full.""" pass class Token(object): """A DNS master file format token. @ivar ttype: The token type @type ttype: int @ivar value: The token value @type value: string @ivar has_escape: Does the token value contain escapes? @type has_escape: bool """ def __init__(self, ttype, value='', has_escape=False): """Initialize a token instance. @param ttype: The token type @type ttype: int @ivar value: The token value @type value: string @ivar has_escape: Does the token value contain escapes? 
@type has_escape: bool """ self.ttype = ttype self.value = value self.has_escape = has_escape def is_eof(self): return self.ttype == EOF def is_eol(self): return self.ttype == EOL def is_whitespace(self): return self.ttype == WHITESPACE def is_identifier(self): return self.ttype == IDENTIFIER def is_quoted_string(self): return self.ttype == QUOTED_STRING def is_comment(self): return self.ttype == COMMENT def is_delimiter(self): return self.ttype == DELIMITER def is_eol_or_eof(self): return (self.ttype == EOL or self.ttype == EOF) def __eq__(self, other): if not isinstance(other, Token): return False return (self.ttype == other.ttype and self.value == other.value) def __ne__(self, other): if not isinstance(other, Token): return True return (self.ttype != other.ttype or self.value != other.value) def __str__(self): return '%d "%s"' % (self.ttype, self.value) def unescape(self): if not self.has_escape: return self unescaped = '' l = len(self.value) i = 0 while i < l: c = self.value[i] i += 1 if c == '\\': if i >= l: raise dns.exception.UnexpectedEnd c = self.value[i] i += 1 if c.isdigit(): if i >= l: raise dns.exception.UnexpectedEnd c2 = self.value[i] i += 1 if i >= l: raise dns.exception.UnexpectedEnd c3 = self.value[i] i += 1 if not (c2.isdigit() and c3.isdigit()): raise dns.exception.SyntaxError c = chr(int(c) * 100 + int(c2) * 10 + int(c3)) unescaped += c return Token(self.ttype, unescaped) # compatibility for old-style tuple tokens def __len__(self): return 2 def __iter__(self): return iter((self.ttype, self.value)) def __getitem__(self, i): if i == 0: return self.ttype elif i == 1: return self.value else: raise IndexError class Tokenizer(object): """A DNS master file format tokenizer. A token is a (type, value) tuple, where I{type} is an int, and I{value} is a string. The valid types are EOF, EOL, WHITESPACE, IDENTIFIER, QUOTED_STRING, COMMENT, and DELIMITER. 
@ivar file: The file to tokenize @type file: file @ivar ungotten_char: The most recently ungotten character, or None. @type ungotten_char: string @ivar ungotten_token: The most recently ungotten token, or None. @type ungotten_token: (int, string) token tuple @ivar multiline: The current multiline level. This value is increased by one every time a '(' delimiter is read, and decreased by one every time a ')' delimiter is read. @type multiline: int @ivar quoting: This variable is true if the tokenizer is currently reading a quoted string. @type quoting: bool @ivar eof: This variable is true if the tokenizer has encountered EOF. @type eof: bool @ivar delimiters: The current delimiter dictionary. @type delimiters: dict @ivar line_number: The current line number @type line_number: int @ivar filename: A filename that will be returned by the L{where} method. @type filename: string """ def __init__(self, f=sys.stdin, filename=None): """Initialize a tokenizer instance. @param f: The file to tokenize. The default is sys.stdin. This parameter may also be a string, in which case the tokenizer will take its input from the contents of the string. @type f: file or string @param filename: the name of the filename that the L{where} method will return. @type filename: string """ if isinstance(f, str): f = cStringIO.StringIO(f) if filename is None: filename = '<string>' else: if filename is None: if f is sys.stdin: filename = '<stdin>' else: filename = '<file>' self.file = f self.ungotten_char = None self.ungotten_token = None self.multiline = 0 self.quoting = False self.eof = False self.delimiters = _DELIMITERS self.line_number = 1 self.filename = filename def _get_char(self): """Read a character from input. 
@rtype: string """ if self.ungotten_char is None: if self.eof: c = '' else: c = self.file.read(1) if c == '': self.eof = True elif c == '\n': self.line_number += 1 else: c = self.ungotten_char self.ungotten_char = None return c def where(self): """Return the current location in the input. @rtype: (string, int) tuple. The first item is the filename of the input, the second is the current line number. """ return (self.filename, self.line_number) def _unget_char(self, c): """Unget a character. The unget buffer for characters is only one character large; it is an error to try to unget a character when the unget buffer is not empty. @param c: the character to unget @type c: string @raises UngetBufferFull: there is already an ungotten char """ if not self.ungotten_char is None: raise UngetBufferFull self.ungotten_char = c def skip_whitespace(self): """Consume input until a non-whitespace character is encountered. The non-whitespace character is then ungotten, and the number of whitespace characters consumed is returned. If the tokenizer is in multiline mode, then newlines are whitespace. @rtype: int """ skipped = 0 while True: c = self._get_char() if c != ' ' and c != '\t': if (c != '\n') or not self.multiline: self._unget_char(c) return skipped skipped += 1 def get(self, want_leading = False, want_comment = False): """Get the next token. @param want_leading: If True, return a WHITESPACE token if the first character read is whitespace. The default is False. @type want_leading: bool @param want_comment: If True, return a COMMENT token if the first token read is a comment. The default is False. 
@type want_comment: bool @rtype: Token object @raises dns.exception.UnexpectedEnd: input ended prematurely @raises dns.exception.SyntaxError: input was badly formed """ if not self.ungotten_token is None: token = self.ungotten_token self.ungotten_token = None if token.is_whitespace(): if want_leading: return token elif token.is_comment(): if want_comment: return token else: return token skipped = self.skip_whitespace() if want_leading and skipped > 0: return Token(WHITESPACE, ' ') token = '' ttype = IDENTIFIER has_escape = False while True: c = self._get_char() if c == '' or c in self.delimiters: if c == '' and self.quoting: raise dns.exception.UnexpectedEnd if token == '' and ttype != QUOTED_STRING: if c == '(': self.multiline += 1 self.skip_whitespace() continue elif c == ')': if not self.multiline > 0: raise dns.exception.SyntaxError self.multiline -= 1 self.skip_whitespace() continue elif c == '"': if not self.quoting: self.quoting = True self.delimiters = _QUOTING_DELIMITERS ttype = QUOTED_STRING continue else: self.quoting = False self.delimiters = _DELIMITERS self.skip_whitespace() continue elif c == '\n': return Token(EOL, '\n') elif c == ';': while 1: c = self._get_char() if c == '\n' or c == '': break token += c if want_comment: self._unget_char(c) return Token(COMMENT, token) elif c == '': if self.multiline: raise dns.exception.SyntaxError('unbalanced parentheses') return Token(EOF) elif self.multiline: self.skip_whitespace() token = '' continue else: return Token(EOL, '\n') else: # This code exists in case we ever want a # delimiter to be returned. It never produces # a token currently. 
token = c ttype = DELIMITER else: self._unget_char(c) break elif self.quoting: if c == '\\': c = self._get_char() if c == '': raise dns.exception.UnexpectedEnd if c.isdigit(): c2 = self._get_char() if c2 == '': raise dns.exception.UnexpectedEnd c3 = self._get_char() if c == '': raise dns.exception.UnexpectedEnd if not (c2.isdigit() and c3.isdigit()): raise dns.exception.SyntaxError c = chr(int(c) * 100 + int(c2) * 10 + int(c3)) elif c == '\n': raise dns.exception.SyntaxError('newline in quoted string') elif c == '\\': # # It's an escape. Put it and the next character into # the token; it will be checked later for goodness. # token += c has_escape = True c = self._get_char() if c == '' or c == '\n': raise dns.exception.UnexpectedEnd token += c if token == '' and ttype != QUOTED_STRING: if self.multiline: raise dns.exception.SyntaxError('unbalanced parentheses') ttype = EOF return Token(ttype, token, has_escape) def unget(self, token): """Unget a token. The unget buffer for tokens is only one token large; it is an error to try to unget a token when the unget buffer is not empty. @param token: the token to unget @type token: Token object @raises UngetBufferFull: there is already an ungotten token """ if not self.ungotten_token is None: raise UngetBufferFull self.ungotten_token = token def next(self): """Return the next item in an iteration. @rtype: (int, string) """ token = self.get() if token.is_eof(): raise StopIteration return token def __iter__(self): return self # Helpers def get_int(self): """Read the next token and interpret it as an integer. @raises dns.exception.SyntaxError: @rtype: int """ token = self.get().unescape() if not token.is_identifier(): raise dns.exception.SyntaxError('expecting an identifier') if not token.value.isdigit(): raise dns.exception.SyntaxError('expecting an integer') return int(token.value) def get_uint8(self): """Read the next token and interpret it as an 8-bit unsigned integer. 
@raises dns.exception.SyntaxError: @rtype: int """ value = self.get_int() if value < 0 or value > 255: raise dns.exception.SyntaxError('%d is not an unsigned 8-bit integer' % value) return value def get_uint16(self): """Read the next token and interpret it as a 16-bit unsigned integer. @raises dns.exception.SyntaxError: @rtype: int """ value = self.get_int() if value < 0 or value > 65535: raise dns.exception.SyntaxError('%d is not an unsigned 16-bit integer' % value) return value def get_uint32(self): """Read the next token and interpret it as a 32-bit unsigned integer. @raises dns.exception.SyntaxError: @rtype: int """ token = self.get().unescape() if not token.is_identifier(): raise dns.exception.SyntaxError('expecting an identifier') if not token.value.isdigit(): raise dns.exception.SyntaxError('expecting an integer') value = long(token.value) if value < 0 or value > 4294967296L: raise dns.exception.SyntaxError('%d is not an unsigned 32-bit integer' % value) return value def get_string(self, origin=None): """Read the next token and interpret it as a string. @raises dns.exception.SyntaxError: @rtype: string """ token = self.get().unescape() if not (token.is_identifier() or token.is_quoted_string()): raise dns.exception.SyntaxError('expecting a string') return token.value def get_identifier(self, origin=None): """Read the next token and raise an exception if it is not an identifier. @raises dns.exception.SyntaxError: @rtype: string """ token = self.get().unescape() if not token.is_identifier(): raise dns.exception.SyntaxError('expecting an identifier') return token.value def get_name(self, origin=None): """Read the next token and interpret it as a DNS name. @raises dns.exception.SyntaxError: @rtype: dns.name.Name object""" token = self.get() if not token.is_identifier(): raise dns.exception.SyntaxError('expecting an identifier') return dns.name.from_text(token.value, origin) def get_eol(self): """Read the next token and raise an exception if it isn't EOL or EOF. 
@raises dns.exception.SyntaxError: @rtype: string """ token = self.get() if not token.is_eol_or_eof(): raise dns.exception.SyntaxError('expected EOL or EOF, got %d "%s"' % (token.ttype, token.value)) return token.value def get_ttl(self): token = self.get().unescape() if not token.is_identifier(): raise dns.exception.SyntaxError('expecting an identifier') return dns.ttl.from_text(token.value)
apache-2.0
cycotech/WAR-app
env/lib/python3.5/site-packages/wheel/signatures/__init__.py
70
3766
""" Create and verify jws-js format Ed25519 signatures. """ import json from ..util import urlsafe_b64decode, urlsafe_b64encode, native, binary __all__ = ['sign', 'verify'] ed25519ll = None ALG = "Ed25519" def get_ed25519ll(): """Lazy import-and-test of ed25519 module""" global ed25519ll if not ed25519ll: try: import ed25519ll # fast (thousands / s) except (ImportError, OSError): # pragma nocover from . import ed25519py as ed25519ll # pure Python (hundreds / s) test() return ed25519ll def sign(payload, keypair): """Return a JWS-JS format signature given a JSON-serializable payload and an Ed25519 keypair.""" get_ed25519ll() # header = { "alg": ALG, "jwk": { "kty": ALG, # alg -> kty in jwk-08. "vk": native(urlsafe_b64encode(keypair.vk)) } } encoded_header = urlsafe_b64encode(binary(json.dumps(header, sort_keys=True))) encoded_payload = urlsafe_b64encode(binary(json.dumps(payload, sort_keys=True))) secured_input = b".".join((encoded_header, encoded_payload)) sig_msg = ed25519ll.crypto_sign(secured_input, keypair.sk) signature = sig_msg[:ed25519ll.SIGNATUREBYTES] encoded_signature = urlsafe_b64encode(signature) return {"recipients": [{"header": native(encoded_header), "signature": native(encoded_signature)}], "payload": native(encoded_payload)} def assertTrue(condition, message=""): if not condition: raise ValueError(message) def verify(jwsjs): """Return (decoded headers, payload) if all signatures in jwsjs are consistent, else raise ValueError. 
Caller must decide whether the keys are actually trusted.""" get_ed25519ll() # XXX forbid duplicate keys in JSON input using object_pairs_hook (2.7+) recipients = jwsjs["recipients"] encoded_payload = binary(jwsjs["payload"]) headers = [] for recipient in recipients: assertTrue(len(recipient) == 2, "Unknown recipient key {0}".format(recipient)) h = binary(recipient["header"]) s = binary(recipient["signature"]) header = json.loads(native(urlsafe_b64decode(h))) assertTrue(header["alg"] == ALG, "Unexpected algorithm {0}".format(header["alg"])) if "alg" in header["jwk"] and "kty" not in header["jwk"]: header["jwk"]["kty"] = header["jwk"]["alg"] # b/w for JWK < -08 assertTrue(header["jwk"]["kty"] == ALG, # true for Ed25519 "Unexpected key type {0}".format(header["jwk"]["kty"])) vk = urlsafe_b64decode(binary(header["jwk"]["vk"])) secured_input = b".".join((h, encoded_payload)) sig = urlsafe_b64decode(s) sig_msg = sig+secured_input verified_input = native(ed25519ll.crypto_sign_open(sig_msg, vk)) verified_header, verified_payload = verified_input.split('.') verified_header = binary(verified_header) decoded_header = native(urlsafe_b64decode(verified_header)) headers.append(json.loads(decoded_header)) verified_payload = binary(verified_payload) # only return header, payload that have passed through the crypto library. payload = json.loads(native(urlsafe_b64decode(verified_payload))) return headers, payload def test(): kp = ed25519ll.crypto_sign_keypair() payload = {'test': 'onstartup'} jwsjs = json.loads(json.dumps(sign(payload, kp))) verify(jwsjs) jwsjs['payload'] += 'x' try: verify(jwsjs) except ValueError: pass else: # pragma no cover raise RuntimeError("No error from bad wheel.signatures payload.")
mit
laosiaudi/tensorflow
tensorflow/python/kernel_tests/segment_reduction_ops_test.py
20
23513
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Functional tests for segment reduction ops.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np import tensorflow as tf from tensorflow.python.util.all_util import reveal_undocumented class SegmentReductionHelper(tf.test.TestCase): def _input(self, input_shape, dtype=tf.int32): num_elem = 1 for x in input_shape: num_elem *= x values = np.arange(1, num_elem + 1) np_values = values.reshape(input_shape).astype(dtype.as_numpy_dtype) return tf.constant(values, shape=input_shape, dtype=dtype), np_values def _segmentReduce(self, indices, x, op1, op2=None, num_out_rows=None): if not x.size: return np.array([]) indices = np.asarray(indices) if num_out_rows is None: num_out_rows = indices[-1] + 1 output = [None] * num_out_rows slice_shape = x.shape[indices.ndim:] x_flat = x.reshape((indices.size,) + slice_shape) for i, index in enumerate(indices.ravel()): if output[index] is not None: output[index] = op1(output[index], x_flat[i]) else: output[index] = x_flat[i] # zero initialize values that are still uncalcuated. 
output = [o if o is not None else np.zeros(slice_shape) for o in output] if op2 is not None: output = [op2(o) for o in output] output = [o.reshape(slice_shape) for o in output] return np.array(output) def _assertAllClose(self, indices, np_x, tf_x): for i in set(np.asarray(indices).ravel()): self.assertAllClose(np_x[i], tf_x[i]) def _mean_cum_op(self, x, y): return (x[0] + y, x[1] + 1) if isinstance(x, tuple) else (x + y, 2) def _mean_reduce_op(self, x): return x[0] / x[1] if isinstance(x, tuple) else x class SegmentReductionOpTest(SegmentReductionHelper): def testValues(self): dtypes = [tf.float32, tf.float64, tf.int64, tf.int32, tf.complex64, tf.complex128] # Each item is np_op1, np_op2, tf_op ops_list = [(np.add, None, tf.segment_sum), (self._mean_cum_op, self._mean_reduce_op, tf.segment_mean), (np.ndarray.__mul__, None, tf.segment_prod), (np.minimum, None, tf.segment_min), (np.maximum, None, tf.segment_max)] # A subset of ops has been enabled for complex numbers complex_ops_list = [(np.add, None, tf.segment_sum), (np.ndarray.__mul__, None, tf.segment_prod)] n = 10 shape = [n, 2] indices = [i // 3 for i in range(n)] for dtype in dtypes: if dtype in (tf.complex64, tf.complex128): curr_ops_list = complex_ops_list else: curr_ops_list = ops_list with self.test_session(use_gpu=False): tf_x, np_x = self._input(shape, dtype=dtype) for np_op1, np_op2, tf_op in curr_ops_list: np_ans = self._segmentReduce(indices, np_x, np_op1, np_op2) s = tf_op(data=tf_x, segment_ids=indices) tf_ans = s.eval() self._assertAllClose(indices, np_ans, tf_ans) # NOTE(mrry): The static shape inference that computes # `tf_ans.shape` can only infer that sizes from dimension 1 # onwards, because the size of dimension 0 is data-dependent # and may therefore vary dynamically. 
self.assertAllEqual(np_ans.shape[1:], tf_ans.shape[1:]) def testSegmentIdsShape(self): shape = [4, 4] tf_x, _ = self._input(shape) indices = tf.constant([0, 1, 2, 2], shape=[2, 2]) with self.assertRaises(ValueError): tf.segment_sum(data=tf_x, segment_ids=indices) def testSegmentIdsSize(self): shape = [4, 4] with self.test_session(): tf_x, _ = self._input(shape) indices = [0, 1] s = tf.segment_sum(data=tf_x, segment_ids=indices) with self.assertRaisesOpError("segment_ids should be the same size"): s.eval() def testSegmentIdsValid(self): # This is a baseline for the following SegmentIdsInvalid* tests. shape = [4, 4] with self.test_session(): tf_x, _ = self._input(shape) indices = [0, 0, 0, 1] result = tf.segment_sum(data=tf_x, segment_ids=indices).eval() self.assertAllEqual([[15, 18, 21, 24], [13, 14, 15, 16]], result) def testSegmentIdsInvalid1(self): shape = [4, 4] with self.test_session(): tf_x, _ = self._input(shape) indices = [-1, -1, 0, 0] s = tf.segment_sum(data=tf_x, segment_ids=indices) with self.assertRaisesOpError("segment ids do not start at 0"): s.eval() def testSegmentIdsInvalid2(self): shape = [4, 4] with self.test_session(): tf_x, _ = self._input(shape) indices = [1, 1, 2, 2] s = tf.segment_sum(data=tf_x, segment_ids=indices) with self.assertRaisesOpError("segment ids do not start at 0"): s.eval() def testSegmentIdsInvalid3(self): shape = [4, 4] with self.test_session(): tf_x, _ = self._input(shape) indices = [0, 0, 2, 2] s = tf.segment_sum(data=tf_x, segment_ids=indices) with self.assertRaisesOpError("segment ids are not increasing by 1"): s.eval() def testSegmentIdsInvalid4(self): shape = [4, 4] with self.test_session(): tf_x, _ = self._input(shape) indices = [0, 1, 0, 1] s = tf.segment_sum(data=tf_x, segment_ids=indices) with self.assertRaisesOpError("segment ids are not increasing by 1"): s.eval() def testSegmentIdsInvalid5(self): shape = [4, 4] with self.test_session(): tf_x, _ = self._input(shape) indices = [0, 1, 2, 0] s = 
tf.segment_sum(data=tf_x, segment_ids=indices) with self.assertRaisesOpError( r"Segment id 1 out of range \[0, 1\), probably " "because 'segment_ids' input is not sorted."): s.eval() def testSegmentIdsInvalid6(self): shape = [4, 4] with self.test_session(): tf_x, _ = self._input(shape) indices = [0, 0, 0, -1] s = tf.segment_sum(data=tf_x, segment_ids=indices) with self.assertRaisesOpError("segment ids must be >= 0"): s.eval() def testSegmentIdsInvalid7(self): shape = [4, 4] with self.test_session(): tf_x, _ = self._input(shape) indices = [0, 0, 0, -2] s = tf.segment_sum(data=tf_x, segment_ids=indices) with self.assertRaisesOpError("segment ids must be >= 0"): s.eval() def testGradient(self): shape = [4, 4] indices = [0, 1, 2, 2] for tf_op in [tf.segment_sum, tf.segment_mean, tf.segment_min, tf.segment_max]: with self.test_session(): tf_x, np_x = self._input(shape, dtype=tf.float64) s = tf_op(data=tf_x, segment_ids=indices) jacob_t, jacob_n = tf.test.compute_gradient( tf_x, shape, s, [3, 4], x_init_value=np_x.astype(np.double), delta=1) self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3) class UnsortedSegmentSumTest(SegmentReductionHelper): use_gpu = False def testValues(self): dtypes = [tf.float32, tf.float64, tf.int64, tf.int32, tf.complex64, tf.complex128] indices_flat = np.array([0, 4, 0, 8, 3, 8, 4, 7, 7, 3]) num_segments = 12 for indices in indices_flat, indices_flat.reshape(5, 2): shape = indices.shape + (2,) for dtype in dtypes: with self.test_session(use_gpu=self.use_gpu): tf_x, np_x = self._input(shape, dtype=dtype) np_ans = self._segmentReduce(indices, np_x, np.add, op2=None, num_out_rows=num_segments) s = tf.unsorted_segment_sum(data=tf_x, segment_ids=indices, num_segments=num_segments) tf_ans = s.eval() self._assertAllClose(indices, np_ans, tf_ans) self.assertShapeEqual(np_ans, s) def testGradient(self): num_cols = 2 indices_flat = np.array([0, 4, 0, 8, 3, 8, 4, 7, 7, 3]) num_segments = max(indices_flat) + 3 for indices in indices_flat, 
indices_flat.reshape(5, 2): shape = indices.shape + (num_cols,) with self.test_session(use_gpu=self.use_gpu): tf_x, np_x = self._input(shape, dtype=tf.float64) s = tf.unsorted_segment_sum(data=tf_x, segment_ids=indices, num_segments=num_segments) jacob_t, jacob_n = tf.test.compute_gradient( tf_x, shape, s, [num_segments, num_cols], x_init_value=np_x.astype(np.double), delta=1) self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3) def testGradientMatchesSegmentSum(self): # Strategy: compute the gradient for UnsortedSegmentSum and SegmentSum # and compare the outputs, which should be identical. # NB: for this test to work, indices must be valid for SegmentSum, namely # it must be sorted, the indices must be contiguous, and num_segments # must be max(indices) + 1. indices = [0, 0, 1, 1, 1, 2, 3, 4, 5] n = len(indices) num_cols = 2 shape = [n, num_cols] num_segments = max(indices) + 1 with self.test_session(use_gpu=self.use_gpu): tf_x, np_x = self._input(shape, dtype=tf.float64) # Results from UnsortedSegmentSum unsorted_s = tf.unsorted_segment_sum(data=tf_x, segment_ids=indices, num_segments=num_segments) (unsorted_jacob_t, unsorted_jacob_n) = tf.test.compute_gradient( tf_x, shape, unsorted_s, [num_segments, num_cols], x_init_value=np_x.astype(np.double), delta=1) # Results from SegmentSum sorted_s = tf.segment_sum(data=tf_x, segment_ids=indices) sorted_jacob_t, sorted_jacob_n = tf.test.compute_gradient( tf_x, shape, sorted_s, [num_segments, num_cols], x_init_value=np_x.astype(np.double), delta=1) self.assertAllClose(unsorted_jacob_t, sorted_jacob_t, rtol=1e-3, atol=1e-3) self.assertAllClose(unsorted_jacob_n, sorted_jacob_n, rtol=1e-3, atol=1e-3) def testBadIndices(self): # Note: GPU kernel does not return the out-of-range error needed for this # test, so this test is marked as cpu-only. 
with self.test_session(use_gpu=False): for bad in [[-1]], [[7]]: unsorted = tf.unsorted_segment_sum([[17]], bad, num_segments=2) with self.assertRaisesOpError( r"segment_ids\[0,0\] = %d is out of range \[0, 2\)" % bad[0][0]): unsorted.eval() def testEmptySecondDimension(self): dtypes = [np.float32, np.float64, np.int64, np.int32, np.complex64, np.complex128] with self.test_session(use_gpu=self.use_gpu): for dtype in dtypes: for itype in (np.int32, np.int64): data = np.zeros((2, 0), dtype=dtype) segment_ids = np.array([0, 1], dtype=itype) unsorted = tf.unsorted_segment_sum(data, segment_ids, 2) self.assertAllEqual(unsorted.eval(), np.zeros((2, 0), dtype=dtype)) class UnsortedSegmentSumGpuTest(UnsortedSegmentSumTest): use_gpu = True class SparseSegmentReductionHelper(SegmentReductionHelper): def _sparse_input(self, input_shape, num_indices, dtype=tf.int32): a, b = super(SparseSegmentReductionHelper, self)._input(input_shape, dtype) indices = np.random.randint(0, input_shape[0], num_indices).astype(np.int32) return (tf.constant(indices, dtype=tf.int32), indices, a, b) def _sparseSegmentReduce(self, x, indices, segment_indices, op1, op2=None): return self._segmentReduce(segment_indices, x[indices], op1, op2) class SparseSegmentReductionOpTest(SparseSegmentReductionHelper): def setUp(self): reveal_undocumented("tensorflow.python." "sparse_segment_mean_grad", tf) reveal_undocumented("tensorflow.python." 
"sparse_segment_sqrt_n_grad", tf) def testValues(self): dtypes = [tf.float32, tf.float64, tf.int64, tf.int32] mean_dtypes = [tf.float32, tf.float64] # Each item is np_op1, np_op2, tf_op ops_list = [(np.add, None, tf.sparse_segment_sum), (self._mean_cum_op, self._mean_reduce_op, tf.sparse_segment_mean)] n = 400 shape = [n, 2] segment_indices = [] for i in range(20): for _ in range(i + 1): segment_indices.append(i) num_indices = len(segment_indices) for dtype in dtypes: with self.test_session(use_gpu=False): tf_indices, np_indices, tf_x, np_x = self._sparse_input(shape, num_indices, dtype=dtype) for np_op1, np_op2, tf_op in ops_list: if tf_op == tf.sparse_segment_mean and dtype not in mean_dtypes: continue np_ans = self._sparseSegmentReduce(np_x, np_indices, segment_indices, np_op1, np_op2) s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) tf_ans = s.eval() self._assertAllClose(segment_indices, np_ans, tf_ans) # NOTE(mrry): The static shape inference that computes # `tf_ans.shape` can only infer that sizes from dimension 1 # onwards, because the size of dimension 0 is data-dependent # and may therefore vary dynamically. self.assertAllEqual(np_ans.shape[1:], tf_ans.shape[1:]) def testValid(self): # Baseline for the test*Invalid* methods below. 
tf_x, _ = self._input([10, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_sum, tf.sparse_segment_mean] segment_indices = [0, 1, 2, 2] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) s.eval() def testIndicesInvalid1(self): tf_x, _ = self._input([10, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_sum, tf.sparse_segment_mean] segment_indices = [0, 1, 2, 2] tf_indices = [8, -1, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) with self.assertRaisesOpError( r"indices\[1\] == -1 out of range \[0, 10\)"): s.eval() def testIndicesInvalid2(self): tf_x, _ = self._input([10, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_sum, tf.sparse_segment_mean] segment_indices = [0, 1, 2, 2] tf_indices = [8, 3, 0, 10] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) with self.assertRaisesOpError( r"indices\[3\] == 10 out of range \[0, 10\)"): s.eval() def testSegmentsInvalid1(self): tf_x, _ = self._input([10, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_sum, tf.sparse_segment_mean] segment_indices = [0, 2, 2, 2] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) with self.assertRaisesOpError("segment ids are not increasing by 1"): s.eval() def testSegmentsInvalid2(self): tf_x, _ = self._input([10, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_sum, tf.sparse_segment_mean] segment_indices = [0, 1, 0, 1] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) with self.assertRaisesOpError("segment ids are not increasing by 1"): s.eval() def testSegmentsInvalid3(self): tf_x, _ = 
self._input([10, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_sum, tf.sparse_segment_mean] segment_indices = [0, 1, 2, 0] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) with self.assertRaisesOpError( r"Segment id 1 out of range \[0, 1\), probably because " "'segment_ids' input is not sorted"): s.eval() def testSegmentsInvalid4(self): tf_x, _ = self._input([10, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_sum, tf.sparse_segment_mean] segment_indices = [-1, 0, 1, 1] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) with self.assertRaisesOpError("segment ids do not start at 0"): s.eval() def testSegmentsInvalid5(self): tf_x, _ = self._input([10, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_sum, tf.sparse_segment_mean] segment_indices = [1, 2, 2, 2] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) with self.assertRaisesOpError("segment ids do not start at 0"): s.eval() def testSegmentsInvalid6(self): tf_x, _ = self._input([10, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_sum, tf.sparse_segment_mean] segment_indices = [0, 0, 0, -1] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) with self.assertRaisesOpError("segment ids must be >= 0"): s.eval() def testSegmentsInvalid7(self): tf_x, _ = self._input([10, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_sum, tf.sparse_segment_mean] segment_indices = [0, 0, 0, -2] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) with self.assertRaisesOpError("segment ids 
must be >= 0"): s.eval() def testGradient(self): shape = [10, 4] segment_indices = [0, 1, 2, 2] num_indices = len(segment_indices) for tf_op in [tf.sparse_segment_sum, tf.sparse_segment_mean]: with self.test_session(): tf_indices, _, tf_x, np_x = self._sparse_input( shape, num_indices, dtype=tf.float64) s = tf_op(data=tf_x, indices=tf_indices, segment_ids=segment_indices) jacob_t, jacob_n = tf.test.compute_gradient( tf_x, shape, s, [3, 4], x_init_value=np_x.astype(np.double), delta=1) self.assertAllClose(jacob_t, jacob_n, rtol=1e-3, atol=1e-3) def testGradientValid(self): # Baseline for the testGradient*Invalid* methods below. tf_x, _ = self._input([3, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_mean_grad, tf.sparse_segment_sqrt_n_grad] segment_indices = [0, 1, 2, 2] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(tf_x, tf_indices, segment_indices, 10) s.eval() def testGradientIndicesInvalid1(self): tf_x, _ = self._input([3, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_mean_grad, tf.sparse_segment_sqrt_n_grad] segment_indices = [0, 1, 2, 2] tf_indices = [8, 3, 0, 10] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(tf_x, tf_indices, segment_indices, 10) with self.assertRaisesOpError(r"Index 10 out of range \[0, 10\)"): s.eval() def testGradientIndicesInvalid2(self): tf_x, _ = self._input([3, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_mean_grad, tf.sparse_segment_sqrt_n_grad] segment_indices = [0, 1, 2, 2] tf_indices = [8, 3, -1, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(tf_x, tf_indices, segment_indices, 10) with self.assertRaisesOpError(r"Index -1 out of range \[0, 10\)"): s.eval() def testGradientSegmentsInvalid1(self): tf_x, _ = self._input([3, 4], dtype=tf.float32) # expecting 3 segments ops_list = [tf.sparse_segment_mean_grad, tf.sparse_segment_sqrt_n_grad] segment_indices = [0, 1, 1, 1] # 2 segments tf_indices = [8, 3, 0, 9] with 
self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(tf_x, tf_indices, segment_indices, 10) with self.assertRaisesOpError("Invalid number of segments"): s.eval() def testGradientSegmentsInvalid2(self): tf_x, _ = self._input([1, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_mean_grad, tf.sparse_segment_sqrt_n_grad] segment_indices = [0, 1, 2, 0] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(tf_x, tf_indices, segment_indices, 10) with self.assertRaisesOpError(r"Segment id 1 out of range \[0, 1\)"): s.eval() def testGradientSegmentsInvalid3(self): tf_x, _ = self._input([2, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_mean_grad, tf.sparse_segment_sqrt_n_grad] segment_indices = [-1, 0, 1, 1] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(tf_x, tf_indices, segment_indices, 10) with self.assertRaisesOpError(r"Segment id -1 out of range \[0, 2\)"): s.eval() def testGradientSegmentsInvalid4(self): tf_x, _ = self._input([0, 4], dtype=tf.float32) ops_list = [tf.sparse_segment_mean_grad, tf.sparse_segment_sqrt_n_grad] segment_indices = [0, 1, 2, -1] tf_indices = [8, 3, 0, 9] with self.test_session(use_gpu=False): for tf_op in ops_list: s = tf_op(tf_x, tf_indices, segment_indices, 10) with self.assertRaisesOpError(r"Segment id 0 out of range \[0, 0\)"): s.eval() if __name__ == "__main__": tf.test.main()
apache-2.0
gunan/tensorflow
tensorflow/compiler/tests/slice_ops_test.py
23
9455
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for slicing.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.compiler.tests import xla_test from tensorflow.python.framework import dtypes from tensorflow.python.framework import tensor_shape from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.platform import googletest class SliceTest(xla_test.XLATestCase): def test1D(self): for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[10]) with self.test_scope(): o = array_ops.slice(i, [2], [4]) params = { i: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], } result = o.eval(feed_dict=params) self.assertAllEqual([2, 3, 4, 5], result) def testZeroSlice(self): for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[2]) with self.test_scope(): o = array_ops.slice(i, [0], [0]) params = { i: [0, 1], } result = o.eval(feed_dict=params) self.assertAllEqual([], result) def test3D(self): for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[3, 3, 10]) with self.test_scope(): o = array_ops.slice(i, [1, 2, 2], [1, 1, 4]) params = { i: [[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [9, 8, 7, 6, 5, 4, 3, 2, 1, 0], [5, 3, 1, 7, 9, 
2, 4, 6, 8, 0]], [[5, 5, 5, 5, 5, 5, 5, 5, 5, 5], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [8, 7, 6, 5, 4, 3, 2, 1, 8, 7]], [[7, 5, 7, 5, 7, 5, 7, 5, 7, 5], [1, 2, 1, 2, 1, 2, 1, 2, 1, 2], [9, 8, 7, 9, 8, 7, 9, 8, 7, 9]]] } result = o.eval(feed_dict=params) self.assertAllEqual([[[6, 5, 4, 3]]], result) def test3DWithDynamicBegin(self): """Tests a slice where the start offset is not known at compile time.""" for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[3, 3, 10]) begin = array_ops.placeholder(dtypes.int32, shape=[3]) with self.test_scope(): o = array_ops.slice(i, begin, [1, 1, 4]) params = { i: [[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [9, 8, 7, 6, 5, 4, 3, 2, 1, 0], [5, 3, 1, 7, 9, 2, 4, 6, 8, 0]], [[5, 5, 5, 5, 5, 5, 5, 5, 5, 5], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [8, 7, 6, 5, 4, 3, 2, 1, 8, 7]], [[7, 5, 7, 5, 7, 5, 7, 5, 7, 5], [1, 2, 1, 2, 1, 2, 1, 2, 1, 2], [9, 8, 7, 9, 8, 7, 9, 8, 7, 9]]], begin: [1, 2, 2] } result = o.eval(feed_dict=params) self.assertAllEqual([[[6, 5, 4, 3]]], result) def test3DWithDynamicBeginAndNegativeSize(self): """Tests a slice where `begin` is fed dynamically and `size` contains -1.""" for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[3, 3, 10]) begin = array_ops.placeholder(dtypes.int32, shape=[3]) with self.test_scope(): o = array_ops.slice(i, begin, [1, -1, 4]) params = { i: [[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [9, 8, 7, 6, 5, 4, 3, 2, 1, 0], [5, 3, 1, 7, 9, 2, 4, 6, 8, 0]], [[5, 5, 5, 5, 5, 5, 5, 5, 5, 5], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [8, 7, 6, 5, 4, 3, 2, 1, 8, 7]], [[7, 5, 7, 5, 7, 5, 7, 5, 7, 5], [1, 2, 1, 2, 1, 2, 1, 2, 1, 2], [9, 8, 7, 9, 8, 7, 9, 8, 7, 9]]], begin: [1, 1, 2] } result = o.eval(feed_dict=params) self.assertAllEqual([[[1, 1, 1, 1], [6, 5, 4, 3]]], result) class StridedSliceTest(xla_test.XLATestCase): def test1D(self): for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[10]) with self.test_scope(): o 
= array_ops.strided_slice(i, [2], [6], [2]) params = { i: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], } result = o.eval(feed_dict=params) self.assertAllEqual([2, 4], result) def test1DDynamic(self): for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[10]) begin = array_ops.placeholder(dtypes.int32, shape=[1]) with self.test_scope(): end = math_ops.add(begin, [1]) o = array_ops.strided_slice(i, begin, end, [1]) params = { i: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], begin: [0] } result = o.eval(feed_dict=params) self.assertAllEqual([0], result) def test1DNegativeStride(self): for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[10]) with self.test_scope(): o = array_ops.strided_slice(i, [6], [2], [-2]) params = { i: [0, 1, 2, 3, 4, 5, 6, 7, 8, 9], } result = o.eval(feed_dict=params) self.assertAllEqual([6, 4], result) def test2DDegenerate(self): for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[2, 3]) with self.test_scope(): o = array_ops.strided_slice(i, [-1, 0], [0, 3]) params = { i: [[0, 1, 2], [3, 4, 5]] } result = o.eval(feed_dict=params) self.assertEqual(tensor_shape.TensorShape((0, 3)), result.shape) def test2DDegenerateNegativeStride(self): for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[2, 3]) with self.test_scope(): o = array_ops.strided_slice(i, [0, 0], [-1, 3], [-1, 1]) params = { i: [[0, 1, 2], [3, 4, 5]] } result = o.eval(feed_dict=params) self.assertEqual(tensor_shape.TensorShape((0, 3)), result.shape) def test2DFullSlice(self): for dtype in self.numeric_types: with self.session(): with self.test_scope(): i = array_ops.placeholder(dtype, shape=[2, 4]) begin = array_ops.placeholder(dtypes.int32, shape=[2]) end = math_ops.add(begin, [1, 1]) o = array_ops.strided_slice(i, begin, end, [1, 1]) params = { i: [[0, 1, 2, 3], [4, 5, 6, 7]], begin: [1, 1] } result = o.eval(feed_dict=params) 
self.assertAllEqual([[5]], result) def test3D(self): for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[3, 3, 10]) with self.test_scope(): o = array_ops.strided_slice(i, [0, 2, 2], [2, 3, 6], [1, 1, 2]) params = { i: [[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [9, 8, 7, 6, 5, 4, 3, 2, 1, 0], [5, 3, 1, 7, 9, 2, 4, 6, 8, 0]], [[5, 5, 5, 5, 5, 5, 5, 5, 5, 5], [1, 1, 1, 1, 1, 1, 1, 1, 1, 1], [8, 7, 6, 5, 4, 3, 2, 1, 8, 7]], [[7, 5, 7, 5, 7, 5, 7, 5, 7, 5], [1, 2, 1, 2, 1, 2, 1, 2, 1, 2], [9, 8, 7, 9, 8, 7, 9, 8, 7, 9]]] } result = o.eval(feed_dict=params) self.assertAllEqual([[[1, 9]], [[6, 4]]], result) def test3DNegativeStride(self): for dtype in self.numeric_types: with self.session(): i = array_ops.placeholder(dtype, shape=[3, 4, 10]) with self.test_scope(): o = array_ops.strided_slice(i, [2, 2, 6], [0, 0, 2], [-1, -1, -2]) params = { i: [[[0, 1, 2, 3, 4, 5, 6, 7, 8, 9], [9, 8, 7, 6, 5, 4, 3, 2, 1, 0], [5, 3, 1, 7, 9, 2, 4, 6, 8, 0], [4, 5, 2, 4, 3, 7, 6, 8, 9, 4]], [[5, 5, 5, 5, 5, 5, 5, 5, 5, 5], [4, 3, 4, 5, 7, 6, 5, 3, 4, 5], [8, 7, 6, 5, 4, 3, 2, 1, 8, 7], [7, 1, 7, 1, 8, 1, 8, 1, 3, 1]], [[7, 5, 7, 5, 7, 5, 7, 5, 7, 5], [1, 2, 1, 2, 1, 2, 1, 2, 1, 2], [9, 8, 7, 9, 8, 7, 9, 8, 7, 9], [9, 9, 5, 5, 6, 6, 3, 3, 6, 6]]] } result = o.eval(feed_dict=params) self.assertAllEqual([[[9, 8], [1, 1]], [[2, 4], [5, 7]]], result) if __name__ == "__main__": googletest.main()
apache-2.0
fgesora/odoo
addons/l10n_be/__init__.py
430
1060
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import wizard # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
ogrisel/numpy
doc/numpybook/comparison/timing.py
4
1059
from __future__ import division, absolute_import, print_function import timeit pyrex_pre = """ import numpy as N a = N.random.rand(%d,%d) import filter """ pyrex_run = """ b = filter.filter(a) """ weave_pre = """ import numpy as N a = N.random.rand(%d,%d) import filter """ weave_run = """ b = filter.filter(a) """ ctypes_pre = """ import numpy as N a = N.random.rand(%d,%d) import filter """ ctypes_run = """ b = filter.filter(a) """ f2py_pre = """ import numpy as N a = N.random.rand(%d, %d).T import filter """ f2py_run = """ b = N.zeros_like(a) filter.DFILTER2D(a,b) """ N = [10, 20, 30, 40, 50, 100, 200, 300, 400, 500] res = {} import os import sys path = sys.path for kind in ['f2py']:#['ctypes', 'pyrex', 'weave', 'f2py']: res[kind] = [] sys.path = ['/Users/oliphant/numpybook/%s' % (kind,)] + path print(sys.path) for n in N: print("%s - %d" % (kind, n)) t = timeit.Timer(eval('%s_run'%kind), eval('%s_pre %% (%d,%d)'%(kind, n, n))) mytime = min(t.repeat(3, 100)) res[kind].append(mytime)
bsd-3-clause
madscatt/zazmol
src/python/calculate.py
2
15079
from __future__ import absolute_import from __future__ import division from __future__ import print_function #from __future__ import unicode_literals # # SASMOL: Copyright (C) 2011 Joseph E. Curtis, Ph.D. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # # CALCULATE # # 12/10/2009 -- initial coding : jc # 12/20/2015 -- refactored for release : jc # 07/23/2016 -- refactored for Python 3 : jc # # 1 2 3 4 5 6 7 # LC4567890123456789012345678901234567890123456789012345678901234567890123456789 # * ** ''' Calculate contains the classes and methods to calculate various atomic and molecular properties from instances of system objects ''' import sys import numpy import sasmol.operate as operate class Calculate(object): """ Base class containing methods to calculate properties of system object. 
Examples ======== First example shows how to use class methods from system object: >>> import sasmol.system as system >>> molecule = system.Molecule('hiv1_gag.pdb') >>> molecule.calculate_mass() 47896.61864599498 Second example shows how to use class methods directly: >>> import sasmol.system as system >>> import sasmol.calculate as calculate >>> molecule = system.Molecule('hiv1_gag.pdb') >>> calculate.Calculate.calculate_mass(molecule) 47896.61864599498 Note ---- `self` parameter is not shown in the ``Parameters`` section in the documentation TODO: Need to write a generic driver to loop over single or multiple frames """ def calculate_mass(self, **kwargs): ''' Note ---- atomic weights are contained in the ``properties.py`` file http://physics.nist.gov/cgi-bin/Compositions/stand_alone.pl?ele=&ascii=html&isotype=some standard atomic weight is based on the natural istopic composition NOTE: deuterium is 'D' 2H1 and '1H' is 1H1, all other elements have their natural abundance weight. These elements are located at the end of the dictionary. Parameters ---------- kwargs optional future arguments Returns ------- float mass of object in Daltons Examples ------- >>> import sasmol.system as system >>> molecule = system.Molecule('hiv1_gag.pdb') >>> molecule.calculate_mass() 47896.61864599498 ''' standard_atomic_weight = self.amu() self._total_mass = 0.0 self._mass = numpy.zeros(len(self._element), numpy.float) count = 0 for element in self._element: if element in standard_atomic_weight: self._total_mass = self._total_mass + \ standard_atomic_weight[element] self._mass[count] = standard_atomic_weight[element] count += 1 else: message = 'element ' + element + ' not found' # log.error('ERROR: ' + message) # need to return an error that element was not found return self._total_mass def calculate_center_of_mass(self, frame, **kwargs): ''' This method calculates the center of mass of the object. 
Parameters ---------- frame integer : trajectory frame number to use kwargs optional future arguments Returns ------- numpy array coordinates of center of mass Examples ------- >>> import sasmol.system as system >>> molecule = system.Molecule('hiv1_gag.pdb') >>> molecule.calculate_center_of_mass(0) array([ -6.79114736, -23.71577133, 8.06558513]) ''' if(self._total_mass <= 0.0): self.calculate_mass() x = self._coor[frame, :, 0] y = self._coor[frame, :, 1] z = self._coor[frame, :, 2] comx = numpy.sum(self._mass * x) / self._total_mass comy = numpy.sum(self._mass * y) / self._total_mass comz = numpy.sum(self._mass * z) / self._total_mass self._com = numpy.array([comx, comy, comz], numpy.float) return self._com def calculate_radius_of_gyration(self, frame, **kwargs): ''' This method calculates the radius of gyration of the object Parameters ---------- frame integer : trajectory frame number to use kwargs optional future arguments Returns ------- float radius of gyration of object Examples ------- >>> import sasmol.system as system >>> molecule = system.Molecule('hiv1_gag.pdb') >>> molecule.calculate_radius_of_gyration(0) 64.043168998442368 ''' self._com = self.calculate_center_of_mass(frame) if(self._natoms > 0): rg2 = ((self._coor[frame, :, :] - self._com) * (self._coor[frame, :, :] - self._com)) self._rg = numpy.sqrt(numpy.sum(numpy.sum(rg2)) / self._natoms) return self._rg def calculate_root_mean_square_deviation(self, other, **kwargs): ''' This method calculates the radius root mean square deviation (rmsd) of one set of coordinates compared to another self contains the coordinates of set 1 other contains the coordinates of set 2 the number of coordinates in each set must be equal To use this over multiple frames you must call this function repeatedly. 
Parameters ---------- other system object with coordinates with equal number of frames kwargs optional future arguments Returns ------- float root mean square deviation between objects Examples ------- ''' # OPEN Add frame support here? try: dxyz = ((self._coor - other._coor) * (self._coor - other._coor)) self._rmsd = numpy.sqrt((numpy.sum(dxyz)) / self._natoms) except: if(self._natoms != other._natoms): print('number of atoms in (1) != (2)') print('rmsd not calculated: None returned') print('number of atoms in self is < 1') print('number of atoms in other is < 1') self._rmsd = None return self._rmsd def calculate_principal_moments_of_inertia(self, frame, **kwargs): ''' This method calculates the principal moments of inertia of the object. It uses the center method from operate.Move to center the molecule. The present method is designated for the non-linear system with non-singular moment of inertia matrix only. For the linear systems, it will return eigenvectors and I as None. Testing for non-None return values should be done in the calling method. 
Parameters ---------- frame integer : trajectory frame number to use kwargs optional future arguments Returns ------- tuple of numpy arrays principal moments of inertia of object : eigenvalues, eigenvectors, and I Examples ------- >>> import sasmol.system as system >>> molecule = system.Molecule('hiv1_gag.pdb') >>> molecule.calculate_principal_moments_of_inetia(0) (array([ 1.30834716e+07, 1.91993314e+08, 1.85015201e+08]), array([[-0.08711655, -0.97104917, 0.22242802], [-0.512547 , 0.23514759, 0.82583363], [ 0.85422847, 0.04206103, 0.51819358]]), array([[ 1.90290278e+08, -9.27036144e+06, 1.25097100e+07], [ -9.27036144e+06, 1.40233826e+08, 7.53462715e+07], [ 1.25097100e+07, 7.53462715e+07, 5.95678834e+07]])) ''' com = self.calculate_center_of_mass(frame) operate.Move.center(self, frame) n_atoms = self._natoms m = self._mass.reshape(n_atoms, -1) m_coor = m * self._coor[frame] m_coor2 = numpy.dot(self._coor[frame].T, m_coor) numpy.fill_diagonal(m_coor2, m_coor2.diagonal() - m_coor2.trace()) I = -m_coor2 if numpy.linalg.matrix_rank(I) < 3: print("You are requesting the pmi calculation for a singular system.") print("The eigen-decomposition of this system is not defined") uk = None ak = None I = None else: uk, ak = numpy.linalg.eig(I) order = uk.argsort() uk = uk[order] ak = ak[:, order] operate.Move.translate(self, frame, com, point=True) return uk, ak, I def calculate_minimum_and_maximum(self, **kwargs): ''' This method calculates the minimum and maximum values of of the object in (x,y,z) The default usage is to evaluate all frames A numpy array of minimum and maximum values for each dimension are returned Parameters ---------- kwargs frames = [] : integer list of frames to process Returns ------- numpy array nested list of minimum and maximum values [ [ min_x, min_y, min_z ], [max_x, max_y, max_z] ] Examples ------- >>> import sasmol.system as system >>> molecule = system.Molecule('hiv1_gag.pdb') >>> molecule.calculate_minimum_and_maximum() [array([-31.29899979, 
-93.23899841, -85.81900024]), array([ 19.64699936, 30.37800026, 99.52999878])] >>> import sasmol.system as system >>> molecule = system.Molecule('hiv1_gag.pdb') >>> molecule.read_dcd('hiv1_gag_200_frames') >>> molecule.calculate_minimum_and_maximum() [array([ -94.47146606, -121.88082886, -99.94940948]), array([ 52.85133362, 65.53725433, 100.76850891])] >>> molecule.calculate_minimum_and_maximum(frames=[0,1,2,3]) [array([-30.9330883 , -92.68256378, -84.51082611]), array([ 20.98281288, 38.45230484, 99.91564178])] ''' try: frames = kwargs['frames'] except: frames = [x for x in xrange(self.number_of_frames())] first_flag = True for frame in frames: this_min_x=numpy.min(self._coor[frame,:,0]) this_max_x=numpy.max(self._coor[frame,:,0]) this_min_y=numpy.min(self._coor[frame,:,1]) this_max_y=numpy.max(self._coor[frame,:,1]) this_min_z=numpy.min(self._coor[frame,:,2]) this_max_z=numpy.max(self._coor[frame,:,2]) if(first_flag or (this_min_x < min_x)): min_x = this_min_x if(first_flag or (this_min_y < min_y)): min_y = this_min_y if(first_flag or (this_min_z < min_z)): min_z = this_min_z if(first_flag or (this_max_x > max_x)): max_x = this_max_x if(first_flag or (this_max_y > max_y)): max_y = this_max_y if(first_flag or (this_max_z > max_z)): max_z = this_max_z first_flag = False self._minimum = numpy.array([min_x, min_y, min_z]) self._maximum = numpy.array([max_x, max_y, max_z]) return [self._minimum, self._maximum] def calculate_residue_charge(self, **kwargs): ''' Method to sum the atomic charges and assign the net charge of the resiude to a new variable that is attached to each atom. 
Note ---------- Requires that the atom_charge() attribute of object is complete Parameters ---------- kwargs optional future arguments Returns ------- float charge per residue assigned to object Examples ------- >>> import sasmol.system as system >>> molecule = system.Molecule('hiv1_gag.pdb') >>> molecule.calculate_residue_charge() >>> single_molecule.calculate_residue_charge() >>> residue_charge = single_molecule.residue_charge() >>> print('res-charge = ',residue_charge[0]) ''' resid = self.resid() natoms = self.natoms() ### # needs a gentle failure if atom_charge() does not exist ### atom_charge = self.atom_charge() charge_sum = 0.0 charge_residue_sum = [] last_resid = resid[0] for i in xrange(natoms): this_resid = resid[i] this_charge = atom_charge[i] if(this_resid != last_resid or i == natoms - 1): charge_residue_sum.append([last_resid, charge_sum]) charge_sum = this_charge last_resid = this_resid else: charge_sum += this_charge last_resid = resid[0] charge_residue = [] for i in xrange(natoms): this_resid = resid[i] for j in xrange(len(charge_residue_sum)): if(this_resid == charge_residue_sum[j][0]): charge_residue.append(charge_residue_sum[j][1]) continue self.setResidue_charge(numpy.array(charge_residue, numpy.float32)) return def calculate_molecular_formula(self, **kwargs): ''' Method to determine the number of each element in the molecule Parameters ---------- kwargs optional future arguments Returns ------- dictionary {element : integer number, ... } Examples ------- >>> import sasmol.system as system >>> molecule = system.Molecule('hiv1_gag.pdb') >>> molecule.calculate_molecular_formula() {'H': 3378, 'C': 2080, 'S': 24, 'O': 632, 'N': 616} ''' my_formula = {} for element in self._element: if element in my_formula: my_formula[element] += 1 else: my_formula[element] = 1 self.setFormula(my_formula) return my_formula
gpl-3.0
laperry1/android_external_chromium_org
chrome/test/chromedriver/server/server.py
121
2131
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import atexit
import os
import socket
import subprocess
import time
import urllib2


class Server(object):
  """A running ChromeDriver server."""

  def __init__(self, exe_path, log_path=None):
    """Starts the ChromeDriver server and waits for it to be ready.

    Args:
      exe_path: path to the ChromeDriver executable
      log_path: path to the log file

    Raises:
      RuntimeError if ChromeDriver fails to start
    """
    if not os.path.exists(exe_path):
      raise RuntimeError('ChromeDriver exe not found at: ' + exe_path)
    port = self._FindOpenPort()
    chromedriver_args = [exe_path, '--port=%d' % port]
    if log_path:
      chromedriver_args.extend(['--verbose', '--log-path=%s' % log_path])
    self._process = subprocess.Popen(chromedriver_args)
    self._url = 'http://127.0.0.1:%d' % port
    if self._process is None:
      raise RuntimeError('ChromeDriver server cannot be started')

    # Poll /status until the server answers, or give up after 10 seconds.
    max_time = time.time() + 10
    while not self.IsRunning():
      if time.time() > max_time:
        self._process.terminate()
        raise RuntimeError('ChromeDriver server did not start')
      time.sleep(0.1)
    atexit.register(self.Kill)

  def _FindOpenPort(self):
    """Returns a port in [9500, 10000) that nothing is listening on.

    Raises:
      RuntimeError if every candidate port is in use
    """
    for port in range(9500, 10000):
      try:
        # A successful connection means something already listens there;
        # a socket.error means the port is free for us to claim.
        socket.create_connection(('127.0.0.1', port), 0.2).close()
      except socket.error:
        return port
    raise RuntimeError('Cannot find open port to launch ChromeDriver')

  def GetUrl(self):
    """Returns the base URL the server listens on."""
    return self._url

  def IsRunning(self):
    """Returns whether the server is up and running."""
    try:
      # Close the response explicitly; the original leaked one socket per
      # poll iteration during startup.
      urllib2.urlopen(self.GetUrl() + '/status').close()
      return True
    except urllib2.URLError:
      return False

  def Kill(self):
    """Kills the ChromeDriver server, if it is running."""
    if self._process is None:
      return

    try:
      urllib2.urlopen(self.GetUrl() + '/shutdown', timeout=10).close()
    except Exception:
      # Narrowed from a bare 'except:', which also swallowed
      # KeyboardInterrupt/SystemExit.  Any failure of the graceful
      # shutdown request falls back to terminating the process.
      self._process.terminate()
    self._process.wait()
    self._process = None
bsd-3-clause
40223246/0622W17test2
static/Brython3.1.3-20150514-095342/Lib/imp.py
637
9839
"""This module provides the components needed to build your own __import__ function. Undocumented functions are obsolete. In most cases it is preferred you consider using the importlib module's functionality over this module. """ # (Probably) need to stay in _imp from _imp import (lock_held, acquire_lock, release_lock, get_frozen_object, is_frozen_package, init_builtin, init_frozen, is_builtin, is_frozen, _fix_co_filename) try: from _imp import load_dynamic except ImportError: # Platform doesn't support dynamic loading. load_dynamic = None # Directly exposed by this module from importlib._bootstrap import new_module from importlib._bootstrap import cache_from_source, source_from_cache from importlib import _bootstrap #fixme brython #from importlib import machinery import importlib.machinery as machinery import os import sys import tokenize import warnings # DEPRECATED SEARCH_ERROR = 0 PY_SOURCE = 1 PY_COMPILED = 2 C_EXTENSION = 3 PY_RESOURCE = 4 PKG_DIRECTORY = 5 C_BUILTIN = 6 PY_FROZEN = 7 PY_CODERESOURCE = 8 IMP_HOOK = 9 def get_magic(): """Return the magic number for .pyc or .pyo files.""" return _bootstrap._MAGIC_BYTES def get_tag(): """Return the magic tag for .pyc or .pyo files.""" return sys.implementation.cache_tag def get_suffixes(): warnings.warn('imp.get_suffixes() is deprecated; use the constants ' 'defined on importlib.machinery instead', DeprecationWarning, 2) extensions = [(s, 'rb', C_EXTENSION) for s in machinery.EXTENSION_SUFFIXES] source = [(s, 'U', PY_SOURCE) for s in machinery.SOURCE_SUFFIXES] bytecode = [(s, 'rb', PY_COMPILED) for s in machinery.BYTECODE_SUFFIXES] return extensions + source + bytecode class NullImporter: """Null import object.""" def __init__(self, path): if path == '': raise ImportError('empty pathname', path='') elif os.path.isdir(path): raise ImportError('existing directory', path=path) def find_module(self, fullname): """Always returns None.""" return None class _HackedGetData: """Compatibiilty support for 'file' arguments 
of various load_*() functions.""" def __init__(self, fullname, path, file=None): super().__init__(fullname, path) self.file = file def get_data(self, path): """Gross hack to contort loader to deal w/ load_*()'s bad API.""" if self.file and path == self.path: if not self.file.closed: file = self.file else: self.file = file = open(self.path, 'r') with file: # Technically should be returning bytes, but # SourceLoader.get_code() just passed what is returned to # compile() which can handle str. And converting to bytes would # require figuring out the encoding to decode to and # tokenize.detect_encoding() only accepts bytes. return file.read() else: return super().get_data(path) class _LoadSourceCompatibility(_HackedGetData, _bootstrap.SourceFileLoader): """Compatibility support for implementing load_source().""" #brython fix me pass def load_source(name, pathname, file=None): msg = ('imp.load_source() is deprecated; use ' 'importlib.machinery.SourceFileLoader(name, pathname).load_module()' ' instead') warnings.warn(msg, DeprecationWarning, 2) _LoadSourceCompatibility(name, pathname, file).load_module(name) module = sys.modules[name] # To allow reloading to potentially work, use a non-hacked loader which # won't rely on a now-closed file object. module.__loader__ = _bootstrap.SourceFileLoader(name, pathname) return module class _LoadCompiledCompatibility(_HackedGetData, _bootstrap.SourcelessFileLoader): """Compatibility support for implementing load_compiled().""" #brython fix me pass def load_compiled(name, pathname, file=None): msg = ('imp.load_compiled() is deprecated; use ' 'importlib.machinery.SourcelessFileLoader(name, pathname).' 'load_module() instead ') warnings.warn(msg, DeprecationWarning, 2) _LoadCompiledCompatibility(name, pathname, file).load_module(name) module = sys.modules[name] # To allow reloading to potentially work, use a non-hacked loader which # won't rely on a now-closed file object. 
module.__loader__ = _bootstrap.SourcelessFileLoader(name, pathname) return module def load_package(name, path): msg = ('imp.load_package() is deprecated; use either ' 'importlib.machinery.SourceFileLoader() or ' 'importlib.machinery.SourcelessFileLoader() instead') warnings.warn(msg, DeprecationWarning, 2) if os.path.isdir(path): extensions = (machinery.SOURCE_SUFFIXES[:] + machinery.BYTECODE_SUFFIXES[:]) for extension in extensions: path = os.path.join(path, '__init__'+extension) if os.path.exists(path): break else: raise ValueError('{!r} is not a package'.format(path)) return _bootstrap.SourceFileLoader(name, path).load_module(name) def load_module(name, file, filename, details): """**DEPRECATED** Load a module, given information returned by find_module(). The module name must include the full package name, if any. """ suffix, mode, type_ = details with warnings.catch_warnings(): warnings.simplefilter('ignore') if mode and (not mode.startswith(('r', 'U')) or '+' in mode): raise ValueError('invalid file open mode {!r}'.format(mode)) elif file is None and type_ in {PY_SOURCE, PY_COMPILED}: msg = 'file object required for import (type code {})'.format(type_) raise ValueError(msg) elif type_ == PY_SOURCE: return load_source(name, filename, file) elif type_ == PY_COMPILED: return load_compiled(name, filename, file) elif type_ == C_EXTENSION and load_dynamic is not None: if file is None: with open(filename, 'rb') as opened_file: return load_dynamic(name, filename, opened_file) else: return load_dynamic(name, filename, file) elif type_ == PKG_DIRECTORY: return load_package(name, filename) elif type_ == C_BUILTIN: return init_builtin(name) elif type_ == PY_FROZEN: return init_frozen(name) else: msg = "Don't know how to import {} (type code {})".format(name, type_) raise ImportError(msg, name=name) def find_module(name, path=None): """**DEPRECATED** Search for a module. 
If path is omitted or None, search for a built-in, frozen or special module and continue search in sys.path. The module name cannot contain '.'; to search for a submodule of a package, pass the submodule name and the package's __path__. """ if not isinstance(name, str): raise TypeError("'name' must be a str, not {}".format(type(name))) elif not isinstance(path, (type(None), list)): # Backwards-compatibility raise RuntimeError("'list' must be None or a list, " "not {}".format(type(name))) if path is None: if is_builtin(name): return None, None, ('', '', C_BUILTIN) elif is_frozen(name): return None, None, ('', '', PY_FROZEN) else: path = sys.path for entry in path: package_directory = os.path.join(entry, name) for suffix in ['.py', machinery.BYTECODE_SUFFIXES[0]]: package_file_name = '__init__' + suffix file_path = os.path.join(package_directory, package_file_name) if os.path.isfile(file_path): return None, package_directory, ('', '', PKG_DIRECTORY) with warnings.catch_warnings(): warnings.simplefilter('ignore') for suffix, mode, type_ in get_suffixes(): file_name = name + suffix file_path = os.path.join(entry, file_name) if os.path.isfile(file_path): break else: continue break # Break out of outer loop when breaking out of inner loop. else: raise ImportError(_bootstrap._ERR_MSG.format(name), name=name) encoding = None if mode == 'U': with open(file_path, 'rb') as file: encoding = tokenize.detect_encoding(file.readline)[0] file = open(file_path, mode, encoding=encoding) return file, file_path, (suffix, mode, type_) _RELOADING = {} def reload(module): """Reload the module and return it. The module must have been successfully imported before. 
""" if not module or type(module) != type(sys): raise TypeError("reload() argument must be module") name = module.__name__ if name not in sys.modules: msg = "module {} not in sys.modules" raise ImportError(msg.format(name), name=name) if name in _RELOADING: return _RELOADING[name] _RELOADING[name] = module try: parent_name = name.rpartition('.')[0] if parent_name and parent_name not in sys.modules: msg = "parent {!r} not in sys.modules" raise ImportError(msg.format(parent_name), name=parent_name) module.__loader__.load_module(name) # The module may have replaced itself in sys.modules! return sys.modules[module.__name__] finally: try: del _RELOADING[name] except KeyError: pass
gpl-3.0
sarantapichos/faircoop-market
addons/stock/stock.py
15
269641
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from datetime import date, datetime from dateutil import relativedelta import json import time from openerp.osv import fields, osv from openerp.tools.float_utils import float_compare, float_round from openerp.tools.translate import _ from openerp.tools import DEFAULT_SERVER_DATETIME_FORMAT, DEFAULT_SERVER_DATE_FORMAT from openerp.exceptions import Warning from openerp import SUPERUSER_ID, api import openerp.addons.decimal_precision as dp from openerp.addons.procurement import procurement import logging _logger = logging.getLogger(__name__) #---------------------------------------------------------- # Incoterms #---------------------------------------------------------- class stock_incoterms(osv.osv): _name = "stock.incoterms" _description = "Incoterms" _columns = { 'name': fields.char('Name', required=True, help="Incoterms are series of sales terms. 
They are used to divide transaction costs and responsibilities between buyer and seller and reflect state-of-the-art transportation practices."), 'code': fields.char('Code', size=3, required=True, help="Incoterm Standard Code"), 'active': fields.boolean('Active', help="By unchecking the active field, you may hide an INCOTERM you will not use."), } _defaults = { 'active': True, } #---------------------------------------------------------- # Stock Location #---------------------------------------------------------- class stock_location(osv.osv): _name = "stock.location" _description = "Inventory Locations" _parent_name = "location_id" _parent_store = True _parent_order = 'name' _order = 'parent_left' _rec_name = 'complete_name' def _location_owner(self, cr, uid, location, context=None): ''' Return the company owning the location if any ''' return location and (location.usage == 'internal') and location.company_id or False def _complete_name(self, cr, uid, ids, name, args, context=None): """ Forms complete name of location from parent location to child location. 
@return: Dictionary of values """ res = {} for m in self.browse(cr, uid, ids, context=context): res[m.id] = m.name parent = m.location_id while parent: res[m.id] = parent.name + ' / ' + res[m.id] parent = parent.location_id return res def _get_sublocations(self, cr, uid, ids, context=None): """ return all sublocations of the given stock locations (included) """ if context is None: context = {} context_with_inactive = context.copy() context_with_inactive['active_test'] = False return self.search(cr, uid, [('id', 'child_of', ids)], context=context_with_inactive) def _name_get(self, cr, uid, location, context=None): name = location.name while location.location_id and location.usage != 'view': location = location.location_id name = location.name + '/' + name return name def name_get(self, cr, uid, ids, context=None): res = [] for location in self.browse(cr, uid, ids, context=context): res.append((location.id, self._name_get(cr, uid, location, context=context))) return res _columns = { 'name': fields.char('Location Name', required=True, translate=True), 'active': fields.boolean('Active', help="By unchecking the active field, you may hide a location without deleting it."), 'usage': fields.selection([ ('supplier', 'Supplier Location'), ('view', 'View'), ('internal', 'Internal Location'), ('customer', 'Customer Location'), ('inventory', 'Inventory'), ('procurement', 'Procurement'), ('production', 'Production'), ('transit', 'Transit Location')], 'Location Type', required=True, help="""* Supplier Location: Virtual location representing the source location for products coming from your suppliers \n* View: Virtual location used to create a hierarchical structures for your warehouse, aggregating its child locations ; can't directly contain products \n* Internal Location: Physical locations inside your own warehouses, \n* Customer Location: Virtual location representing the destination location for products sent to your customers \n* Inventory: Virtual location serving as 
counterpart for inventory operations used to correct stock levels (Physical inventories) \n* Procurement: Virtual location serving as temporary counterpart for procurement operations when the source (supplier or production) is not known yet. This location should be empty when the procurement scheduler has finished running. \n* Production: Virtual counterpart location for production operations: this location consumes the raw material and produces finished products \n* Transit Location: Counterpart location that should be used in inter-companies or inter-warehouses operations """, select=True), 'complete_name': fields.function(_complete_name, type='char', string="Location Name", store={'stock.location': (_get_sublocations, ['name', 'location_id', 'active'], 10)}), 'location_id': fields.many2one('stock.location', 'Parent Location', select=True, ondelete='cascade'), 'child_ids': fields.one2many('stock.location', 'location_id', 'Contains'), 'partner_id': fields.many2one('res.partner', 'Owner', help="Owner of the location if not internal"), 'comment': fields.text('Additional Information'), 'posx': fields.integer('Corridor (X)', help="Optional localization details, for information purpose only"), 'posy': fields.integer('Shelves (Y)', help="Optional localization details, for information purpose only"), 'posz': fields.integer('Height (Z)', help="Optional localization details, for information purpose only"), 'parent_left': fields.integer('Left Parent', select=1), 'parent_right': fields.integer('Right Parent', select=1), 'company_id': fields.many2one('res.company', 'Company', select=1, help='Let this field empty if this location is shared between companies'), 'scrap_location': fields.boolean('Is a Scrap Location?', help='Check this box to allow using this location to put scrapped/damaged goods.'), 'removal_strategy_id': fields.many2one('product.removal', 'Removal Strategy', help="Defines the default method used for suggesting the exact location (shelf) where to take the 
products from, which lot etc. for this location. This method can be enforced at the product category level, and a fallback is made on the parent locations if none is set here."), 'putaway_strategy_id': fields.many2one('product.putaway', 'Put Away Strategy', help="Defines the default method used for suggesting the exact location (shelf) where to store the products. This method can be enforced at the product category level, and a fallback is made on the parent locations if none is set here."), 'loc_barcode': fields.char('Location Barcode'), } _defaults = { 'active': True, 'usage': 'internal', 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.location', context=c), 'posx': 0, 'posy': 0, 'posz': 0, 'scrap_location': False, } _sql_constraints = [('loc_barcode_company_uniq', 'unique (loc_barcode,company_id)', 'The barcode for a location must be unique per company !')] def create(self, cr, uid, default, context=None): if not default.get('loc_barcode', False): default.update({'loc_barcode': default.get('complete_name', False)}) return super(stock_location, self).create(cr, uid, default, context=context) def get_putaway_strategy(self, cr, uid, location, product, context=None): ''' Returns the location where the product has to be put, if any compliant putaway strategy is found. Otherwise returns None.''' putaway_obj = self.pool.get('product.putaway') loc = location while loc: if loc.putaway_strategy_id: res = putaway_obj.putaway_apply(cr, uid, loc.putaway_strategy_id, product, context=context) if res: return res loc = loc.location_id def _default_removal_strategy(self, cr, uid, context=None): return 'fifo' def get_removal_strategy(self, cr, uid, location, product, context=None): ''' Returns the removal strategy to consider for the given product and location. 
:param location: browse record (stock.location) :param product: browse record (product.product) :rtype: char ''' if product.categ_id.removal_strategy_id: return product.categ_id.removal_strategy_id.method loc = location while loc: if loc.removal_strategy_id: return loc.removal_strategy_id.method loc = loc.location_id return self._default_removal_strategy(cr, uid, context=context) def get_warehouse(self, cr, uid, location, context=None): """ Returns warehouse id of warehouse that contains location :param location: browse record (stock.location) """ wh_obj = self.pool.get("stock.warehouse") whs = wh_obj.search(cr, uid, [('view_location_id.parent_left', '<=', location.parent_left), ('view_location_id.parent_right', '>=', location.parent_left)], context=context) return whs and whs[0] or False #---------------------------------------------------------- # Routes #---------------------------------------------------------- class stock_location_route(osv.osv): _name = 'stock.location.route' _description = "Inventory Routes" _order = 'sequence' _columns = { 'name': fields.char('Route Name', required=True, translate=True), 'sequence': fields.integer('Sequence'), 'pull_ids': fields.one2many('procurement.rule', 'route_id', 'Pull Rules', copy=True), 'active': fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the route without removing it."), 'push_ids': fields.one2many('stock.location.path', 'route_id', 'Push Rules', copy=True), 'product_selectable': fields.boolean('Applicable on Product'), 'product_categ_selectable': fields.boolean('Applicable on Product Category'), 'warehouse_selectable': fields.boolean('Applicable on Warehouse'), 'supplied_wh_id': fields.many2one('stock.warehouse', 'Supplied Warehouse'), 'supplier_wh_id': fields.many2one('stock.warehouse', 'Supplier Warehouse'), 'company_id': fields.many2one('res.company', 'Company', select=1, help='Let this field empty if this route is shared between all companies'), } _defaults = 
{
    # completes the `_defaults =` assignment opened on the previous line
    'sequence': lambda self, cr, uid, ctx: 0,
    'active': True,
    'product_selectable': True,
    'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.location.route', context=c),
}

    def write(self, cr, uid, ids, vals, context=None):
        '''When a route is (de)activated, propagate the same `active` flag to
        its push rules (stock.location.path) and pull rules (procurement.rule).'''
        if isinstance(ids, (int, long)):
            ids = [ids]
        res = super(stock_location_route, self).write(cr, uid, ids, vals, context=context)
        if 'active' in vals:
            push_ids = []
            pull_ids = []
            for route in self.browse(cr, uid, ids, context=context):
                if route.push_ids:
                    # only touch rules whose active flag actually differs
                    push_ids += [r.id for r in route.push_ids if r.active != vals['active']]
                if route.pull_ids:
                    pull_ids += [r.id for r in route.pull_ids if r.active != vals['active']]
            if push_ids:
                self.pool.get('stock.location.path').write(cr, uid, push_ids, {'active': vals['active']}, context=context)
            if pull_ids:
                self.pool.get('procurement.rule').write(cr, uid, pull_ids, {'active': vals['active']}, context=context)
        return res

#----------------------------------------------------------
# Quants
#----------------------------------------------------------
class stock_quant(osv.osv):
    """ Quants are the smallest unit of stock physical instances """
    _name = "stock.quant"
    _description = "Quants"

    def _get_quant_name(self, cr, uid, ids, name, args, context=None):
        """ Builds each quant's display name: product code (or lot name) plus quantity and UoM.
@return: Dictionary of values """ res = {} for q in self.browse(cr, uid, ids, context=context): res[q.id] = q.product_id.code or '' if q.lot_id: res[q.id] = q.lot_id.name res[q.id] += ': ' + str(q.qty) + q.product_id.uom_id.name return res def _calc_inventory_value(self, cr, uid, ids, name, attr, context=None): context = dict(context or {}) res = {} uid_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id for quant in self.browse(cr, uid, ids, context=context): context.pop('force_company', None) if quant.company_id.id != uid_company_id: #if the company of the quant is different than the current user company, force the company in the context #then re-do a browse to read the property fields for the good company. context['force_company'] = quant.company_id.id quant = self.browse(cr, uid, quant.id, context=context) res[quant.id] = self._get_inventory_value(cr, uid, quant, context=context) return res def _get_inventory_value(self, cr, uid, quant, context=None): return quant.product_id.standard_price * quant.qty _columns = { 'name': fields.function(_get_quant_name, type='char', string='Identifier'), 'product_id': fields.many2one('product.product', 'Product', required=True, ondelete="restrict", readonly=True, select=True), 'location_id': fields.many2one('stock.location', 'Location', required=True, ondelete="restrict", readonly=True, select=True, auto_join=True), 'qty': fields.float('Quantity', required=True, help="Quantity of products in this quant, in the default unit of measure of the product", readonly=True, select=True), 'package_id': fields.many2one('stock.quant.package', string='Package', help="The package containing this quant", readonly=True, select=True), 'packaging_type_id': fields.related('package_id', 'packaging_id', type='many2one', relation='product.packaging', string='Type of packaging', readonly=True, store=True), 'reservation_id': fields.many2one('stock.move', 'Reserved for Move', help="The move the quant is reserved 
for", readonly=True, select=True), 'lot_id': fields.many2one('stock.production.lot', 'Lot', readonly=True, select=True, ondelete="restrict"), 'cost': fields.float('Unit Cost'), 'owner_id': fields.many2one('res.partner', 'Owner', help="This is the owner of the quant", readonly=True, select=True), 'create_date': fields.datetime('Creation Date', readonly=True), 'in_date': fields.datetime('Incoming Date', readonly=True, select=True), 'history_ids': fields.many2many('stock.move', 'stock_quant_move_rel', 'quant_id', 'move_id', 'Moves', help='Moves that operate(d) on this quant', copy=False), 'company_id': fields.many2one('res.company', 'Company', help="The company to which the quants belong", required=True, readonly=True, select=True), 'inventory_value': fields.function(_calc_inventory_value, string="Inventory Value", type='float', readonly=True), # Used for negative quants to reconcile after compensated by a new positive one 'propagated_from_id': fields.many2one('stock.quant', 'Linked Quant', help='The negative quant this is coming from', readonly=True, select=True), 'negative_move_id': fields.many2one('stock.move', 'Move Negative Quant', help='If this is a negative quant, this will be the move that caused this negative quant.', readonly=True), 'negative_dest_location_id': fields.related('negative_move_id', 'location_dest_id', type='many2one', relation='stock.location', string="Negative Destination Location", readonly=True, help="Technical field used to record the destination location of a move that created a negative quant"), } _defaults = { 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.quant', context=c), } def init(self, cr): cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('stock_quant_product_location_index',)) if not cr.fetchone(): cr.execute('CREATE INDEX stock_quant_product_location_index ON stock_quant (product_id, location_id, company_id, qty, in_date, reservation_id)') def 
read_group(self, cr, uid, domain, fields, groupby, offset=0, limit=None, context=None, orderby=False, lazy=True):
        '''Overridden to aggregate the non-stored function field
        'inventory_value' in grouped views: the ORM cannot sum it in SQL, so
        each group's value is recomputed by browsing the group's quants.'''
        res = super(stock_quant, self).read_group(cr, uid, domain, fields, groupby, offset=offset, limit=limit, context=context, orderby=orderby, lazy=lazy)
        if 'inventory_value' in fields:
            for line in res:
                if '__domain' in line:
                    lines = self.search(cr, uid, line['__domain'], context=context)
                    inv_value = 0.0
                    for line2 in self.browse(cr, uid, lines, context=context):
                        inv_value += line2.inventory_value
                    line['inventory_value'] = inv_value
        return res

    def action_view_quant_history(self, cr, uid, ids, context=None):
        '''Return an act_window action listing every stock.move recorded in the
        quants' history, i.e. all the moves that led to these quants.'''
        mod_obj = self.pool.get('ir.model.data')
        act_obj = self.pool.get('ir.actions.act_window')
        result = mod_obj.get_object_reference(cr, uid, 'stock', 'action_move_form2')
        id = result and result[1] or False  # shadows builtin `id`; kept as-is
        result = act_obj.read(cr, uid, [id], context={})[0]
        move_ids = []
        for quant in self.browse(cr, uid, ids, context=context):
            move_ids += [move.id for move in quant.history_ids]
        result['domain'] = "[('id','in',[" + ','.join(map(str, move_ids)) + "])]"
        return result

    def quants_reserve(self, cr, uid, quants, move, link=False, context=None):
        '''This function reserves quants for the given move (and optionally given link). If the total of quantity reserved is enough, the move's state
        is also set to 'assigned'

        :param quants: list of tuple(quant browse record or None, qty to reserve). If None is given as first tuple element, the item will be ignored.
Negative quants should not be received as argument :param move: browse record :param link: browse record (stock.move.operation.link) ''' toreserve = [] reserved_availability = move.reserved_availability #split quants if needed for quant, qty in quants: if qty <= 0.0 or (quant and quant.qty <= 0.0): raise osv.except_osv(_('Error!'), _('You can not reserve a negative quantity or a negative quant.')) if not quant: continue self._quant_split(cr, uid, quant, qty, context=context) toreserve.append(quant.id) reserved_availability += quant.qty #reserve quants if toreserve: self.write(cr, SUPERUSER_ID, toreserve, {'reservation_id': move.id}, context=context) #if move has a picking_id, write on that picking that pack_operation might have changed and need to be recomputed if move.picking_id: self.pool.get('stock.picking').write(cr, uid, [move.picking_id.id], {'recompute_pack_op': True}, context=context) #check if move'state needs to be set as 'assigned' rounding = move.product_id.uom_id.rounding if float_compare(reserved_availability, move.product_qty, precision_rounding=rounding) == 0 and move.state in ('confirmed', 'waiting') : self.pool.get('stock.move').write(cr, uid, [move.id], {'state': 'assigned'}, context=context) elif float_compare(reserved_availability, 0, precision_rounding=rounding) > 0 and not move.partially_available: self.pool.get('stock.move').write(cr, uid, [move.id], {'partially_available': True}, context=context) def quants_move(self, cr, uid, quants, move, location_to, location_from=False, lot_id=False, owner_id=False, src_package_id=False, dest_package_id=False, context=None): """Moves all given stock.quant in the given destination location. Unreserve from current move. 
:param quants: list of tuple(browse record(stock.quant) or None, quantity to move) :param move: browse record (stock.move) :param location_to: browse record (stock.location) depicting where the quants have to be moved :param location_from: optional browse record (stock.location) explaining where the quant has to be taken (may differ from the move source location in case a removal strategy applied). This parameter is only used to pass to _quant_create if a negative quant must be created :param lot_id: ID of the lot that must be set on the quants to move :param owner_id: ID of the partner that must own the quants to move :param src_package_id: ID of the package that contains the quants to move :param dest_package_id: ID of the package that must be set on the moved quant """ quants_reconcile = [] to_move_quants = [] self._check_location(cr, uid, location_to, context=context) for quant, qty in quants: if not quant: #If quant is None, we will create a quant to move (and potentially a negative counterpart too) quant = self._quant_create(cr, uid, qty, move, lot_id=lot_id, owner_id=owner_id, src_package_id=src_package_id, dest_package_id=dest_package_id, force_location_from=location_from, force_location_to=location_to, context=context) else: self._quant_split(cr, uid, quant, qty, context=context) to_move_quants.append(quant) quants_reconcile.append(quant) if to_move_quants: to_recompute_move_ids = [x.reservation_id.id for x in to_move_quants if x.reservation_id and x.reservation_id.id != move.id] self.move_quants_write(cr, uid, to_move_quants, move, location_to, dest_package_id, context=context) self.pool.get('stock.move').recalculate_move_state(cr, uid, to_recompute_move_ids, context=context) if location_to.usage == 'internal': # Do manual search for quant to avoid full table scan (order by id) cr.execute(""" SELECT 0 FROM stock_quant, stock_location WHERE product_id = %s AND stock_location.id = stock_quant.location_id AND ((stock_location.parent_left >= %s AND 
stock_location.parent_left < %s) OR stock_location.id = %s) AND qty < 0.0 LIMIT 1 """, (move.product_id.id, location_to.parent_left, location_to.parent_right, location_to.id)) if cr.fetchone(): for quant in quants_reconcile: self._quant_reconcile_negative(cr, uid, quant, move, context=context) def move_quants_write(self, cr, uid, quants, move, location_dest_id, dest_package_id, context=None): context=context or {} vals = {'location_id': location_dest_id.id, 'history_ids': [(4, move.id)], 'reservation_id': False} if not context.get('entire_pack'): vals.update({'package_id': dest_package_id}) self.write(cr, SUPERUSER_ID, [q.id for q in quants], vals, context=context) def quants_get_prefered_domain(self, cr, uid, location, product, qty, domain=None, prefered_domain_list=[], restrict_lot_id=False, restrict_partner_id=False, context=None): ''' This function tries to find quants in the given location for the given domain, by trying to first limit the choice on the quants that match the first item of prefered_domain_list as well. But if the qty requested is not reached it tries to find the remaining quantity by looping on the prefered_domain_list (tries with the second item and so on). Make sure the quants aren't found twice => all the domains of prefered_domain_list should be orthogonal ''' if domain is None: domain = [] quants = [(None, qty)] #don't look for quants in location that are of type production, supplier or inventory. 
if location.usage in ['inventory', 'production', 'supplier']: return quants res_qty = qty if not prefered_domain_list: return self.quants_get(cr, uid, location, product, qty, domain=domain, restrict_lot_id=restrict_lot_id, restrict_partner_id=restrict_partner_id, context=context) for prefered_domain in prefered_domain_list: res_qty_cmp = float_compare(res_qty, 0, precision_rounding=product.uom_id.rounding) if res_qty_cmp > 0: #try to replace the last tuple (None, res_qty) with something that wasn't chosen at first because of the prefered order quants.pop() tmp_quants = self.quants_get(cr, uid, location, product, res_qty, domain=domain + prefered_domain, restrict_lot_id=restrict_lot_id, restrict_partner_id=restrict_partner_id, context=context) for quant in tmp_quants: if quant[0]: res_qty -= quant[1] quants += tmp_quants return quants def quants_get(self, cr, uid, location, product, qty, domain=None, restrict_lot_id=False, restrict_partner_id=False, context=None): """ Use the removal strategies of product to search for the correct quants If you inherit, put the super at the end of your method. 
:location: browse record of the parent location where the quants have to be found :product: browse record of the product to find :qty in UoM of product """ result = [] domain = domain or [('qty', '>', 0.0)] if restrict_partner_id: domain += [('owner_id', '=', restrict_partner_id)] if restrict_lot_id: domain += [('lot_id', '=', restrict_lot_id)] if location: removal_strategy = self.pool.get('stock.location').get_removal_strategy(cr, uid, location, product, context=context) result += self.apply_removal_strategy(cr, uid, location, product, qty, domain, removal_strategy, context=context) return result def apply_removal_strategy(self, cr, uid, location, product, quantity, domain, removal_strategy, context=None): if removal_strategy == 'fifo': order = 'in_date, id' return self._quants_get_order(cr, uid, location, product, quantity, domain, order, context=context) elif removal_strategy == 'lifo': order = 'in_date desc, id desc' return self._quants_get_order(cr, uid, location, product, quantity, domain, order, context=context) raise osv.except_osv(_('Error!'), _('Removal strategy %s not implemented.' % (removal_strategy,))) def _quant_create(self, cr, uid, qty, move, lot_id=False, owner_id=False, src_package_id=False, dest_package_id=False, force_location_from=False, force_location_to=False, context=None): '''Create a quant in the destination location and create a negative quant in the source location if it's an internal location. 
''' if context is None: context = {} price_unit = self.pool.get('stock.move').get_price_unit(cr, uid, move, context=context) location = force_location_to or move.location_dest_id rounding = move.product_id.uom_id.rounding vals = { 'product_id': move.product_id.id, 'location_id': location.id, 'qty': float_round(qty, precision_rounding=rounding), 'cost': price_unit, 'history_ids': [(4, move.id)], 'in_date': datetime.now().strftime(DEFAULT_SERVER_DATETIME_FORMAT), 'company_id': move.company_id.id, 'lot_id': lot_id, 'owner_id': owner_id, 'package_id': dest_package_id, } if move.location_id.usage == 'internal': #if we were trying to move something from an internal location and reach here (quant creation), #it means that a negative quant has to be created as well. negative_vals = vals.copy() negative_vals['location_id'] = force_location_from and force_location_from.id or move.location_id.id negative_vals['qty'] = float_round(-qty, precision_rounding=rounding) negative_vals['cost'] = price_unit negative_vals['negative_move_id'] = move.id negative_vals['package_id'] = src_package_id negative_quant_id = self.create(cr, SUPERUSER_ID, negative_vals, context=context) vals.update({'propagated_from_id': negative_quant_id}) #create the quant as superuser, because we want to restrict the creation of quant manually: we should always use this method to create quants quant_id = self.create(cr, SUPERUSER_ID, vals, context=context) return self.browse(cr, uid, quant_id, context=context) def _quant_split(self, cr, uid, quant, qty, context=None): context = context or {} rounding = quant.product_id.uom_id.rounding if float_compare(abs(quant.qty), abs(qty), precision_rounding=rounding) <= 0: # if quant <= qty in abs, take it entirely return False qty_round = float_round(qty, precision_rounding=rounding) new_qty_round = float_round(quant.qty - qty, precision_rounding=rounding) # Fetch the history_ids manually as it will not do a join with the stock moves then (=> a lot faster) 
        cr.execute("""SELECT move_id FROM stock_quant_move_rel WHERE quant_id = %s""", (quant.id,))
        res = cr.fetchall()
        # Copy the quant with the remainder qty; re-link the same move history
        # explicitly (fetched by raw SQL above to avoid a costly ORM join).
        new_quant = self.copy(cr, SUPERUSER_ID, quant.id, default={'qty': new_qty_round, 'history_ids': [(4, x[0]) for x in res]}, context=context)
        self.write(cr, SUPERUSER_ID, quant.id, {'qty': qty_round}, context=context)
        return self.browse(cr, uid, new_quant, context=context)

    def _get_latest_move(self, cr, uid, quant, context=None):
        # Return the most recent move (by `date`) in this quant's history.
        move = False
        for m in quant.history_ids:
            if not move or m.date > move.date:
                move = m
        return move

    @api.cr_uid_ids_context
    def _quants_merge(self, cr, uid, solved_quant_ids, solving_quant, context=None):
        # Graft the solving quant's whole move history onto the solved quants.
        path = []
        for move in solving_quant.history_ids:
            path.append((4, move.id))
        self.write(cr, SUPERUSER_ID, solved_quant_ids, {'history_ids': path}, context=context)

    def _quant_reconcile_negative(self, cr, uid, quant, move, context=None):
        """
            When a new quant arrives in a location, try to reconcile it with
            negative quants. If possible, apply the cost of the new quant to
            the counter-part of the negative quant.
""" solving_quant = quant dom = [('qty', '<', 0)] if quant.lot_id: dom += [('lot_id', '=', quant.lot_id.id)] dom += [('owner_id', '=', quant.owner_id.id)] dom += [('package_id', '=', quant.package_id.id)] dom += [('id', '!=', quant.propagated_from_id.id)] quants = self.quants_get(cr, uid, quant.location_id, quant.product_id, quant.qty, dom, context=context) product_uom_rounding = quant.product_id.uom_id.rounding for quant_neg, qty in quants: if not quant_neg or not solving_quant: continue to_solve_quant_ids = self.search(cr, uid, [('propagated_from_id', '=', quant_neg.id)], context=context) if not to_solve_quant_ids: continue solving_qty = qty solved_quant_ids = [] for to_solve_quant in self.browse(cr, uid, to_solve_quant_ids, context=context): if float_compare(solving_qty, 0, precision_rounding=product_uom_rounding) <= 0: continue solved_quant_ids.append(to_solve_quant.id) self._quant_split(cr, uid, to_solve_quant, min(solving_qty, to_solve_quant.qty), context=context) solving_qty -= min(solving_qty, to_solve_quant.qty) remaining_solving_quant = self._quant_split(cr, uid, solving_quant, qty, context=context) remaining_neg_quant = self._quant_split(cr, uid, quant_neg, -qty, context=context) #if the reconciliation was not complete, we need to link together the remaining parts if remaining_neg_quant: remaining_to_solve_quant_ids = self.search(cr, uid, [('propagated_from_id', '=', quant_neg.id), ('id', 'not in', solved_quant_ids)], context=context) if remaining_to_solve_quant_ids: self.write(cr, SUPERUSER_ID, remaining_to_solve_quant_ids, {'propagated_from_id': remaining_neg_quant.id}, context=context) if solving_quant.propagated_from_id and solved_quant_ids: self.write(cr, SUPERUSER_ID, solved_quant_ids, {'propagated_from_id': solving_quant.propagated_from_id.id}, context=context) #delete the reconciled quants, as it is replaced by the solved quants self.unlink(cr, SUPERUSER_ID, [quant_neg.id], context=context) if solved_quant_ids: #price update + accounting entries 
adjustments self._price_update(cr, uid, solved_quant_ids, solving_quant.cost, context=context) #merge history (and cost?) self._quants_merge(cr, uid, solved_quant_ids, solving_quant, context=context) self.unlink(cr, SUPERUSER_ID, [solving_quant.id], context=context) solving_quant = remaining_solving_quant def _price_update(self, cr, uid, ids, newprice, context=None): self.write(cr, SUPERUSER_ID, ids, {'cost': newprice}, context=context) def quants_unreserve(self, cr, uid, move, context=None): related_quants = [x.id for x in move.reserved_quant_ids] if related_quants: #if move has a picking_id, write on that picking that pack_operation might have changed and need to be recomputed if move.picking_id: self.pool.get('stock.picking').write(cr, uid, [move.picking_id.id], {'recompute_pack_op': True}, context=context) if move.partially_available: self.pool.get("stock.move").write(cr, uid, [move.id], {'partially_available': False}, context=context) self.write(cr, SUPERUSER_ID, related_quants, {'reservation_id': False}, context=context) def _quants_get_order(self, cr, uid, location, product, quantity, domain=[], orderby='in_date', context=None): ''' Implementation of removal strategies If it can not reserve, it will return a tuple (None, qty) ''' if context is None: context = {} domain += location and [('location_id', 'child_of', location.id)] or [] domain += [('product_id', '=', product.id)] if context.get('force_company'): domain += [('company_id', '=', context.get('force_company'))] else: domain += [('company_id', '=', self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id)] res = [] offset = 0 while float_compare(quantity, 0, precision_rounding=product.uom_id.rounding) > 0: quants = self.search(cr, uid, domain, order=orderby, limit=10, offset=offset, context=context) if not quants: res.append((None, quantity)) break for quant in self.browse(cr, uid, quants, context=context): rounding = product.uom_id.rounding if float_compare(quantity, 
abs(quant.qty), precision_rounding=rounding) >= 0: res += [(quant, abs(quant.qty))] quantity -= abs(quant.qty) elif float_compare(quantity, 0.0, precision_rounding=rounding) != 0: res += [(quant, quantity)] quantity = 0 break offset += 10 return res def _check_location(self, cr, uid, location, context=None): if location.usage == 'view': raise osv.except_osv(_('Error'), _('You cannot move to a location of type view %s.') % (location.name)) return True #---------------------------------------------------------- # Stock Picking #---------------------------------------------------------- class stock_picking(osv.osv): _name = "stock.picking" _inherit = ['mail.thread'] _description = "Picking List" _order = "priority desc, date asc, id desc" def _set_min_date(self, cr, uid, id, field, value, arg, context=None): move_obj = self.pool.get("stock.move") if value: move_ids = [move.id for move in self.browse(cr, uid, id, context=context).move_lines] move_obj.write(cr, uid, move_ids, {'date_expected': value}, context=context) def _set_priority(self, cr, uid, id, field, value, arg, context=None): move_obj = self.pool.get("stock.move") if value: move_ids = [move.id for move in self.browse(cr, uid, id, context=context).move_lines] move_obj.write(cr, uid, move_ids, {'priority': value}, context=context) def get_min_max_date(self, cr, uid, ids, field_name, arg, context=None): """ Finds minimum and maximum dates for picking. 
@return: Dictionary of values """
        res = {}
        for id in ids:
            # defaults: no dates known yet, normal priority
            res[id] = {'min_date': False, 'max_date': False, 'priority': '1'}
        if not ids:
            return res
        # single aggregate query over all moves of the requested pickings
        cr.execute("""select picking_id, min(date_expected), max(date_expected), max(priority) from stock_move where picking_id IN %s group by picking_id""", (tuple(ids),))
        for pick, dt1, dt2, prio in cr.fetchall():
            res[pick]['min_date'] = dt1
            res[pick]['max_date'] = dt2
            res[pick]['priority'] = prio
        return res

    def create(self, cr, user, vals, context=None):
        # Assign a reference from the picking type's sequence when no explicit
        # name is given (missing, '/', or False).
        context = context or {}
        if ('name' not in vals) or (vals.get('name') in ('/', False)):
            ptype_id = vals.get('picking_type_id', context.get('default_picking_type_id', False))
            sequence_id = self.pool.get('stock.picking.type').browse(cr, user, ptype_id, context=context).sequence_id.id
            vals['name'] = self.pool.get('ir.sequence').get_id(cr, user, sequence_id, 'id', context=context)
        return super(stock_picking, self).create(cr, user, vals, context)

    def _state_get(self, cr, uid, ids, field_name, arg, context=None):
        '''The state of a picking depends on the state of its related stock.move
        draft: the picking has no line or any one of the lines is draft
        done, draft, cancel: all lines are done / draft / cancel
        confirmed, waiting, assigned, partially_available depends on move_type (all at once or partial)
        '''
        res = {}
        for pick in self.browse(cr, uid, ids, context=context):
            # terminal/simple cases first
            if (not pick.move_lines) or any([x.state == 'draft' for x in pick.move_lines]):
                res[pick.id] = 'draft'
                continue
            if all([x.state == 'cancel' for x in pick.move_lines]):
                res[pick.id] = 'cancel'
                continue
            if all([x.state in ('cancel', 'done') for x in pick.move_lines]):
                res[pick.id] = 'done'
                continue
            # rank the remaining (non-done, non-cancel) move states
            order = {'confirmed': 0, 'waiting': 1, 'assigned': 2}
            order_inv = {0: 'confirmed', 1: 'waiting', 2: 'assigned'}
            lst = [order[x.state] for x in pick.move_lines if x.state not in ('cancel', 'done')]
            if pick.move_type == 'one':
                # 'all at once': the picking is only as advanced as its least advanced move
                res[pick.id] = order_inv[min(lst)]
            else:
                #we are in the case of partial delivery, so if all move are assigned, picking
                #should be assign too, else if one of the move is assigned, or partially available, picking should be
                #in partially available state, otherwise, picking is in waiting or confirmed state
                res[pick.id] = order_inv[max(lst)]
                if not all(x == 2 for x in lst):
                    if any(x == 2 for x in lst):
                        res[pick.id] = 'partially_available'
                    else:
                        #if all moves aren't assigned, check if we have one product partially available
                        for move in pick.move_lines:
                            if move.partially_available:
                                res[pick.id] = 'partially_available'
                                break
        return res

    def _get_pickings(self, cr, uid, ids, context=None):
        # store-trigger helper (invoked on stock.move records): returns the ids
        # of the pickings impacted by the given moves
        res = set()
        for move in self.browse(cr, uid, ids, context=context):
            if move.picking_id:
                res.add(move.picking_id.id)
        return list(res)

    def _get_pickings_dates_priority(self, cr, uid, ids, context=None):
        # store-trigger helper: pickings whose min/max dates or priority may need
        # recomputation because a move's date or priority fell outside the current range
        res = set()
        for move in self.browse(cr, uid, ids, context=context):
            if move.picking_id and (not (move.picking_id.min_date < move.date_expected < move.picking_id.max_date) or move.priority > move.picking_id.priority):
                res.add(move.picking_id.id)
        return list(res)

    def _get_pack_operation_exist(self, cr, uid, ids, field_name, arg, context=None):
        # function field: True when the picking has at least one pack operation
        res = {}
        for pick in self.browse(cr, uid, ids, context=context):
            res[pick.id] = False
            if pick.pack_operation_ids:
                res[pick.id] = True
        return res

    def _get_quant_reserved_exist(self, cr, uid, ids, field_name, arg, context=None):
        # function field: True when at least one move of the picking has reserved quants
        res = {}
        for pick in self.browse(cr, uid, ids, context=context):
            res[pick.id] = False
            for move in pick.move_lines:
                if move.reserved_quant_ids:
                    res[pick.id] = True
                    # NOTE(review): 'continue' keeps scanning the remaining moves;
                    # 'break' would suffice once one reservation is found
                    continue
        return res

    def check_group_lot(self, cr, uid, context=None):
        """ This function will return true if we have the setting to use lots activated. """
        return self.pool.get('res.users').has_group(cr, uid, 'stock.group_production_lot')

    def check_group_pack(self, cr, uid, context=None):
        """ This function will return true if we have the setting to use package activated.
""" return self.pool.get('res.users').has_group(cr, uid, 'stock.group_tracking_lot') def action_assign_owner(self, cr, uid, ids, context=None): for picking in self.browse(cr, uid, ids, context=context): packop_ids = [op.id for op in picking.pack_operation_ids] self.pool.get('stock.pack.operation').write(cr, uid, packop_ids, {'owner_id': picking.owner_id.id}, context=context) _columns = { 'name': fields.char('Reference', select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=False), 'origin': fields.char('Source Document', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="Reference of the document", select=True), 'backorder_id': fields.many2one('stock.picking', 'Back Order of', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="If this shipment was split, then this field links to the shipment which contains the already processed part.", select=True, copy=False), 'note': fields.text('Notes'), 'move_type': fields.selection([('direct', 'Partial'), ('one', 'All at once')], 'Delivery Method', required=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="It specifies goods to be deliver partially or all at once"), 'state': fields.function(_state_get, type="selection", copy=False, store={ 'stock.picking': (lambda self, cr, uid, ids, ctx: ids, ['move_type'], 20), 'stock.move': (_get_pickings, ['state', 'picking_id', 'partially_available'], 20)}, selection=[ ('draft', 'Draft'), ('cancel', 'Cancelled'), ('waiting', 'Waiting Another Operation'), ('confirmed', 'Waiting Availability'), ('partially_available', 'Partially Available'), ('assigned', 'Ready to Transfer'), ('done', 'Transferred'), ], string='Status', readonly=True, select=True, track_visibility='onchange', help=""" * Draft: not confirmed yet and will not be scheduled until confirmed\n * Waiting Another Operation: waiting for another move to proceed before it becomes automatically available (e.g. 
in Make-To-Order flows)\n * Waiting Availability: still waiting for the availability of products\n * Partially Available: some products are available and reserved\n * Ready to Transfer: products reserved, simply waiting for confirmation.\n * Transferred: has been processed, can't be modified or cancelled anymore\n * Cancelled: has been cancelled, can't be confirmed anymore""" ), 'priority': fields.function(get_min_max_date, multi="min_max_date", fnct_inv=_set_priority, type='selection', selection=procurement.PROCUREMENT_PRIORITIES, string='Priority', store={'stock.move': (_get_pickings_dates_priority, ['priority', 'picking_id'], 20)}, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, select=1, help="Priority for this picking. Setting manually a value here would set it as priority for all the moves", track_visibility='onchange', required=True), 'min_date': fields.function(get_min_max_date, multi="min_max_date", fnct_inv=_set_min_date, store={'stock.move': (_get_pickings_dates_priority, ['date_expected', 'picking_id'], 20)}, type='datetime', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, string='Scheduled Date', select=1, help="Scheduled time for the first part of the shipment to be processed. Setting manually a value here would set it as expected date for all the stock moves.", track_visibility='onchange'), 'max_date': fields.function(get_min_max_date, multi="min_max_date", store={'stock.move': (_get_pickings_dates_priority, ['date_expected', 'picking_id'], 20)}, type='datetime', string='Max. 
Expected Date', select=2, help="Scheduled time for the last part of the shipment to be processed"), 'date': fields.datetime('Creation Date', help="Creation Date, usually the time of the order", select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, track_visibility='onchange'), 'date_done': fields.datetime('Date of Transfer', help="Date of Completion", states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=False), 'move_lines': fields.one2many('stock.move', 'picking_id', 'Internal Moves', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, copy=True), 'quant_reserved_exist': fields.function(_get_quant_reserved_exist, type='boolean', string='Quant already reserved ?', help='technical field used to know if there is already at least one quant reserved on moves of a given picking'), 'partner_id': fields.many2one('res.partner', 'Partner', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}), 'company_id': fields.many2one('res.company', 'Company', required=True, select=True, states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}), 'pack_operation_ids': fields.one2many('stock.pack.operation', 'picking_id', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, string='Related Packing Operations'), 'pack_operation_exist': fields.function(_get_pack_operation_exist, type='boolean', string='Pack Operation Exists?', help='technical field for attrs in view'), 'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, required=True), 'picking_type_code': fields.related('picking_type_id', 'code', type='char', string='Picking Type Code', help="Technical field used to display the correct label on print button in the picking view"), 'owner_id': fields.many2one('res.partner', 'Owner', states={'done': [('readonly', True)], 'cancel': [('readonly', True)]}, help="Default Owner"), # Used 
to search on pickings 'product_id': fields.related('move_lines', 'product_id', type='many2one', relation='product.product', string='Product'), 'recompute_pack_op': fields.boolean('Recompute pack operation?', help='True if reserved quants changed, which mean we might need to recompute the package operations', copy=False), 'location_id': fields.related('move_lines', 'location_id', type='many2one', relation='stock.location', string='Location', readonly=True), 'location_dest_id': fields.related('move_lines', 'location_dest_id', type='many2one', relation='stock.location', string='Destination Location', readonly=True), 'group_id': fields.related('move_lines', 'group_id', type='many2one', relation='procurement.group', string='Procurement Group', readonly=True, store={ 'stock.picking': (lambda self, cr, uid, ids, ctx: ids, ['move_lines'], 10), 'stock.move': (_get_pickings, ['group_id', 'picking_id'], 10), }), } _defaults = { 'name': '/', 'state': 'draft', 'move_type': 'direct', 'priority': '1', # normal 'date': fields.datetime.now, 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.picking', context=c), 'recompute_pack_op': True, } _sql_constraints = [ ('name_uniq', 'unique(name, company_id)', 'Reference must be unique per company!'), ] def do_print_picking(self, cr, uid, ids, context=None): '''This function prints the picking list''' context = dict(context or {}, active_ids=ids) return self.pool.get("report").get_action(cr, uid, ids, 'stock.report_picking', context=context) def action_confirm(self, cr, uid, ids, context=None): todo = [] todo_force_assign = [] for picking in self.browse(cr, uid, ids, context=context): if picking.location_id.usage in ('supplier', 'inventory', 'production'): todo_force_assign.append(picking.id) for r in picking.move_lines: if r.state == 'draft': todo.append(r.id) if len(todo): self.pool.get('stock.move').action_confirm(cr, uid, todo, context=context) if todo_force_assign: 
self.force_assign(cr, uid, todo_force_assign, context=context) return True def action_assign(self, cr, uid, ids, context=None): """ Check availability of picking moves. This has the effect of changing the state and reserve quants on available moves, and may also impact the state of the picking as it is computed based on move's states. @return: True """ for pick in self.browse(cr, uid, ids, context=context): if pick.state == 'draft': self.action_confirm(cr, uid, [pick.id], context=context) #skip the moves that don't need to be checked move_ids = [x.id for x in pick.move_lines if x.state not in ('draft', 'cancel', 'done')] if not move_ids: raise osv.except_osv(_('Warning!'), _('Nothing to check the availability for.')) self.pool.get('stock.move').action_assign(cr, uid, move_ids, context=context) return True def force_assign(self, cr, uid, ids, context=None): """ Changes state of picking to available if moves are confirmed or waiting. @return: True """ for pick in self.browse(cr, uid, ids, context=context): move_ids = [x.id for x in pick.move_lines if x.state in ['confirmed', 'waiting']] self.pool.get('stock.move').force_assign(cr, uid, move_ids, context=context) #pack_operation might have changed and need to be recomputed self.write(cr, uid, ids, {'recompute_pack_op': True}, context=context) return True def action_cancel(self, cr, uid, ids, context=None): for pick in self.browse(cr, uid, ids, context=context): ids2 = [move.id for move in pick.move_lines] self.pool.get('stock.move').action_cancel(cr, uid, ids2, context) return True def action_done(self, cr, uid, ids, context=None): """Changes picking state to done by processing the Stock Moves of the Picking Normally that happens when the button "Done" is pressed on a Picking view. 
@return: True """ for pick in self.browse(cr, uid, ids, context=context): todo = [] for move in pick.move_lines: if move.state == 'draft': todo.extend(self.pool.get('stock.move').action_confirm(cr, uid, [move.id], context=context)) elif move.state in ('assigned', 'confirmed'): todo.append(move.id) if len(todo): self.pool.get('stock.move').action_done(cr, uid, todo, context=context) return True def unlink(self, cr, uid, ids, context=None): #on picking deletion, cancel its move then unlink them too move_obj = self.pool.get('stock.move') context = context or {} for pick in self.browse(cr, uid, ids, context=context): move_ids = [move.id for move in pick.move_lines] move_obj.action_cancel(cr, uid, move_ids, context=context) move_obj.unlink(cr, uid, move_ids, context=context) return super(stock_picking, self).unlink(cr, uid, ids, context=context) def write(self, cr, uid, ids, vals, context=None): if vals.get('move_lines') and not vals.get('pack_operation_ids'): # pack operations are directly dependant of move lines, it needs to be recomputed pack_operation_obj = self.pool['stock.pack.operation'] existing_package_ids = pack_operation_obj.search(cr, uid, [('picking_id', 'in', ids)], context=context) if existing_package_ids: pack_operation_obj.unlink(cr, uid, existing_package_ids, context) res = super(stock_picking, self).write(cr, uid, ids, vals, context=context) #if we changed the move lines or the pack operations, we need to recompute the remaining quantities of both if 'move_lines' in vals or 'pack_operation_ids' in vals: self.do_recompute_remaining_quantities(cr, uid, ids, context=context) return res def _create_backorder(self, cr, uid, picking, backorder_moves=[], context=None): """ Move all non-done lines into a new backorder picking. If the key 'do_only_split' is given in the context, then move all lines not in context.get('split', []) instead of all non-done lines. 
""" if not backorder_moves: backorder_moves = picking.move_lines backorder_move_ids = [x.id for x in backorder_moves if x.state not in ('done', 'cancel')] if 'do_only_split' in context and context['do_only_split']: backorder_move_ids = [x.id for x in backorder_moves if x.id not in context.get('split', [])] if backorder_move_ids: backorder_id = self.copy(cr, uid, picking.id, { 'name': '/', 'move_lines': [], 'pack_operation_ids': [], 'backorder_id': picking.id, }) backorder = self.browse(cr, uid, backorder_id, context=context) self.message_post(cr, uid, picking.id, body=_("Back order <em>%s</em> <b>created</b>.") % (backorder.name), context=context) move_obj = self.pool.get("stock.move") move_obj.write(cr, uid, backorder_move_ids, {'picking_id': backorder_id}, context=context) self.write(cr, uid, [picking.id], {'date_done': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context) self.action_confirm(cr, uid, [backorder_id], context=context) return backorder_id return False @api.cr_uid_ids_context def recheck_availability(self, cr, uid, picking_ids, context=None): self.action_assign(cr, uid, picking_ids, context=context) self.do_prepare_partial(cr, uid, picking_ids, context=context) def _get_top_level_packages(self, cr, uid, quants_suggested_locations, context=None): """This method searches for the higher level packages that can be moved as a single operation, given a list of quants to move and their suggested destination, and returns the list of matching packages. 
""" # Try to find as much as possible top-level packages that can be moved pack_obj = self.pool.get("stock.quant.package") quant_obj = self.pool.get("stock.quant") top_lvl_packages = set() quants_to_compare = quants_suggested_locations.keys() for pack in list(set([x.package_id for x in quants_suggested_locations.keys() if x and x.package_id])): loop = True test_pack = pack good_pack = False pack_destination = False while loop: pack_quants = pack_obj.get_content(cr, uid, [test_pack.id], context=context) all_in = True for quant in quant_obj.browse(cr, uid, pack_quants, context=context): # If the quant is not in the quants to compare and not in the common location if not quant in quants_to_compare: all_in = False break else: #if putaway strat apply, the destination location of each quant may be different (and thus the package should not be taken as a single operation) if not pack_destination: pack_destination = quants_suggested_locations[quant] elif pack_destination != quants_suggested_locations[quant]: all_in = False break if all_in: good_pack = test_pack if test_pack.parent_id: test_pack = test_pack.parent_id else: #stop the loop when there's no parent package anymore loop = False else: #stop the loop when the package test_pack is not totally reserved for moves of this picking #(some quants may be reserved for other picking or not reserved at all) loop = False if good_pack: top_lvl_packages.add(good_pack) return list(top_lvl_packages) def _prepare_pack_ops(self, cr, uid, picking, quants, forced_qties, context=None): """ returns a list of dict, ready to be used in create() of stock.pack.operation. :param picking: browse record (stock.picking) :param quants: browse record list (stock.quant). 
List of quants associated to the picking :param forced_qties: dictionary showing for each product (keys) its corresponding quantity (value) that is not covered by the quants associated to the picking """ def _picking_putaway_apply(product): location = False # Search putaway strategy if product_putaway_strats.get(product.id): location = product_putaway_strats[product.id] else: location = self.pool.get('stock.location').get_putaway_strategy(cr, uid, picking.location_dest_id, product, context=context) product_putaway_strats[product.id] = location return location or picking.location_dest_id.id # If we encounter an UoM that is smaller than the default UoM or the one already chosen, use the new one instead. product_uom = {} # Determines UoM used in pack operations location_dest_id = None location_id = None for move in [x for x in picking.move_lines if x.state not in ('done', 'cancel')]: if not product_uom.get(move.product_id.id): product_uom[move.product_id.id] = move.product_id.uom_id if move.product_uom.id != move.product_id.uom_id.id and move.product_uom.factor > product_uom[move.product_id.id].factor: product_uom[move.product_id.id] = move.product_uom if not move.scrapped: if location_dest_id and move.location_dest_id.id != location_dest_id: raise Warning(_('The destination location must be the same for all the moves of the picking.')) location_dest_id = move.location_dest_id.id if location_id and move.location_id.id != location_id: raise Warning(_('The source location must be the same for all the moves of the picking.')) location_id = move.location_id.id pack_obj = self.pool.get("stock.quant.package") quant_obj = self.pool.get("stock.quant") vals = [] qtys_grouped = {} #for each quant of the picking, find the suggested location quants_suggested_locations = {} product_putaway_strats = {} for quant in quants: if quant.qty <= 0: continue suggested_location_id = _picking_putaway_apply(quant.product_id) quants_suggested_locations[quant] = suggested_location_id #find the 
packages we can movei as a whole top_lvl_packages = self._get_top_level_packages(cr, uid, quants_suggested_locations, context=context) # and then create pack operations for the top-level packages found for pack in top_lvl_packages: pack_quant_ids = pack_obj.get_content(cr, uid, [pack.id], context=context) pack_quants = quant_obj.browse(cr, uid, pack_quant_ids, context=context) vals.append({ 'picking_id': picking.id, 'package_id': pack.id, 'product_qty': 1.0, 'location_id': pack.location_id.id, 'location_dest_id': quants_suggested_locations[pack_quants[0]], 'owner_id': pack.owner_id.id, }) #remove the quants inside the package so that they are excluded from the rest of the computation for quant in pack_quants: del quants_suggested_locations[quant] # Go through all remaining reserved quants and group by product, package, lot, owner, source location and dest location for quant, dest_location_id in quants_suggested_locations.items(): key = (quant.product_id.id, quant.package_id.id, quant.lot_id.id, quant.owner_id.id, quant.location_id.id, dest_location_id) if qtys_grouped.get(key): qtys_grouped[key] += quant.qty else: qtys_grouped[key] = quant.qty # Do the same for the forced quantities (in cases of force_assign or incomming shipment for example) for product, qty in forced_qties.items(): if qty <= 0: continue suggested_location_id = _picking_putaway_apply(product) key = (product.id, False, False, picking.owner_id.id, picking.location_id.id, suggested_location_id) if qtys_grouped.get(key): qtys_grouped[key] += qty else: qtys_grouped[key] = qty # Create the necessary operations for the grouped quants and remaining qtys uom_obj = self.pool.get('product.uom') prevals = {} for key, qty in qtys_grouped.items(): product = self.pool.get("product.product").browse(cr, uid, key[0], context=context) uom_id = product.uom_id.id qty_uom = qty if product_uom.get(key[0]): uom_id = product_uom[key[0]].id qty_uom = uom_obj._compute_qty(cr, uid, product.uom_id.id, qty, uom_id) val_dict = 
{ 'picking_id': picking.id, 'product_qty': qty_uom, 'product_id': key[0], 'package_id': key[1], 'lot_id': key[2], 'owner_id': key[3], 'location_id': key[4], 'location_dest_id': key[5], 'product_uom_id': uom_id, } if key[0] in prevals: prevals[key[0]].append(val_dict) else: prevals[key[0]] = [val_dict] # prevals var holds the operations in order to create them in the same order than the picking stock moves if possible processed_products = set() for move in [x for x in picking.move_lines if x.state not in ('done', 'cancel')]: if move.product_id.id not in processed_products: vals += prevals.get(move.product_id.id, []) processed_products.add(move.product_id.id) return vals @api.cr_uid_ids_context def open_barcode_interface(self, cr, uid, picking_ids, context=None): final_url="/barcode/web/#action=stock.ui&picking_id="+str(picking_ids[0]) return {'type': 'ir.actions.act_url', 'url':final_url, 'target': 'self',} @api.cr_uid_ids_context def do_partial_open_barcode(self, cr, uid, picking_ids, context=None): self.do_prepare_partial(cr, uid, picking_ids, context=context) return self.open_barcode_interface(cr, uid, picking_ids, context=context) @api.cr_uid_ids_context def do_prepare_partial(self, cr, uid, picking_ids, context=None): context = context or {} pack_operation_obj = self.pool.get('stock.pack.operation') #used to avoid recomputing the remaining quantities at each new pack operation created ctx = context.copy() ctx['no_recompute'] = True #get list of existing operations and delete them existing_package_ids = pack_operation_obj.search(cr, uid, [('picking_id', 'in', picking_ids)], context=context) if existing_package_ids: pack_operation_obj.unlink(cr, uid, existing_package_ids, context) for picking in self.browse(cr, uid, picking_ids, context=context): forced_qties = {} # Quantity remaining after calculating reserved quants picking_quants = [] #Calculate packages, reserved quants, qtys of this picking's moves for move in picking.move_lines: if move.state not in 
('assigned', 'confirmed', 'waiting'): continue
                move_quants = move.reserved_quant_ids
                picking_quants += move_quants
                forced_qty = (move.state == 'assigned') and move.product_qty - sum([x.qty for x in move_quants]) or 0
                #if we used force_assign() on the move, or if the move is incoming, forced_qty > 0
                if float_compare(forced_qty, 0, precision_rounding=move.product_id.uom_id.rounding) > 0:
                    if forced_qties.get(move.product_id):
                        forced_qties[move.product_id] += forced_qty
                    else:
                        forced_qties[move.product_id] = forced_qty
            for vals in self._prepare_pack_ops(cr, uid, picking, picking_quants, forced_qties, context=context):
                pack_operation_obj.create(cr, uid, vals, context=ctx)
        #recompute the remaining quantities all at once
        self.do_recompute_remaining_quantities(cr, uid, picking_ids, context=context)
        self.write(cr, uid, picking_ids, {'recompute_pack_op': False}, context=context)

    @api.cr_uid_ids_context
    def do_unreserve(self, cr, uid, picking_ids, context=None):
        """
          Will remove all quants for picking in picking_ids
        """
        moves_to_unreserve = []
        pack_line_to_unreserve = []
        for picking in self.browse(cr, uid, picking_ids, context=context):
            moves_to_unreserve += [m.id for m in picking.move_lines if m.state not in ('done', 'cancel')]
            pack_line_to_unreserve += [p.id for p in picking.pack_operation_ids]
        if moves_to_unreserve:
            if pack_line_to_unreserve:
                self.pool.get('stock.pack.operation').unlink(cr, uid, pack_line_to_unreserve, context=context)
            self.pool.get('stock.move').do_unreserve(cr, uid, moves_to_unreserve, context=context)

    def recompute_remaining_qty(self, cr, uid, picking, context=None):
        # Rebuild the stock.move.operation.link records tying pack operations to moves.
        # Returns (need_rereserve, all_op_processed).

        def _create_link_for_index(operation_id, index, product_id, qty_to_assign, quant_id=False):
            # link the operation to the move at ``index`` in prod2move_ids[product_id],
            # for at most its remaining quantity; returns the quantity actually linked
            move_dict = prod2move_ids[product_id][index]
            qty_on_link = min(move_dict['remaining_qty'], qty_to_assign)
            self.pool.get('stock.move.operation.link').create(cr, uid, {'move_id': move_dict['move'].id, 'operation_id': operation_id, 'qty': qty_on_link, 'reserved_quant_id': quant_id}, context=context)
            if move_dict['remaining_qty'] == qty_on_link:
                # move fully consumed: drop it from the candidates
                prod2move_ids[product_id].pop(index)
            else:
                move_dict['remaining_qty'] -= qty_on_link
            return qty_on_link

        def _create_link_for_quant(operation_id, quant, qty):
            """create a link for given operation and reserved move of given quant, for the max quantity possible, and returns this quantity"""
            if not quant.reservation_id.id:
                # unreserved quant: fall back to product-level matching
                return _create_link_for_product(operation_id, quant.product_id.id, qty)
            qty_on_link = 0
            # find the candidate move that actually holds this quant's reservation
            for i in range(0, len(prod2move_ids[quant.product_id.id])):
                if prod2move_ids[quant.product_id.id][i]['move'].id != quant.reservation_id.id:
                    continue
                qty_on_link = _create_link_for_index(operation_id, i, quant.product_id.id, qty, quant_id=quant.id)
                break
            return qty_on_link

        def _create_link_for_product(operation_id, product_id, qty):
            '''method that creates the link between a given operation and move(s) of given product, for the given quantity.
            Returns True if it was possible to create links for the requested quantity (False if there was not enough quantity on stock moves)'''
            qty_to_assign = qty
            prod_obj = self.pool.get("product.product")
            product = prod_obj.browse(cr, uid, product_id)
            rounding = product.uom_id.rounding
            qtyassign_cmp = float_compare(qty_to_assign, 0.0, precision_rounding=rounding)
            if prod2move_ids.get(product_id):
                while prod2move_ids[product_id] and qtyassign_cmp > 0:
                    qty_on_link = _create_link_for_index(operation_id, 0, product_id, qty_to_assign, quant_id=False)
                    qty_to_assign -= qty_on_link
                    qtyassign_cmp = float_compare(qty_to_assign, 0.0, precision_rounding=rounding)
            return qtyassign_cmp == 0

        uom_obj = self.pool.get('product.uom')
        package_obj = self.pool.get('stock.quant.package')
        quant_obj = self.pool.get('stock.quant')
        link_obj = self.pool.get('stock.move.operation.link')
        quants_in_package_done = set()
        prod2move_ids = {}
        still_to_do = []
        #make a dictionary giving for each product, the moves and related quantity that can be used in operation links
        for move in [x for x in picking.move_lines if x.state not in ('done', 'cancel')]:
            if not prod2move_ids.get(move.product_id.id):
                prod2move_ids[move.product_id.id] = [{'move': move, 'remaining_qty': move.product_qty}]
            else:
                prod2move_ids[move.product_id.id].append({'move': move, 'remaining_qty': move.product_qty})

        need_rereserve = False
        #sort the operations in order to give higher priority to those with a package, then a serial number
        operations = picking.pack_operation_ids
        operations = sorted(operations, key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0))
        #delete existing operations to start again from scratch
        links = link_obj.search(cr, uid, [('operation_id', 'in', [x.id for x in operations])], context=context)
        if links:
            link_obj.unlink(cr, uid, links, context=context)
        #1) first, try to create links when quants can be identified without any doubt
        for ops in operations:
            #for each operation, create the links with the stock move by seeking on the matching reserved quants,
            #and deffer the operation if there is some ambiguity on the move to select
            if ops.package_id and not ops.product_id:
                #entire package
                quant_ids = package_obj.get_content(cr, uid, [ops.package_id.id], context=context)
                for quant in quant_obj.browse(cr, uid, quant_ids, context=context):
                    remaining_qty_on_quant = quant.qty
                    if quant.reservation_id:
                        #avoid quants being counted twice
                        quants_in_package_done.add(quant.id)
                        qty_on_link = _create_link_for_quant(ops.id, quant, quant.qty)
                        remaining_qty_on_quant -= qty_on_link
                    if remaining_qty_on_quant:
                        still_to_do.append((ops, quant.product_id.id, remaining_qty_on_quant))
                        need_rereserve = True
            elif ops.product_id.id:
                #Check moves with same product
                qty_to_assign = uom_obj._compute_qty_obj(cr, uid, ops.product_uom_id, ops.product_qty, ops.product_id.uom_id, context=context)
                for move_dict in prod2move_ids.get(ops.product_id.id, []):
                    move = move_dict['move']
                    for quant in move.reserved_quant_ids:
                        if not qty_to_assign > 0:
                            break
                        if quant.id in quants_in_package_done:
                            continue

                        #check if the quant is matching the operation details
                        if ops.package_id:
                            flag = quant.package_id and bool(package_obj.search(cr, uid, [('id', 'child_of', [ops.package_id.id])], context=context)) or False
                        else:
                            flag = not quant.package_id.id
                        flag = flag and ((ops.lot_id and ops.lot_id.id == quant.lot_id.id) or not ops.lot_id)
                        flag = flag and (ops.owner_id.id == quant.owner_id.id)
                        if flag:
                            max_qty_on_link = min(quant.qty, qty_to_assign)
                            qty_on_link = _create_link_for_quant(ops.id, quant, max_qty_on_link)
                            qty_to_assign -= qty_on_link
                qty_assign_cmp = float_compare(qty_to_assign, 0, precision_rounding=ops.product_id.uom_id.rounding)
                if qty_assign_cmp > 0:
                    #qty reserved is less than qty put in operations. We need to create a link but it's deferred after we processed
                    #all the quants (because they leave no choice on their related move and needs to be processed with higher priority)
                    still_to_do += [(ops, ops.product_id.id, qty_to_assign)]
                    need_rereserve = True

        #2) then, process the remaining part
        all_op_processed = True
        for ops, product_id, remaining_qty in still_to_do:
            all_op_processed = _create_link_for_product(ops.id, product_id, remaining_qty) and all_op_processed
        return (need_rereserve, all_op_processed)

    def picking_recompute_remaining_quantities(self, cr, uid, picking, context=None):
        # wrapper that only recomputes when pack operations exist
        need_rereserve = False
        all_op_processed = True
        if picking.pack_operation_ids:
            need_rereserve, all_op_processed = self.recompute_remaining_qty(cr, uid, picking, context=context)
        return need_rereserve, all_op_processed

    @api.cr_uid_ids_context
    def do_recompute_remaining_quantities(self, cr, uid, picking_ids, context=None):
        # batch variant: recompute each picking that has pack operations
        for picking in self.browse(cr, uid, picking_ids, context=context):
            if picking.pack_operation_ids:
                self.recompute_remaining_qty(cr, uid, picking, context=context)

    def _prepare_values_extra_move(self, cr, uid, op, product, remaining_qty, context=None):
        """
        Creates an extra move when there is no corresponding original move to be copied
        """
        uom_obj = self.pool.get("product.uom")
        uom_id = product.uom_id.id
        qty = remaining_qty
        if op.product_id and op.product_uom_id and op.product_uom_id.id != product.uom_id.id:
            if op.product_uom_id.factor > product.uom_id.factor:  #If the pack operation's is a smaller unit
                uom_id = op.product_uom_id.id
                #HALF-UP rounding as only rounding errors will be because of propagation of error from default UoM
                qty = uom_obj._compute_qty_obj(cr, uid, product.uom_id, remaining_qty, op.product_uom_id, rounding_method='HALF-UP')
        picking = op.picking_id
        ref = product.default_code
        name = '[' + ref + ']' + ' ' + product.name if ref else product.name
        res = {
            'picking_id': picking.id,
            'location_id': picking.location_id.id,
            'location_dest_id': picking.location_dest_id.id,
            'product_id': product.id,
            'product_uom': uom_id,
            'product_uom_qty': qty,
            'name': _('Extra Move: ') + name,
            'state': 'draft',
            # NOTE(review): many2one values are usually written as ids — confirm
            # this should not be ``op.owner_id.id``
            'restrict_partner_id': op.owner_id,
        }
        return res

    def _create_extra_moves(self, cr, uid, picking, context=None):
        '''This function creates move lines on a picking, at the time of do_transfer, based on
        unexpected product transfers (or exceeding quantities) found in the pack operations.
''' move_obj = self.pool.get('stock.move') operation_obj = self.pool.get('stock.pack.operation') moves = [] for op in picking.pack_operation_ids: for product_id, remaining_qty in operation_obj._get_remaining_prod_quantities(cr, uid, op, context=context).items(): product = self.pool.get('product.product').browse(cr, uid, product_id, context=context) if float_compare(remaining_qty, 0, precision_rounding=product.uom_id.rounding) > 0: vals = self._prepare_values_extra_move(cr, uid, op, product, remaining_qty, context=context) moves.append(move_obj.create(cr, uid, vals, context=context)) if moves: move_obj.action_confirm(cr, uid, moves, context=context) return moves def rereserve_pick(self, cr, uid, ids, context=None): """ This can be used to provide a button that rereserves taking into account the existing pack operations """ for pick in self.browse(cr, uid, ids, context=context): self.rereserve_quants(cr, uid, pick, move_ids = [x.id for x in pick.move_lines], context=context) def rereserve_quants(self, cr, uid, picking, move_ids=[], context=None): """ Unreserve quants then try to reassign quants.""" stock_move_obj = self.pool.get('stock.move') if not move_ids: self.do_unreserve(cr, uid, [picking.id], context=context) self.action_assign(cr, uid, [picking.id], context=context) else: stock_move_obj.do_unreserve(cr, uid, move_ids, context=context) stock_move_obj.action_assign(cr, uid, move_ids, context=context) @api.cr_uid_ids_context def do_enter_transfer_details(self, cr, uid, picking, context=None): if not context: context = {} context.update({ 'active_model': self._name, 'active_ids': picking, 'active_id': len(picking) and picking[0] or False }) created_id = self.pool['stock.transfer_details'].create(cr, uid, {'picking_id': len(picking) and picking[0] or False}, context) return self.pool['stock.transfer_details'].wizard_view(cr, uid, created_id, context) @api.cr_uid_ids_context def do_transfer(self, cr, uid, picking_ids, context=None): """ If no pack operation, we do 
simple action_done of the picking Otherwise, do the pack operations """ if not context: context = {} stock_move_obj = self.pool.get('stock.move') for picking in self.browse(cr, uid, picking_ids, context=context): if not picking.pack_operation_ids: self.action_done(cr, uid, [picking.id], context=context) continue else: need_rereserve, all_op_processed = self.picking_recompute_remaining_quantities(cr, uid, picking, context=context) #create extra moves in the picking (unexpected product moves coming from pack operations) todo_move_ids = [] if not all_op_processed: todo_move_ids += self._create_extra_moves(cr, uid, picking, context=context) #split move lines if needed toassign_move_ids = [] for move in picking.move_lines: remaining_qty = move.remaining_qty if move.state in ('done', 'cancel'): #ignore stock moves cancelled or already done continue elif move.state == 'draft': toassign_move_ids.append(move.id) if float_compare(remaining_qty, 0, precision_rounding = move.product_id.uom_id.rounding) == 0: if move.state in ('draft', 'assigned', 'confirmed'): todo_move_ids.append(move.id) elif float_compare(remaining_qty,0, precision_rounding = move.product_id.uom_id.rounding) > 0 and \ float_compare(remaining_qty, move.product_qty, precision_rounding = move.product_id.uom_id.rounding) < 0: new_move = stock_move_obj.split(cr, uid, move, remaining_qty, context=context) todo_move_ids.append(move.id) #Assign move as it was assigned before toassign_move_ids.append(new_move) if need_rereserve or not all_op_processed: if not picking.location_id.usage in ("supplier", "production", "inventory"): self.rereserve_quants(cr, uid, picking, move_ids=todo_move_ids, context=context) self.do_recompute_remaining_quantities(cr, uid, [picking.id], context=context) if todo_move_ids and not context.get('do_only_split'): self.pool.get('stock.move').action_done(cr, uid, todo_move_ids, context=context) elif context.get('do_only_split'): context = dict(context, split=todo_move_ids) 
self._create_backorder(cr, uid, picking, context=context) if toassign_move_ids: stock_move_obj.action_assign(cr, uid, toassign_move_ids, context=context) return True @api.cr_uid_ids_context def do_split(self, cr, uid, picking_ids, context=None): """ just split the picking (create a backorder) without making it 'done' """ if context is None: context = {} ctx = context.copy() ctx['do_only_split'] = True return self.do_transfer(cr, uid, picking_ids, context=ctx) def get_next_picking_for_ui(self, cr, uid, context=None): """ returns the next pickings to process. Used in the barcode scanner UI""" if context is None: context = {} domain = [('state', 'in', ('assigned', 'partially_available'))] if context.get('default_picking_type_id'): domain.append(('picking_type_id', '=', context['default_picking_type_id'])) return self.search(cr, uid, domain, context=context) def action_done_from_ui(self, cr, uid, picking_id, context=None): """ called when button 'done' is pushed in the barcode scanner UI """ #write qty_done into field product_qty for every package_operation before doing the transfer pack_op_obj = self.pool.get('stock.pack.operation') for operation in self.browse(cr, uid, picking_id, context=context).pack_operation_ids: pack_op_obj.write(cr, uid, operation.id, {'product_qty': operation.qty_done}, context=context) self.do_transfer(cr, uid, [picking_id], context=context) #return id of next picking to work on return self.get_next_picking_for_ui(cr, uid, context=context) @api.cr_uid_ids_context def action_pack(self, cr, uid, picking_ids, operation_filter_ids=None, context=None): """ Create a package with the current pack_operation_ids of the picking that aren't yet in a pack. Used in the barcode scanner UI and the normal interface as well. 
operation_filter_ids is used by barcode scanner interface to specify a subset of operation to pack""" if operation_filter_ids == None: operation_filter_ids = [] stock_operation_obj = self.pool.get('stock.pack.operation') package_obj = self.pool.get('stock.quant.package') stock_move_obj = self.pool.get('stock.move') package_id = False for picking_id in picking_ids: operation_search_domain = [('picking_id', '=', picking_id), ('result_package_id', '=', False)] if operation_filter_ids != []: operation_search_domain.append(('id', 'in', operation_filter_ids)) operation_ids = stock_operation_obj.search(cr, uid, operation_search_domain, context=context) pack_operation_ids = [] if operation_ids: for operation in stock_operation_obj.browse(cr, uid, operation_ids, context=context): #If we haven't done all qty in operation, we have to split into 2 operation op = operation if (operation.qty_done < operation.product_qty): new_operation = stock_operation_obj.copy(cr, uid, operation.id, {'product_qty': operation.qty_done,'qty_done': operation.qty_done}, context=context) stock_operation_obj.write(cr, uid, operation.id, {'product_qty': operation.product_qty - operation.qty_done,'qty_done': 0, 'lot_id': False}, context=context) op = stock_operation_obj.browse(cr, uid, new_operation, context=context) pack_operation_ids.append(op.id) if op.product_id and op.location_id and op.location_dest_id: stock_move_obj.check_tracking_product(cr, uid, op.product_id, op.lot_id.id, op.location_id, op.location_dest_id, context=context) package_id = package_obj.create(cr, uid, {}, context=context) stock_operation_obj.write(cr, uid, pack_operation_ids, {'result_package_id': package_id}, context=context) return package_id def process_product_id_from_ui(self, cr, uid, picking_id, product_id, op_id, increment=True, context=None): return self.pool.get('stock.pack.operation')._search_and_increment(cr, uid, picking_id, [('product_id', '=', product_id),('id', '=', op_id)], increment=increment, 
context=context) def process_barcode_from_ui(self, cr, uid, picking_id, barcode_str, visible_op_ids, context=None): '''This function is called each time there barcode scanner reads an input''' lot_obj = self.pool.get('stock.production.lot') package_obj = self.pool.get('stock.quant.package') product_obj = self.pool.get('product.product') stock_operation_obj = self.pool.get('stock.pack.operation') stock_location_obj = self.pool.get('stock.location') answer = {'filter_loc': False, 'operation_id': False} #check if the barcode correspond to a location matching_location_ids = stock_location_obj.search(cr, uid, [('loc_barcode', '=', barcode_str)], context=context) if matching_location_ids: #if we have a location, return immediatly with the location name location = stock_location_obj.browse(cr, uid, matching_location_ids[0], context=None) answer['filter_loc'] = stock_location_obj._name_get(cr, uid, location, context=None) answer['filter_loc_id'] = matching_location_ids[0] return answer #check if the barcode correspond to a product matching_product_ids = product_obj.search(cr, uid, ['|', ('ean13', '=', barcode_str), ('default_code', '=', barcode_str)], context=context) if matching_product_ids: op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('product_id', '=', matching_product_ids[0])], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context) answer['operation_id'] = op_id return answer #check if the barcode correspond to a lot matching_lot_ids = lot_obj.search(cr, uid, [('name', '=', barcode_str)], context=context) if matching_lot_ids: lot = lot_obj.browse(cr, uid, matching_lot_ids[0], context=context) op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('product_id', '=', lot.product_id.id), ('lot_id', '=', lot.id)], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context) answer['operation_id'] = op_id return answer #check if the barcode correspond to a package 
        matching_package_ids = package_obj.search(cr, uid, [('name', '=', barcode_str)], context=context)
        if matching_package_ids:
            op_id = stock_operation_obj._search_and_increment(cr, uid, picking_id, [('package_id', '=', matching_package_ids[0])], filter_visible=True, visible_op_ids=visible_op_ids, increment=True, context=context)
            answer['operation_id'] = op_id
            return answer
        # nothing matched the barcode: return the default (all-False) answer
        return answer


class stock_production_lot(osv.osv):
    _name = 'stock.production.lot'
    _inherit = ['mail.thread']
    _description = 'Lot/Serial'
    _columns = {
        'name': fields.char('Serial Number', required=True, help="Unique Serial Number"),
        'ref': fields.char('Internal Reference', help="Internal reference number in case it differs from the manufacturer's serial number"),
        'product_id': fields.many2one('product.product', 'Product', required=True, domain=[('type', '<>', 'service')]),
        'quant_ids': fields.one2many('stock.quant', 'lot_id', 'Quants', readonly=True),
        'create_date': fields.datetime('Creation Date'),
    }
    _defaults = {
        # next value of the 'stock.lot.serial' ir.sequence
        'name': lambda x, y, z, c: x.pool.get('ir.sequence').get(y, z, 'stock.lot.serial'),
        'product_id': lambda x, y, z, c: c.get('product_id', False),
    }
    _sql_constraints = [
        ('name_ref_uniq', 'unique (name, ref, product_id)', 'The combination of serial number, internal reference and product must be unique !'),
    ]

    def action_traceability(self, cr, uid, ids, context=None):
        """ It traces the information of lots
        @param self: The object pointer.
        @param cr: A database cursor
        @param uid: ID of the user currently logged in
        @param ids: List of IDs selected
        @param context: A standard dictionary
        @return: A dictionary of values
        """
        quant_obj = self.pool.get("stock.quant")
        quants = quant_obj.search(cr, uid, [('lot_id', 'in', ids)], context=context)
        moves = set()
        for quant in quant_obj.browse(cr, uid, quants, context=context):
            moves |= {move.id for move in quant.history_ids}
        if moves:
            # open a tree/form action on every move that touched those quants
            return {
                'domain': "[('id','in',[" + ','.join(map(str, list(moves))) + "])]",
                'name': _('Traceability'),
                'view_mode': 'tree,form',
                'view_type': 'form',
                'context': {'tree_view_ref': 'stock.view_move_tree'},
                'res_model': 'stock.move',
                'type': 'ir.actions.act_window',
                    }
        return False


# ----------------------------------------------------
# Move
# ----------------------------------------------------

class stock_move(osv.osv):
    _name = "stock.move"
    _description = "Stock Move"
    _order = 'date_expected desc, id'
    _log_create = False

    def get_price_unit(self, cr, uid, move, context=None):
        """ Returns the unit price to store on the quant """
        return move.price_unit or move.product_id.standard_price

    def name_get(self, cr, uid, ids, context=None):
        # "source > destination" prefixed with product code and picking origin when set
        res = []
        for line in self.browse(cr, uid, ids, context=context):
            name = line.location_id.name + ' > ' + line.location_dest_id.name
            if line.product_id.code:
                name = line.product_id.code + ': ' + name
            if line.picking_id.origin:
                name = line.picking_id.origin + '/ ' + name
            res.append((line.id, name))
        return res

    def _quantity_normalize(self, cr, uid, ids, name, args, context=None):
        # functional getter: product_uom_qty converted into the product's default UoM
        uom_obj = self.pool.get('product.uom')
        res = {}
        for m in self.browse(cr, uid, ids, context=context):
            res[m.id] = uom_obj._compute_qty_obj(cr, uid, m.product_uom, m.product_uom_qty, m.product_id.uom_id, context=context)
        return res

    def _get_remaining_qty(self, cr, uid, ids, field_name, args, context=None):
        # quantity of the move not yet covered by linked pack operations
        uom_obj = self.pool.get('product.uom')
        res = {}
        for move in self.browse(cr, uid, ids, context=context):
            qty = move.product_qty
            for record in move.linked_move_operation_ids:
                qty -= record.qty
            # Keeping in product default UoM
            res[move.id] = float_round(qty, precision_rounding=move.product_id.uom_id.rounding)
        return res

    def _get_lot_ids(self, cr, uid, ids, field_name, args, context=None):
        # lots come from moved quants once done, from reserved quants before that
        res = dict.fromkeys(ids, False)
        for move in self.browse(cr, uid, ids, context=context):
            if move.state == 'done':
                res[move.id] = [q.lot_id.id for q in move.quant_ids if q.lot_id]
            else:
                res[move.id] = [q.lot_id.id for q in move.reserved_quant_ids if q.lot_id]
        return res

    def _get_product_availability(self, cr, uid, ids, field_name, args, context=None):
        # unreserved stock of the product under the move's source location,
        # capped at the move quantity
        quant_obj = self.pool.get('stock.quant')
        res = dict.fromkeys(ids, False)
        for move in self.browse(cr, uid, ids, context=context):
            if move.state == 'done':
                res[move.id] = move.product_qty
            else:
                sublocation_ids = self.pool.get('stock.location').search(cr, uid, [('id', 'child_of', [move.location_id.id])], context=context)
                quant_ids = quant_obj.search(cr, uid, [('location_id', 'in', sublocation_ids), ('product_id', '=', move.product_id.id), ('reservation_id', '=', False)], context=context)
                availability = 0
                for quant in quant_obj.browse(cr, uid, quant_ids, context=context):
                    availability += quant.qty
                res[move.id] = min(move.product_qty, availability)
        return res

    def _get_string_qty_information(self, cr, uid, ids, field_name, args, context=None):
        # human-readable availability summary shown next to the move
        settings_obj = self.pool.get('stock.config.settings')
        uom_obj = self.pool.get('product.uom')
        res = dict.fromkeys(ids, '')
        for move in self.browse(cr, uid, ids, context=context):
            if move.state in ('draft', 'done', 'cancel') or move.location_id.usage != 'internal':
                res[move.id] = ''  # 'not applicable' or 'n/a' could work too
                continue
            total_available = min(move.product_qty, move.reserved_availability + move.availability)
            total_available = uom_obj._compute_qty_obj(cr, uid, move.product_id.uom_id, total_available, move.product_uom, context=context)
            info = str(total_available)
            #look in the settings if we need to display the UoM name or not
            config_ids = settings_obj.search(cr, uid, [], limit=1, order='id DESC', context=context)
            if config_ids:
                stock_settings = settings_obj.browse(cr, uid, config_ids[0], context=context)
                if stock_settings.group_uom:
                    info += ' ' + move.product_uom.name
            if move.reserved_availability:
                if move.reserved_availability != total_available:
                    #some of the available quantity is assigned and some are available but not reserved
                    reserved_available = uom_obj._compute_qty_obj(cr, uid, move.product_id.uom_id, move.reserved_availability, move.product_uom, context=context)
                    info += _(' (%s reserved)') % str(reserved_available)
                else:
                    #all available quantity is assigned
                    info += _(' (reserved)')
            res[move.id] = info
        return res

    def _get_reserved_availability(self, cr, uid, ids, field_name, args, context=None):
        # total quantity of quants currently reserved for each move
        res = dict.fromkeys(ids, 0)
        for move in self.browse(cr, uid, ids, context=context):
            res[move.id] = sum([quant.qty for quant in move.reserved_quant_ids])
        return res

    def _get_move(self, cr, uid, ids, context=None):
        # store-trigger helper: moves impacted by changes on these quants
        res = set()
        for quant in self.browse(cr, uid, ids, context=context):
            if quant.reservation_id:
                res.add(quant.reservation_id.id)
        return list(res)

    def _get_move_ids(self, cr, uid, ids, context=None):
        # store-trigger helper: moves belonging to these pickings
        res = []
        for picking in self.browse(cr, uid, ids, context=context):
            res += [x.id for x in picking.move_lines]
        return res

    def _get_moves_from_prod(self, cr, uid, ids, context=None):
        # store-trigger helper: moves referencing these products
        if ids:
            return self.pool.get('stock.move').search(cr, uid, [('product_id', 'in', ids)], context=context)
        return []

    def _set_product_qty(self, cr, uid, id, field, value, arg, context=None):
        """ The meaning of product_qty field changed lately and is now a functional field computing the quantity
            in the default product UoM. This code has been added to raise an error if a write is made given a value
            for `product_qty`, where the same write should set the `product_uom_qty` field instead, in order to
            detect errors.
""" raise osv.except_osv(_('Programming Error!'), _('The requested operation cannot be processed because of a programming error setting the `product_qty` field instead of the `product_uom_qty`.')) _columns = { 'name': fields.char('Description', required=True, select=True), 'priority': fields.selection(procurement.PROCUREMENT_PRIORITIES, 'Priority'), 'create_date': fields.datetime('Creation Date', readonly=True, select=True), 'date': fields.datetime('Date', required=True, select=True, help="Move date: scheduled date until move is done, then date of actual move processing", states={'done': [('readonly', True)]}), 'date_expected': fields.datetime('Expected Date', states={'done': [('readonly', True)]}, required=True, select=True, help="Scheduled date for the processing of this move"), 'product_id': fields.many2one('product.product', 'Product', required=True, select=True, domain=[('type', '<>', 'service')], states={'done': [('readonly', True)]}), 'product_qty': fields.function(_quantity_normalize, fnct_inv=_set_product_qty, type='float', digits=0, store={ _name: (lambda self, cr, uid, ids, c={}: ids, ['product_id', 'product_uom', 'product_uom_qty'], 10), }, string='Quantity', help='Quantity in the default UoM of the product'), 'product_uom_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True, states={'done': [('readonly', True)]}, help="This is the quantity of products from an inventory " "point of view. For moves in the state 'done', this is the " "quantity of products that were actually moved. For other " "moves, this is the quantity of product that is planned to " "be moved. Lowering this quantity does not generate a " "backorder. Changing this quantity on assigned moves affects " "the product reservation, and should be done with care." 
), 'product_uom': fields.many2one('product.uom', 'Unit of Measure', required=True, states={'done': [('readonly', True)]}), 'product_uos_qty': fields.float('Quantity (UOS)', digits_compute=dp.get_precision('Product UoS'), states={'done': [('readonly', True)]}), 'product_uos': fields.many2one('product.uom', 'Product UOS', states={'done': [('readonly', True)]}), 'product_tmpl_id': fields.related('product_id', 'product_tmpl_id', type='many2one', relation='product.template', string='Product Template'), 'product_packaging': fields.many2one('product.packaging', 'Prefered Packaging', help="It specifies attributes of packaging like type, quantity of packaging,etc."), 'location_id': fields.many2one('stock.location', 'Source Location', required=True, select=True, auto_join=True, states={'done': [('readonly', True)]}, help="Sets a location if you produce at a fixed location. This can be a partner location if you subcontract the manufacturing operations."), 'location_dest_id': fields.many2one('stock.location', 'Destination Location', required=True, states={'done': [('readonly', True)]}, select=True, auto_join=True, help="Location where the system will stock the finished products."), 'partner_id': fields.many2one('res.partner', 'Destination Address ', states={'done': [('readonly', True)]}, help="Optional address where goods are to be delivered, specifically used for allotment"), 'move_dest_id': fields.many2one('stock.move', 'Destination Move', help="Optional: next stock move when chaining them", select=True, copy=False), 'move_orig_ids': fields.one2many('stock.move', 'move_dest_id', 'Original Move', help="Optional: previous stock move when chaining them", select=True), 'picking_id': fields.many2one('stock.picking', 'Reference', select=True, states={'done': [('readonly', True)]}), 'note': fields.text('Notes'), 'state': fields.selection([('draft', 'New'), ('cancel', 'Cancelled'), ('waiting', 'Waiting Another Move'), ('confirmed', 'Waiting Availability'), ('assigned', 'Available'), 
('done', 'Done'), ], 'Status', readonly=True, select=True, copy=False, help= "* New: When the stock move is created and not yet confirmed.\n"\ "* Waiting Another Move: This state can be seen when a move is waiting for another one, for example in a chained flow.\n"\ "* Waiting Availability: This state is reached when the procurement resolution is not straight forward. It may need the scheduler to run, a component to me manufactured...\n"\ "* Available: When products are reserved, it is set to \'Available\'.\n"\ "* Done: When the shipment is processed, the state is \'Done\'."), 'partially_available': fields.boolean('Partially Available', readonly=True, help="Checks if the move has some stock reserved", copy=False), 'price_unit': fields.float('Unit Price', help="Technical field used to record the product cost set by the user during a picking confirmation (when costing method used is 'average price' or 'real'). Value given in company currency and in product uom."), # as it's a technical field, we intentionally don't provide the digits attribute 'company_id': fields.many2one('res.company', 'Company', required=True, select=True), 'split_from': fields.many2one('stock.move', string="Move Split From", help="Technical field used to track the origin of a split move, which can be useful in case of debug", copy=False), 'backorder_id': fields.related('picking_id', 'backorder_id', type='many2one', relation="stock.picking", string="Back Order of", select=True), 'origin': fields.char("Source"), 'procure_method': fields.selection([('make_to_stock', 'Default: Take From Stock'), ('make_to_order', 'Advanced: Apply Procurement Rules')], 'Supply Method', required=True, help="""By default, the system will take from the stock in the source location and passively wait for availability. The other possibility allows you to directly create a procurement on the source location (and thus ignore its current stock) to gather products. 
If we want to chain moves and have this one to wait for the previous, this second option should be chosen."""), # used for colors in tree views: 'scrapped': fields.related('location_dest_id', 'scrap_location', type='boolean', relation='stock.location', string='Scrapped', readonly=True), 'quant_ids': fields.many2many('stock.quant', 'stock_quant_move_rel', 'move_id', 'quant_id', 'Moved Quants', copy=False), 'reserved_quant_ids': fields.one2many('stock.quant', 'reservation_id', 'Reserved quants'), 'linked_move_operation_ids': fields.one2many('stock.move.operation.link', 'move_id', string='Linked Operations', readonly=True, help='Operations that impact this move for the computation of the remaining quantities'), 'remaining_qty': fields.function(_get_remaining_qty, type='float', string='Remaining Quantity', digits=0, states={'done': [('readonly', True)]}, help="Remaining Quantity in default UoM according to operations matched with this move"), 'procurement_id': fields.many2one('procurement.order', 'Procurement'), 'group_id': fields.many2one('procurement.group', 'Procurement Group'), 'rule_id': fields.many2one('procurement.rule', 'Procurement Rule', help='The pull rule that created this stock move'), 'push_rule_id': fields.many2one('stock.location.path', 'Push Rule', help='The push rule that created this stock move'), 'propagate': fields.boolean('Propagate cancel and split', help='If checked, when this move is cancelled, cancel the linked move too'), 'picking_type_id': fields.many2one('stock.picking.type', 'Picking Type'), 'inventory_id': fields.many2one('stock.inventory', 'Inventory'), 'lot_ids': fields.function(_get_lot_ids, type='many2many', relation='stock.production.lot', string='Lots'), 'origin_returned_move_id': fields.many2one('stock.move', 'Origin return move', help='move that created the return move', copy=False), 'returned_move_ids': fields.one2many('stock.move', 'origin_returned_move_id', 'All returned moves', help='Optional: all returned moves created from 
this move'), 'reserved_availability': fields.function(_get_reserved_availability, type='float', string='Quantity Reserved', readonly=True, help='Quantity that has already been reserved for this move'), 'availability': fields.function(_get_product_availability, type='float', string='Quantity Available', readonly=True, help='Quantity in stock that can still be reserved for this move'), 'string_availability_info': fields.function(_get_string_qty_information, type='text', string='Availability', readonly=True, help='Show various information on stock availability for this move'), 'restrict_lot_id': fields.many2one('stock.production.lot', 'Lot', help="Technical field used to depict a restriction on the lot of quants to consider when marking this move as 'done'"), 'restrict_partner_id': fields.many2one('res.partner', 'Owner ', help="Technical field used to depict a restriction on the ownership of quants to consider when marking this move as 'done'"), 'route_ids': fields.many2many('stock.location.route', 'stock_location_route_move', 'move_id', 'route_id', 'Destination route', help="Preferred route to be followed by the procurement order"), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', help="Technical field depicting the warehouse to consider for the route selection on the next procurement (if any)."), } def _default_location_destination(self, cr, uid, context=None): context = context or {} if context.get('default_picking_type_id', False): pick_type = self.pool.get('stock.picking.type').browse(cr, uid, context['default_picking_type_id'], context=context) return pick_type.default_location_dest_id and pick_type.default_location_dest_id.id or False return False def _default_location_source(self, cr, uid, context=None): context = context or {} if context.get('default_picking_type_id', False): pick_type = self.pool.get('stock.picking.type').browse(cr, uid, context['default_picking_type_id'], context=context) return pick_type.default_location_src_id and 
pick_type.default_location_src_id.id or False return False def _default_destination_address(self, cr, uid, context=None): return False def _default_group_id(self, cr, uid, context=None): context = context or {} if context.get('default_picking_id', False): picking = self.pool.get('stock.picking').browse(cr, uid, context['default_picking_id'], context=context) return picking.group_id.id return False _defaults = { 'location_id': _default_location_source, 'location_dest_id': _default_location_destination, 'partner_id': _default_destination_address, 'state': 'draft', 'priority': '1', 'product_uom_qty': 1.0, 'scrapped': False, 'date': fields.datetime.now, 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.move', context=c), 'date_expected': fields.datetime.now, 'procure_method': 'make_to_stock', 'propagate': True, 'partially_available': False, 'group_id': _default_group_id, } def _check_uom(self, cr, uid, ids, context=None): for move in self.browse(cr, uid, ids, context=context): if move.product_id.uom_id.category_id.id != move.product_uom.category_id.id: return False return True _constraints = [ (_check_uom, 'You try to move a product using a UoM that is not compatible with the UoM of the product moved. 
Please use an UoM in the same UoM category.', ['product_uom']), ] def init(self, cr): cr.execute('SELECT indexname FROM pg_indexes WHERE indexname = %s', ('stock_move_product_location_index',)) if not cr.fetchone(): cr.execute('CREATE INDEX stock_move_product_location_index ON stock_move (product_id, location_id, location_dest_id, company_id, state)') @api.cr_uid_ids_context def do_unreserve(self, cr, uid, move_ids, context=None): quant_obj = self.pool.get("stock.quant") for move in self.browse(cr, uid, move_ids, context=context): if move.state in ('done', 'cancel'): raise osv.except_osv(_('Operation Forbidden!'), _('Cannot unreserve a done move')) quant_obj.quants_unreserve(cr, uid, move, context=context) if self.find_move_ancestors(cr, uid, move, context=context): self.write(cr, uid, [move.id], {'state': 'waiting'}, context=context) else: self.write(cr, uid, [move.id], {'state': 'confirmed'}, context=context) def _prepare_procurement_from_move(self, cr, uid, move, context=None): origin = (move.group_id and (move.group_id.name + ":") or "") + (move.rule_id and move.rule_id.name or move.origin or move.picking_id.name or "/") group_id = move.group_id and move.group_id.id or False if move.rule_id: if move.rule_id.group_propagation_option == 'fixed' and move.rule_id.group_id: group_id = move.rule_id.group_id.id elif move.rule_id.group_propagation_option == 'none': group_id = False return { 'name': move.rule_id and move.rule_id.name or "/", 'origin': origin, 'company_id': move.company_id and move.company_id.id or False, 'date_planned': move.date, 'product_id': move.product_id.id, 'product_qty': move.product_uom_qty, 'product_uom': move.product_uom.id, 'product_uos_qty': (move.product_uos and move.product_uos_qty) or move.product_uom_qty, 'product_uos': (move.product_uos and move.product_uos.id) or move.product_uom.id, 'location_id': move.location_id.id, 'move_dest_id': move.id, 'group_id': group_id, 'route_ids': [(4, x.id) for x in move.route_ids], 'warehouse_id': 
move.warehouse_id.id or (move.picking_type_id and move.picking_type_id.warehouse_id.id or False),
            'priority': move.priority,
        }

    def _push_apply(self, cr, uid, moves, context=None):
        # Apply the first matching push rule (stock.location.path) to each move.
        push_obj = self.pool.get("stock.location.path")
        for move in moves:
            #1) if the move is already chained, there is no need to check push rules
            #2) if the move is a returned move, we don't want to check push rules, as returning a returned move is the only decent way
            #   to receive goods without triggering the push rules again (which would duplicate chained operations)
            if not move.move_dest_id and not move.origin_returned_move_id:
                domain = [('location_from_id', '=', move.location_dest_id.id)]
                #priority goes to the route defined on the product and product category
                route_ids = [x.id for x in move.product_id.route_ids + move.product_id.categ_id.total_route_ids]
                rules = push_obj.search(cr, uid, domain + [('route_id', 'in', route_ids)], order='route_sequence, sequence', context=context)
                if not rules:
                    #then we search on the warehouse if a rule can apply
                    wh_route_ids = []
                    if move.warehouse_id:
                        wh_route_ids = [x.id for x in move.warehouse_id.route_ids]
                    elif move.picking_type_id and move.picking_type_id.warehouse_id:
                        wh_route_ids = [x.id for x in move.picking_type_id.warehouse_id.route_ids]
                    if wh_route_ids:
                        rules = push_obj.search(cr, uid, domain + [('route_id', 'in', wh_route_ids)], order='route_sequence, sequence', context=context)
                    if not rules:
                        #if no specialized push rule has been found yet, we try to find a general one (without route)
                        rules = push_obj.search(cr, uid, domain + [('route_id', '=', False)], order='sequence', context=context)
                if rules:
                    rule = push_obj.browse(cr, uid, rules[0], context=context)
                    push_obj._apply(cr, uid, rule, move, context=context)
        return True

    def _create_procurement(self, cr, uid, move, context=None):
        """ This will create a procurement order """
        return self.pool.get("procurement.order").create(cr, uid, self._prepare_procurement_from_move(cr, uid, move, context=context), context=context)

    def _create_procurements(self, cr, uid, moves, context=None):
        # batch helper: one procurement per move; returns the created ids
        res = []
        for move in moves:
            res.append(self._create_procurement(cr, uid, move, context=context))
        return res

    def write(self, cr, uid, ids, vals, context=None):
        # Guard done moves against edits of frozen fields, then propagate
        # quantity/date changes along the move chain (move_dest_id) before the
        # actual write.
        if context is None:
            context = {}
        if isinstance(ids, (int, long)):
            ids = [ids]
        # Check that we do not modify a stock.move which is done
        frozen_fields = set(['product_qty', 'product_uom', 'product_uos_qty', 'product_uos', 'location_id', 'location_dest_id', 'product_id'])
        for move in self.browse(cr, uid, ids, context=context):
            if move.state == 'done':
                if frozen_fields.intersection(vals):
                    raise osv.except_osv(_('Operation Forbidden!'),
                                         _('Quantities, Units of Measure, Products and Locations cannot be modified on stock moves that have already been processed (except by the Administrator).'))
        propagated_changes_dict = {}
        #propagation of quantity change
        if vals.get('product_uom_qty'):
            propagated_changes_dict['product_uom_qty'] = vals['product_uom_qty']
        if vals.get('product_uom_id'):
            propagated_changes_dict['product_uom_id'] = vals['product_uom_id']
        #propagation of expected date:
        propagated_date_field = False
        if vals.get('date_expected'):
            #propagate any manual change of the expected date
            propagated_date_field = 'date_expected'
        elif (vals.get('state', '') == 'done' and vals.get('date')):
            #propagate also any delta observed when setting the move as done
            propagated_date_field = 'date'

        if not context.get('do_not_propagate', False) and (propagated_date_field or propagated_changes_dict):
            #any propagation is (maybe) needed
            for move in self.browse(cr, uid, ids, context=context):
                if move.move_dest_id and move.propagate:
                    if 'date_expected' in propagated_changes_dict:
                        propagated_changes_dict.pop('date_expected')
                    if propagated_date_field:
                        # only shift the destination move when the delta exceeds
                        # the company's configured propagation threshold (days)
                        current_date = datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT)
                        new_date = datetime.strptime(vals.get(propagated_date_field), DEFAULT_SERVER_DATETIME_FORMAT)
                        delta = new_date - current_date
                        if abs(delta.days) >= move.company_id.propagation_minimum_delta:
                            old_move_date = datetime.strptime(move.move_dest_id.date_expected, DEFAULT_SERVER_DATETIME_FORMAT)
                            new_move_date = (old_move_date + relativedelta.relativedelta(days=delta.days or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
                            propagated_changes_dict['date_expected'] = new_move_date
                    #For pushed moves as well as for pulled moves, propagate by recursive call of write().
                    #Note that, for pulled moves we intentionally don't propagate on the procurement.
                    if propagated_changes_dict:
                        self.write(cr, uid, [move.move_dest_id.id], propagated_changes_dict, context=context)
        return super(stock_move, self).write(cr, uid, ids, vals, context=context)

    def onchange_quantity(self, cr, uid, ids, product_id, product_qty, product_uom, product_uos):
        """ On change of product quantity finds UoM and UoS quantities
        @param product_id: Product id
        @param product_qty: Changed Quantity of product
        @param product_uom: Unit of measure of product
        @param product_uos: Unit of sale of product
        @return: Dictionary of values
        """
        result = {
            'product_uos_qty': 0.00
        }
        warning = {}

        if (not product_id) or (product_qty <= 0.0):
            result['product_qty'] = 0.0
            return {'value': result}

        product_obj = self.pool.get('product.product')
        uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff'])
        # Warn if the quantity was decreased
        if ids:
            for move in self.read(cr, uid, ids, ['product_qty']):
                if product_qty < move['product_qty']:
                    warning.update({
                        'title': _('Information'),
                        'message': _("By changing this quantity here, you accept the "
                                     "new quantity as complete: Odoo will not "
                                     "automatically generate a back order.")})
                    break

        if product_uos and product_uom and (product_uom != product_uos):
            precision = self.pool.get('decimal.precision').precision_get(cr, uid, 'Product UoS')
            result['product_uos_qty'] = float_round(product_qty * uos_coeff['uos_coeff'], precision_digits=precision)
        else:
            result['product_uos_qty'] = product_qty

        return {'value': result, 'warning':
warning} def onchange_uos_quantity(self, cr, uid, ids, product_id, product_uos_qty, product_uos, product_uom): """ On change of product quantity finds UoM and UoS quantities @param product_id: Product id @param product_uos_qty: Changed UoS Quantity of product @param product_uom: Unit of measure of product @param product_uos: Unit of sale of product @return: Dictionary of values """ result = { 'product_uom_qty': 0.00 } if (not product_id) or (product_uos_qty <= 0.0): result['product_uos_qty'] = 0.0 return {'value': result} product_obj = self.pool.get('product.product') uos_coeff = product_obj.read(cr, uid, product_id, ['uos_coeff']) # No warning if the quantity was decreased to avoid double warnings: # The clients should call onchange_quantity too anyway if product_uos and product_uom and (product_uom != product_uos): precision = self.pool.get('decimal.precision').precision_get(cr, uid, 'Product Unit of Measure') result['product_uom_qty'] = float_round(product_uos_qty / uos_coeff['uos_coeff'], precision_digits=precision) else: result['product_uom_qty'] = product_uos_qty return {'value': result} def onchange_product_id(self, cr, uid, ids, prod_id=False, loc_id=False, loc_dest_id=False, partner_id=False): """ On change of product id, if finds UoM, UoS, quantity and UoS quantity. 
@param prod_id: Changed Product id @param loc_id: Source location id @param loc_dest_id: Destination location id @param partner_id: Address id of partner @return: Dictionary of values """ if not prod_id: return {} user = self.pool.get('res.users').browse(cr, uid, uid) lang = user and user.lang or False if partner_id: addr_rec = self.pool.get('res.partner').browse(cr, uid, partner_id) if addr_rec: lang = addr_rec and addr_rec.lang or False ctx = {'lang': lang} product = self.pool.get('product.product').browse(cr, uid, [prod_id], context=ctx)[0] uos_id = product.uos_id and product.uos_id.id or False result = { 'name': product.partner_ref, 'product_uom': product.uom_id.id, 'product_uos': uos_id, 'product_uom_qty': 1.00, 'product_uos_qty': self.pool.get('stock.move').onchange_quantity(cr, uid, ids, prod_id, 1.00, product.uom_id.id, uos_id)['value']['product_uos_qty'], } if loc_id: result['location_id'] = loc_id if loc_dest_id: result['location_dest_id'] = loc_dest_id return {'value': result} def _prepare_picking_assign(self, cr, uid, move, context=None): """ Prepares a new picking for this move as it could not be assigned to another picking. This method is designed to be inherited. """ values = { 'origin': move.origin, 'company_id': move.company_id and move.company_id.id or False, 'move_type': move.group_id and move.group_id.move_type or 'direct', 'partner_id': move.partner_id.id or False, 'picking_type_id': move.picking_type_id and move.picking_type_id.id or False, } return values @api.cr_uid_ids_context def _picking_assign(self, cr, uid, move_ids, procurement_group, location_from, location_to, context=None): """Assign a picking on the given move_ids, which is a list of move supposed to share the same procurement_group, location_from and location_to (and company). Those attributes are also given as parameters. 
""" pick_obj = self.pool.get("stock.picking") # Use a SQL query as doing with the ORM will split it in different queries with id IN (,,) # In the next version, the locations on the picking should be stored again. query = """ SELECT stock_picking.id FROM stock_picking, stock_move WHERE stock_picking.state in ('draft', 'confirmed', 'waiting') AND stock_move.picking_id = stock_picking.id AND stock_move.location_id = %s AND stock_move.location_dest_id = %s AND """ params = (location_from, location_to) if not procurement_group: query += "stock_picking.group_id IS NULL LIMIT 1" else: query += "stock_picking.group_id = %s LIMIT 1" params += (procurement_group,) cr.execute(query, params) [pick] = cr.fetchone() or [None] if not pick: move = self.browse(cr, uid, move_ids, context=context)[0] values = self._prepare_picking_assign(cr, uid, move, context=context) pick = pick_obj.create(cr, uid, values, context=context) return self.write(cr, uid, move_ids, {'picking_id': pick}, context=context) def onchange_date(self, cr, uid, ids, date, date_expected, context=None): """ On change of Scheduled Date gives a Move date. @param date_expected: Scheduled Date @param date: Move Date @return: Move Date """ if not date_expected: date_expected = time.strftime(DEFAULT_SERVER_DATETIME_FORMAT) return {'value': {'date': date_expected}} def attribute_price(self, cr, uid, move, context=None): """ Attribute price to move, important in inter-company moves or receipts with only one partner """ if not move.price_unit: price = move.product_id.standard_price self.write(cr, uid, [move.id], {'price_unit': price}) def action_confirm(self, cr, uid, ids, context=None): """ Confirms stock move or put it in waiting if it's linked to another move. @return: List of ids. 
""" if not context: context = {} if isinstance(ids, (int, long)): ids = [ids] states = { 'confirmed': [], 'waiting': [] } to_assign = {} for move in self.browse(cr, uid, ids, context=context): self.attribute_price(cr, uid, move, context=context) state = 'confirmed' #if the move is preceeded, then it's waiting (if preceeding move is done, then action_assign has been called already and its state is already available) if move.move_orig_ids: state = 'waiting' #if the move is split and some of the ancestor was preceeded, then it's waiting as well elif move.split_from: move2 = move.split_from while move2 and state != 'waiting': if move2.move_orig_ids: state = 'waiting' move2 = move2.split_from states[state].append(move.id) if not move.picking_id and move.picking_type_id: key = (move.group_id.id, move.location_id.id, move.location_dest_id.id) if key not in to_assign: to_assign[key] = [] to_assign[key].append(move.id) moves = [move for move in self.browse(cr, uid, states['confirmed'], context=context) if move.procure_method == 'make_to_order'] self._create_procurements(cr, uid, moves, context=context) for move in moves: states['waiting'].append(move.id) states['confirmed'].remove(move.id) for state, write_ids in states.items(): if len(write_ids): self.write(cr, uid, write_ids, {'state': state}) #assign picking in batch for all confirmed move that share the same details for key, move_ids in to_assign.items(): procurement_group, location_from, location_to = key self._picking_assign(cr, uid, move_ids, procurement_group, location_from, location_to, context=context) moves = self.browse(cr, uid, ids, context=context) self._push_apply(cr, uid, moves, context=context) return ids def force_assign(self, cr, uid, ids, context=None): """ Changes the state to assigned. 
@return: True """ return self.write(cr, uid, ids, {'state': 'assigned'}, context=context) def check_tracking_product(self, cr, uid, product, lot_id, location, location_dest, context=None): check = False if product.track_all and not location_dest.usage == 'inventory': check = True elif product.track_incoming and location.usage in ('supplier', 'transit', 'inventory') and location_dest.usage == 'internal': check = True elif product.track_outgoing and location_dest.usage in ('customer', 'transit') and location.usage == 'internal': check = True if check and not lot_id: raise osv.except_osv(_('Warning!'), _('You must assign a serial number for the product %s') % (product.name)) def check_tracking(self, cr, uid, move, lot_id, context=None): """ Checks if serial number is assigned to stock move or not and raise an error if it had to. """ self.check_tracking_product(cr, uid, move.product_id, lot_id, move.location_id, move.location_dest_id, context=context) def action_assign(self, cr, uid, ids, context=None): """ Checks the product type and accordingly writes the state. 
""" context = context or {} quant_obj = self.pool.get("stock.quant") to_assign_moves = [] main_domain = {} todo_moves = [] operations = set() for move in self.browse(cr, uid, ids, context=context): if move.state not in ('confirmed', 'waiting', 'assigned'): continue if move.location_id.usage in ('supplier', 'inventory', 'production'): to_assign_moves.append(move.id) #in case the move is returned, we want to try to find quants before forcing the assignment if not move.origin_returned_move_id: continue if move.product_id.type == 'consu': to_assign_moves.append(move.id) continue else: todo_moves.append(move) #we always keep the quants already assigned and try to find the remaining quantity on quants not assigned only main_domain[move.id] = [('reservation_id', '=', False), ('qty', '>', 0)] #if the move is preceeded, restrict the choice of quants in the ones moved previously in original move ancestors = self.find_move_ancestors(cr, uid, move, context=context) if move.state == 'waiting' and not ancestors: #if the waiting move hasn't yet any ancestor (PO/MO not confirmed yet), don't find any quant available in stock main_domain[move.id] += [('id', '=', False)] elif ancestors: main_domain[move.id] += [('history_ids', 'in', ancestors)] #if the move is returned from another, restrict the choice of quants to the ones that follow the returned move if move.origin_returned_move_id: main_domain[move.id] += [('history_ids', 'in', move.origin_returned_move_id.id)] for link in move.linked_move_operation_ids: operations.add(link.operation_id) # Check all ops and sort them: we want to process first the packages, then operations with lot then the rest operations = list(operations) operations.sort(key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0)) for ops in operations: #first try to find quants based on specific domains given by linked operations for record in ops.linked_move_operation_ids: move = record.move_id if 
move.id in main_domain: domain = main_domain[move.id] + self.pool.get('stock.move.operation.link').get_specific_domain(cr, uid, record, context=context) qty = record.qty if qty: quants = quant_obj.quants_get_prefered_domain(cr, uid, ops.location_id, move.product_id, qty, domain=domain, prefered_domain_list=[], restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context) quant_obj.quants_reserve(cr, uid, quants, move, record, context=context) for move in todo_moves: if move.linked_move_operation_ids: continue #then if the move isn't totally assigned, try to find quants without any specific domain if move.state != 'assigned': qty_already_assigned = move.reserved_availability qty = move.product_qty - qty_already_assigned quants = quant_obj.quants_get_prefered_domain(cr, uid, move.location_id, move.product_id, qty, domain=main_domain[move.id], prefered_domain_list=[], restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context) quant_obj.quants_reserve(cr, uid, quants, move, context=context) #force assignation of consumable products and incoming from supplier/inventory/production if to_assign_moves: self.force_assign(cr, uid, to_assign_moves, context=context) def action_cancel(self, cr, uid, ids, context=None): """ Cancels the moves and if all moves are cancelled it cancels the picking. 
@return: True """ procurement_obj = self.pool.get('procurement.order') context = context or {} procs_to_check = [] for move in self.browse(cr, uid, ids, context=context): if move.state == 'done': raise osv.except_osv(_('Operation Forbidden!'), _('You cannot cancel a stock move that has been set to \'Done\'.')) if move.reserved_quant_ids: self.pool.get("stock.quant").quants_unreserve(cr, uid, move, context=context) if context.get('cancel_procurement'): if move.propagate: procurement_ids = procurement_obj.search(cr, uid, [('move_dest_id', '=', move.id)], context=context) procurement_obj.cancel(cr, uid, procurement_ids, context=context) else: if move.move_dest_id: if move.propagate: self.action_cancel(cr, uid, [move.move_dest_id.id], context=context) elif move.move_dest_id.state == 'waiting': #If waiting, the chain will be broken and we are not sure if we can still wait for it (=> could take from stock instead) self.write(cr, uid, [move.move_dest_id.id], {'state': 'confirmed'}, context=context) if move.procurement_id: # Does the same as procurement check, only eliminating a refresh procs_to_check.append(move.procurement_id.id) res = self.write(cr, uid, ids, {'state': 'cancel', 'move_dest_id': False}, context=context) if procs_to_check: procurement_obj.check(cr, uid, procs_to_check, context=context) return res def _check_package_from_moves(self, cr, uid, ids, context=None): pack_obj = self.pool.get("stock.quant.package") packs = set() for move in self.browse(cr, uid, ids, context=context): packs |= set([q.package_id for q in move.quant_ids if q.package_id and q.qty > 0]) return pack_obj._check_location_constraint(cr, uid, list(packs), context=context) def find_move_ancestors(self, cr, uid, move, context=None): '''Find the first level ancestors of given move ''' ancestors = [] move2 = move while move2: ancestors += [x.id for x in move2.move_orig_ids] #loop on the split_from to find the ancestor of split moves only if the move has not direct ancestor (priority goes to 
them) move2 = not move2.move_orig_ids and move2.split_from or False return ancestors @api.cr_uid_ids_context def recalculate_move_state(self, cr, uid, move_ids, context=None): '''Recompute the state of moves given because their reserved quants were used to fulfill another operation''' for move in self.browse(cr, uid, move_ids, context=context): vals = {} reserved_quant_ids = move.reserved_quant_ids if len(reserved_quant_ids) > 0 and not move.partially_available: vals['partially_available'] = True if len(reserved_quant_ids) == 0 and move.partially_available: vals['partially_available'] = False if move.state == 'assigned': if self.find_move_ancestors(cr, uid, move, context=context): vals['state'] = 'waiting' else: vals['state'] = 'confirmed' if vals: self.write(cr, uid, [move.id], vals, context=context) def action_done(self, cr, uid, ids, context=None): """ Process completely the moves given as ids and if all moves are done, it will finish the picking. """ context = context or {} picking_obj = self.pool.get("stock.picking") quant_obj = self.pool.get("stock.quant") todo = [move.id for move in self.browse(cr, uid, ids, context=context) if move.state == "draft"] if todo: ids = self.action_confirm(cr, uid, todo, context=context) pickings = set() procurement_ids = set() #Search operations that are linked to the moves operations = set() move_qty = {} for move in self.browse(cr, uid, ids, context=context): move_qty[move.id] = move.product_qty for link in move.linked_move_operation_ids: operations.add(link.operation_id) #Sort operations according to entire packages first, then package + lot, package only, lot only operations = list(operations) operations.sort(key=lambda x: ((x.package_id and not x.product_id) and -4 or 0) + (x.package_id and -2 or 0) + (x.lot_id and -1 or 0)) for ops in operations: if ops.picking_id: pickings.add(ops.picking_id.id) main_domain = [('qty', '>', 0)] for record in ops.linked_move_operation_ids: move = record.move_id self.check_tracking(cr, uid, 
move, not ops.product_id and ops.package_id.id or ops.lot_id.id, context=context) prefered_domain = [('reservation_id', '=', move.id)] fallback_domain = [('reservation_id', '=', False)] fallback_domain2 = ['&', ('reservation_id', '!=', move.id), ('reservation_id', '!=', False)] prefered_domain_list = [prefered_domain] + [fallback_domain] + [fallback_domain2] dom = main_domain + self.pool.get('stock.move.operation.link').get_specific_domain(cr, uid, record, context=context) quants = quant_obj.quants_get_prefered_domain(cr, uid, ops.location_id, move.product_id, record.qty, domain=dom, prefered_domain_list=prefered_domain_list, restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context) if ops.product_id: #If a product is given, the result is always put immediately in the result package (if it is False, they are without package) quant_dest_package_id = ops.result_package_id.id ctx = context else: # When a pack is moved entirely, the quants should not be written anything for the destination package quant_dest_package_id = False ctx = context.copy() ctx['entire_pack'] = True quant_obj.quants_move(cr, uid, quants, move, ops.location_dest_id, location_from=ops.location_id, lot_id=ops.lot_id.id, owner_id=ops.owner_id.id, src_package_id=ops.package_id.id, dest_package_id=quant_dest_package_id, context=ctx) # Handle pack in pack if not ops.product_id and ops.package_id and ops.result_package_id.id != ops.package_id.parent_id.id: self.pool.get('stock.quant.package').write(cr, SUPERUSER_ID, [ops.package_id.id], {'parent_id': ops.result_package_id.id}, context=context) if not move_qty.get(move.id): raise osv.except_osv(_("Error"), _("The roundings of your Unit of Measures %s on the move vs. %s on the product don't allow to do these operations or you are not transferring the picking at once. 
") % (move.product_uom.name, move.product_id.uom_id.name)) move_qty[move.id] -= record.qty #Check for remaining qtys and unreserve/check move_dest_id in move_dest_ids = set() for move in self.browse(cr, uid, ids, context=context): move_qty_cmp = float_compare(move_qty[move.id], 0, precision_rounding=move.product_id.uom_id.rounding) if move_qty_cmp > 0: # (=In case no pack operations in picking) main_domain = [('qty', '>', 0)] prefered_domain = [('reservation_id', '=', move.id)] fallback_domain = [('reservation_id', '=', False)] fallback_domain2 = ['&', ('reservation_id', '!=', move.id), ('reservation_id', '!=', False)] prefered_domain_list = [prefered_domain] + [fallback_domain] + [fallback_domain2] self.check_tracking(cr, uid, move, move.restrict_lot_id.id, context=context) qty = move_qty[move.id] quants = quant_obj.quants_get_prefered_domain(cr, uid, move.location_id, move.product_id, qty, domain=main_domain, prefered_domain_list=prefered_domain_list, restrict_lot_id=move.restrict_lot_id.id, restrict_partner_id=move.restrict_partner_id.id, context=context) quant_obj.quants_move(cr, uid, quants, move, move.location_dest_id, lot_id=move.restrict_lot_id.id, owner_id=move.restrict_partner_id.id, context=context) # If the move has a destination, add it to the list to reserve if move.move_dest_id and move.move_dest_id.state in ('waiting', 'confirmed'): move_dest_ids.add(move.move_dest_id.id) if move.procurement_id: procurement_ids.add(move.procurement_id.id) #unreserve the quants and make them available for other operations/moves quant_obj.quants_unreserve(cr, uid, move, context=context) # Check the packages have been placed in the correct locations self._check_package_from_moves(cr, uid, ids, context=context) #set the move as done self.write(cr, uid, ids, {'state': 'done', 'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context) self.pool.get('procurement.order').check(cr, uid, list(procurement_ids), context=context) #assign destination moves if 
move_dest_ids: self.action_assign(cr, uid, list(move_dest_ids), context=context) #check picking state to set the date_done is needed done_picking = [] for picking in picking_obj.browse(cr, uid, list(pickings), context=context): if picking.state == 'done' and not picking.date_done: done_picking.append(picking.id) if done_picking: picking_obj.write(cr, uid, done_picking, {'date_done': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)}, context=context) return True def unlink(self, cr, uid, ids, context=None): context = context or {} for move in self.browse(cr, uid, ids, context=context): if move.state not in ('draft', 'cancel'): raise osv.except_osv(_('User Error!'), _('You can only delete draft moves.')) return super(stock_move, self).unlink(cr, uid, ids, context=context) def action_scrap(self, cr, uid, ids, quantity, location_id, restrict_lot_id=False, restrict_partner_id=False, context=None): """ Move the scrap/damaged product into scrap location @param cr: the database cursor @param uid: the user id @param ids: ids of stock move object to be scrapped @param quantity : specify scrap qty @param location_id : specify scrap location @param context: context arguments @return: Scraped lines """ quant_obj = self.pool.get("stock.quant") #quantity should be given in MOVE UOM if quantity <= 0: raise osv.except_osv(_('Warning!'), _('Please provide a positive quantity to scrap.')) res = [] for move in self.browse(cr, uid, ids, context=context): source_location = move.location_id if move.state == 'done': source_location = move.location_dest_id #Previously used to prevent scraping from virtual location but not necessary anymore #if source_location.usage != 'internal': #restrict to scrap from a virtual location because it's meaningless and it may introduce errors in stock ('creating' new products from nowhere) #raise osv.except_osv(_('Error!'), _('Forbidden operation: it is not allowed to scrap products from a virtual location.')) move_qty = move.product_qty uos_qty = quantity / 
move_qty * move.product_uos_qty default_val = { 'location_id': source_location.id, 'product_uom_qty': quantity, 'product_uos_qty': uos_qty, 'state': move.state, 'scrapped': True, 'location_dest_id': location_id, 'restrict_lot_id': restrict_lot_id, 'restrict_partner_id': restrict_partner_id, } new_move = self.copy(cr, uid, move.id, default_val) res += [new_move] product_obj = self.pool.get('product.product') for product in product_obj.browse(cr, uid, [move.product_id.id], context=context): if move.picking_id: uom = product.uom_id.name if product.uom_id else '' message = _("%s %s %s has been <b>moved to</b> scrap.") % (quantity, uom, product.name) move.picking_id.message_post(body=message) # We "flag" the quant from which we want to scrap the products. To do so: # - we select the quants related to the move we scrap from # - we reserve the quants with the scrapped move # See self.action_done, et particularly how is defined the "prefered_domain" for clarification scrap_move = self.browse(cr, uid, new_move, context=context) if move.state == 'done' and scrap_move.location_id.usage not in ('supplier', 'inventory', 'production'): domain = [('qty', '>', 0), ('history_ids', 'in', [move.id])] # We use scrap_move data since a reservation makes sense for a move not already done quants = quant_obj.quants_get_prefered_domain(cr, uid, scrap_move.location_id, scrap_move.product_id, quantity, domain=domain, prefered_domain_list=[], restrict_lot_id=scrap_move.restrict_lot_id.id, restrict_partner_id=scrap_move.restrict_partner_id.id, context=context) quant_obj.quants_reserve(cr, uid, quants, scrap_move, context=context) self.action_done(cr, uid, res, context=context) return res def split(self, cr, uid, move, qty, restrict_lot_id=False, restrict_partner_id=False, context=None): """ Splits qty from move move into a new move :param move: browse record :param qty: float. 
quantity to split (given in product UoM) :param restrict_lot_id: optional production lot that can be given in order to force the new move to restrict its choice of quants to this lot. :param restrict_partner_id: optional partner that can be given in order to force the new move to restrict its choice of quants to the ones belonging to this partner. :param context: dictionay. can contains the special key 'source_location_id' in order to force the source location when copying the move returns the ID of the backorder move created """ if move.state in ('done', 'cancel'): raise osv.except_osv(_('Error'), _('You cannot split a move done')) if move.state == 'draft': #we restrict the split of a draft move because if not confirmed yet, it may be replaced by several other moves in #case of phantom bom (with mrp module). And we don't want to deal with this complexity by copying the product that will explode. raise osv.except_osv(_('Error'), _('You cannot split a draft move. It needs to be confirmed first.')) if move.product_qty <= qty or qty == 0: return move.id uom_obj = self.pool.get('product.uom') context = context or {} #HALF-UP rounding as only rounding errors will be because of propagation of error from default UoM uom_qty = uom_obj._compute_qty_obj(cr, uid, move.product_id.uom_id, qty, move.product_uom, rounding_method='HALF-UP', context=context) uos_qty = uom_qty * move.product_uos_qty / move.product_uom_qty defaults = { 'product_uom_qty': uom_qty, 'product_uos_qty': uos_qty, 'procure_method': 'make_to_stock', 'restrict_lot_id': restrict_lot_id, 'restrict_partner_id': restrict_partner_id, 'split_from': move.id, 'procurement_id': move.procurement_id.id, 'move_dest_id': move.move_dest_id.id, 'origin_returned_move_id': move.origin_returned_move_id.id, } if context.get('source_location_id'): defaults['location_id'] = context['source_location_id'] new_move = self.copy(cr, uid, move.id, defaults, context=context) ctx = context.copy() ctx['do_not_propagate'] = True 
self.write(cr, uid, [move.id], { 'product_uom_qty': move.product_uom_qty - uom_qty, 'product_uos_qty': move.product_uos_qty - uos_qty, }, context=ctx) if move.move_dest_id and move.propagate and move.move_dest_id.state not in ('done', 'cancel'): new_move_prop = self.split(cr, uid, move.move_dest_id, qty, context=context) self.write(cr, uid, [new_move], {'move_dest_id': new_move_prop}, context=context) #returning the first element of list returned by action_confirm is ok because we checked it wouldn't be exploded (and #thus the result of action_confirm should always be a list of 1 element length) return self.action_confirm(cr, uid, [new_move], context=context)[0] def get_code_from_locs(self, cr, uid, move, location_id=False, location_dest_id=False, context=None): """ Returns the code the picking type should have. This can easily be used to check if a move is internal or not move, location_id and location_dest_id are browse records """ code = 'internal' src_loc = location_id or move.location_id dest_loc = location_dest_id or move.location_dest_id if src_loc.usage == 'internal' and dest_loc.usage != 'internal': code = 'outgoing' if src_loc.usage != 'internal' and dest_loc.usage == 'internal': code = 'incoming' return code def _get_taxes(self, cr, uid, move, context=None): return [] class stock_inventory(osv.osv): _name = "stock.inventory" _description = "Inventory" def _get_move_ids_exist(self, cr, uid, ids, field_name, arg, context=None): res = {} for inv in self.browse(cr, uid, ids, context=context): res[inv.id] = False if inv.move_ids: res[inv.id] = True return res def _get_available_filters(self, cr, uid, context=None): """ This function will return the list of filter allowed according to the options checked in 'Settings\Warehouse'. 
:rtype: list of tuple """ #default available choices res_filter = [('none', _('All products')), ('partial', _('Manual Selection of Products')), ('product', _('One product only'))] settings_obj = self.pool.get('stock.config.settings') config_ids = settings_obj.search(cr, uid, [], limit=1, order='id DESC', context=context) #If we don't have updated config until now, all fields are by default false and so should be not dipslayed if not config_ids: return res_filter stock_settings = settings_obj.browse(cr, uid, config_ids[0], context=context) if stock_settings.group_stock_tracking_owner: res_filter.append(('owner', _('One owner only'))) res_filter.append(('product_owner', _('One product for a specific owner'))) if stock_settings.group_stock_tracking_lot: res_filter.append(('lot', _('One Lot/Serial Number'))) if stock_settings.group_stock_packaging: res_filter.append(('pack', _('A Pack'))) return res_filter def _get_total_qty(self, cr, uid, ids, field_name, args, context=None): res = {} for inv in self.browse(cr, uid, ids, context=context): res[inv.id] = sum([x.product_qty for x in inv.line_ids]) return res INVENTORY_STATE_SELECTION = [ ('draft', 'Draft'), ('cancel', 'Cancelled'), ('confirm', 'In Progress'), ('done', 'Validated'), ] _columns = { 'name': fields.char('Inventory Reference', required=True, readonly=True, states={'draft': [('readonly', False)]}, help="Inventory Name."), 'date': fields.datetime('Inventory Date', required=True, readonly=True, help="The date that will be used for the stock level check of the products and the validation of the stock move related to this inventory."), 'line_ids': fields.one2many('stock.inventory.line', 'inventory_id', 'Inventories', readonly=False, states={'done': [('readonly', True)]}, help="Inventory Lines.", copy=True), 'move_ids': fields.one2many('stock.move', 'inventory_id', 'Created Moves', help="Inventory Moves.", states={'done': [('readonly', True)]}), 'state': fields.selection(INVENTORY_STATE_SELECTION, 'Status', 
readonly=True, select=True, copy=False), 'company_id': fields.many2one('res.company', 'Company', required=True, select=True, readonly=True, states={'draft': [('readonly', False)]}), 'location_id': fields.many2one('stock.location', 'Inventoried Location', required=True, readonly=True, states={'draft': [('readonly', False)]}), 'product_id': fields.many2one('product.product', 'Inventoried Product', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Product to focus your inventory on a particular Product."), 'package_id': fields.many2one('stock.quant.package', 'Inventoried Pack', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Pack to focus your inventory on a particular Pack."), 'partner_id': fields.many2one('res.partner', 'Inventoried Owner', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Owner to focus your inventory on a particular Owner."), 'lot_id': fields.many2one('stock.production.lot', 'Inventoried Lot/Serial Number', readonly=True, states={'draft': [('readonly', False)]}, help="Specify Lot/Serial Number to focus your inventory on a particular Lot/Serial Number.", copy=False), 'move_ids_exist': fields.function(_get_move_ids_exist, type='boolean', string=' Stock Move Exists?', help='technical field for attrs in view'), 'filter': fields.selection(_get_available_filters, 'Inventory of', required=True, help="If you do an entire inventory, you can choose 'All Products' and it will prefill the inventory with the current stock. If you only do some products "\ "(e.g. Cycle Counting) you can choose 'Manual Selection of Products' and the system won't propose anything. You can also let the "\ "system propose for a single product / lot /... 
"), 'total_qty': fields.function(_get_total_qty, type="float"), } def _default_stock_location(self, cr, uid, context=None): try: warehouse = self.pool.get('ir.model.data').get_object(cr, uid, 'stock', 'warehouse0') return warehouse.lot_stock_id.id except: return False _defaults = { 'date': fields.datetime.now, 'state': 'draft', 'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c), 'location_id': _default_stock_location, 'filter': 'none', } def reset_real_qty(self, cr, uid, ids, context=None): inventory = self.browse(cr, uid, ids[0], context=context) line_ids = [line.id for line in inventory.line_ids] self.pool.get('stock.inventory.line').write(cr, uid, line_ids, {'product_qty': 0}) return True def action_done(self, cr, uid, ids, context=None): """ Finish the inventory @return: True """ for inv in self.browse(cr, uid, ids, context=context): for inventory_line in inv.line_ids: if inventory_line.product_qty < 0 and inventory_line.product_qty != inventory_line.theoretical_qty: raise osv.except_osv(_('Warning'), _('You cannot set a negative product quantity in an inventory line:\n\t%s - qty: %s' % (inventory_line.product_id.name, inventory_line.product_qty))) self.action_check(cr, uid, [inv.id], context=context) self.write(cr, uid, [inv.id], {'state': 'done'}, context=context) self.post_inventory(cr, uid, inv, context=context) return True def post_inventory(self, cr, uid, inv, context=None): #The inventory is posted as a single step which means quants cannot be moved from an internal location to another using an inventory #as they will be moved to inventory loss, and other quants will be created to the encoded quant location. This is a normal behavior #as quants cannot be reuse from inventory location (users can still manually move the products before/after the inventory if they want). 
        move_obj = self.pool.get('stock.move')
        # Validate every generated move that is not already done.
        move_obj.action_done(cr, uid, [x.id for x in inv.move_ids if x.state != 'done'], context=context)

    def action_check(self, cr, uid, ids, context=None):
        """ Checks the inventory and computes the stock move to do
        @return: True
        """
        inventory_line_obj = self.pool.get('stock.inventory.line')
        stock_move_obj = self.pool.get('stock.move')
        for inventory in self.browse(cr, uid, ids, context=context):
            #first remove the existing stock moves linked to this inventory
            move_ids = [move.id for move in inventory.move_ids]
            stock_move_obj.unlink(cr, uid, move_ids, context=context)
            for line in inventory.line_ids:
                #compare the checked quantities on inventory lines to the theorical one
                # NOTE(review): the returned move id is never used here; the move is
                # created as a side effect of _resolve_inventory_line.
                stock_move = inventory_line_obj._resolve_inventory_line(cr, uid, line, context=context)

    def action_cancel_draft(self, cr, uid, ids, context=None):
        """ Cancels the stock move and change inventory state to draft.
        @return: True
        """
        for inv in self.browse(cr, uid, ids, context=context):
            # (5,) one2many command: unlink all existing inventory lines.
            self.write(cr, uid, [inv.id], {'line_ids': [(5,)]}, context=context)
            self.pool.get('stock.move').action_cancel(cr, uid, [x.id for x in inv.move_ids], context=context)
            self.write(cr, uid, [inv.id], {'state': 'draft'}, context=context)
        return True

    def action_cancel_inventory(self, cr, uid, ids, context=None):
        # Thin alias kept for view/workflow compatibility; returns None.
        self.action_cancel_draft(cr, uid, ids, context=context)

    def prepare_inventory(self, cr, uid, ids, context=None):
        # Move the inventory to 'In Progress', pre-filling lines from current
        # quant levels unless lines already exist or filter is 'partial'.
        inventory_line_obj = self.pool.get('stock.inventory.line')
        for inventory in self.browse(cr, uid, ids, context=context):
            # If there are inventory lines already (e.g. from import), respect those and set their theoretical qty
            line_ids = [line.id for line in inventory.line_ids]
            if not line_ids and inventory.filter != 'partial':
                #compute the inventory lines and create them
                vals = self._get_inventory_lines(cr, uid, inventory, context=context)
                for product_line in vals:
                    inventory_line_obj.create(cr, uid, product_line, context=context)
        return self.write(cr, uid, ids, {'state': 'confirm', 'date': time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)})

    def _get_inventory_lines(self, cr, uid, inventory, context=None):
        # Build create() value dicts for inventory lines by aggregating quants
        # under the inventoried location, narrowed by the active filter fields.
        location_obj = self.pool.get('stock.location')
        product_obj = self.pool.get('product.product')
        location_ids = location_obj.search(cr, uid, [('id', 'child_of', [inventory.location_id.id])], context=context)
        # SQL WHERE clause assembled from fixed templates; all user-derived
        # values go through parameterized `args`, so no injection risk here.
        domain = ' location_id in %s'
        args = (tuple(location_ids),)
        if inventory.partner_id:
            domain += ' and owner_id = %s'
            args += (inventory.partner_id.id,)
        if inventory.lot_id:
            domain += ' and lot_id = %s'
            args += (inventory.lot_id.id,)
        if inventory.product_id:
            domain += ' and product_id = %s'
            args += (inventory.product_id.id,)
        if inventory.package_id:
            domain += ' and package_id = %s'
            args += (inventory.package_id.id,)

        cr.execute('''
           SELECT product_id, sum(qty) as product_qty, location_id, lot_id as prod_lot_id, package_id, owner_id as partner_id
           FROM stock_quant WHERE''' + domain + '''
           GROUP BY product_id, location_id, lot_id, package_id, partner_id
        ''', args)
        vals = []
        for product_line in cr.dictfetchall():
            #replace the None the dictionary by False, because falsy values are tested later on
            for key, value in product_line.items():
                if not value:
                    product_line[key] = False
            product_line['inventory_id'] = inventory.id
            product_line['theoretical_qty'] = product_line['product_qty']
            if product_line['product_id']:
                product = product_obj.browse(cr, uid, product_line['product_id'], context=context)
                product_line['product_uom_id'] = product.uom_id.id
            vals.append(product_line)
        return vals

    def _check_filter_product(self, cr, uid, ids, context=None):
        # Constraint: the focus fields (product/lot/owner/pack) may only be set
        # when the corresponding filter is selected.
        for inventory in self.browse(cr, uid, ids, context=context):
            if inventory.filter == 'none' and inventory.product_id and inventory.location_id and inventory.lot_id:
                return True
            if inventory.filter not in ('product', 'product_owner') and inventory.product_id:
                return False
            if inventory.filter != 'lot' and inventory.lot_id:
                return False
            if inventory.filter not in ('owner', 'product_owner') and inventory.partner_id:
                return False
            if inventory.filter != 'pack' and inventory.package_id:
                return False
        return True

    def onchange_filter(self, cr, uid, ids, filter, context=None):
        # Clear the focus fields that are not relevant for the chosen filter.
        to_clean = { 'value': {} }
        if filter not in ('product', 'product_owner'):
            to_clean['value']['product_id'] = False
        if filter != 'lot':
            to_clean['value']['lot_id'] = False
        if filter not in ('owner', 'product_owner'):
            to_clean['value']['partner_id'] = False
        if filter != 'pack':
            to_clean['value']['package_id'] = False
        return to_clean

    _constraints = [
        (_check_filter_product, 'The selected inventory options are not coherent.',
            ['filter', 'product_id', 'lot_id', 'partner_id', 'package_id']),
    ]


class stock_inventory_line(osv.osv):
    _name = "stock.inventory.line"
    _description = "Inventory Line"
    _order = "inventory_id, location_name, product_code, product_name, prodlot_name"

    def _get_product_name_change(self, cr, uid, ids, context=None):
        # store-trigger: lines to recompute when a product's name/code changes.
        return self.pool.get('stock.inventory.line').search(cr, uid, [('product_id', 'in', ids)], context=context)

    def _get_location_change(self, cr, uid, ids, context=None):
        # store-trigger: lines to recompute when a location changes.
        return self.pool.get('stock.inventory.line').search(cr, uid, [('location_id', 'in', ids)], context=context)

    def _get_prodlot_change(self, cr, uid, ids, context=None):
        # store-trigger: lines to recompute when a production lot changes.
        return self.pool.get('stock.inventory.line').search(cr, uid, [('prod_lot_id', 'in', ids)], context=context)

    def _get_theoretical_qty(self, cr, uid, ids, name, args, context=None):
        # Function-field getter: quantity currently on hand for the line's
        # (location, product, lot, owner, pack) combination, expressed in the
        # line's UoM.
        res = {}
        quant_obj = self.pool["stock.quant"]
        uom_obj = self.pool["product.uom"]
        for line in self.browse(cr, uid, ids, context=context):
            quant_ids = self._get_quants(cr, uid, line, context=context)
            quants = quant_obj.browse(cr, uid, quant_ids, context=context)
            tot_qty = sum([x.qty for x in quants])
            if line.product_uom_id and line.product_id.uom_id.id != line.product_uom_id.id:
                tot_qty = uom_obj._compute_qty_obj(cr, uid, line.product_id.uom_id, tot_qty, line.product_uom_id, context=context)
            res[line.id] = tot_qty
        return res

    _columns = {
        'inventory_id': fields.many2one('stock.inventory', 'Inventory', ondelete='cascade', select=True),
        'location_id': fields.many2one('stock.location', 'Location', required=True, select=True),
        'product_id': fields.many2one('product.product', 'Product', required=True, select=True),
        'package_id': fields.many2one('stock.quant.package', 'Pack', select=True),
        'product_uom_id': fields.many2one('product.uom', 'Product Unit of Measure', required=True),
        'product_qty': fields.float('Checked Quantity', digits_compute=dp.get_precision('Product Unit of Measure')),
        'company_id': fields.related('inventory_id', 'company_id', type='many2one', relation='res.company', string='Company', store=True, select=True, readonly=True),
        'prod_lot_id': fields.many2one('stock.production.lot', 'Serial Number', domain="[('product_id','=',product_id)]"),
        'state': fields.related('inventory_id', 'state', type='char', string='Status', readonly=True),
        'theoretical_qty': fields.function(_get_theoretical_qty, type='float', digits_compute=dp.get_precision('Product Unit of Measure'),
                                           store={'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['location_id', 'product_id', 'package_id', 'product_uom_id', 'company_id', 'prod_lot_id', 'partner_id'], 20),},
                                           readonly=True, string="Theoretical Quantity"),
        'partner_id': fields.many2one('res.partner', 'Owner'),
        'product_name': fields.related('product_id', 'name', type='char', string='Product Name', store={
                                                            'product.product': (_get_product_name_change, ['name', 'default_code'], 20),
                                                            'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['product_id'], 20),}),
        'product_code': fields.related('product_id', 'default_code', type='char', string='Product Code', store={
                                                            'product.product': (_get_product_name_change, ['name', 'default_code'], 20),
                                                            'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['product_id'], 20),}),
        'location_name': fields.related('location_id', 'complete_name', type='char', string='Location Name', store={
                                                            'stock.location': (_get_location_change, ['name', 'location_id', 'active'], 20),
                                                            'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['location_id'], 20),}),
        'prodlot_name': fields.related('prod_lot_id', 'name', type='char', string='Serial Number Name', store={
                                                            'stock.production.lot': (_get_prodlot_change, ['name'], 20),
                                                            'stock.inventory.line': (lambda self, cr, uid, ids, c={}: ids, ['prod_lot_id'], 20),}),
    }

    _defaults = {
        'product_qty': 0,
        # Default UoM: the generic "Unit" record from the product module.
        'product_uom_id': lambda self, cr, uid, ctx=None: self.pool['ir.model.data'].get_object_reference(cr, uid, 'product', 'product_uom_unit')[1]
    }

    def _get_quants(self, cr, uid, line, context=None):
        # Quants matching exactly this line's key (company, location, lot,
        # product, owner, pack); False values match quants with empty fields.
        quant_obj = self.pool["stock.quant"]
        dom = [('company_id', '=', line.company_id.id), ('location_id', '=', line.location_id.id), ('lot_id', '=', line.prod_lot_id.id),
                        ('product_id', '=', line.product_id.id), ('owner_id', '=', line.partner_id.id), ('package_id', '=', line.package_id.id)]
        quants = quant_obj.search(cr, uid, dom, context=context)
        return quants

    def onchange_createline(self, cr, uid, ids, location_id=False, product_id=False, uom_id=False, package_id=False, prod_lot_id=False, partner_id=False, company_id=False, context=None):
        # On-change for manually created lines: fix up the UoM and pre-fill the
        # theoretical and checked quantities from the current quants.
        quant_obj = self.pool["stock.quant"]
        uom_obj = self.pool["product.uom"]
        res = {'value': {}}
        # If no UoM already put the default UoM of the product
        if product_id:
            product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
            uom = self.pool['product.uom'].browse(cr, uid, uom_id, context=context)
            if product.uom_id.category_id.id != uom.category_id.id:
                res['value']['product_uom_id'] = product.uom_id.id
                res['domain'] = {'product_uom_id': [('category_id', '=', product.uom_id.category_id.id)]}
                uom_id = product.uom_id.id
        # Calculate theoretical quantity by searching the quants as in quants_get
        if product_id and location_id:
            product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
            if not company_id:
                company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
            dom = [('company_id', '=', company_id), ('location_id', '=', location_id), ('lot_id', '=', prod_lot_id),
                        ('product_id', '=', product_id), ('owner_id', '=', partner_id), ('package_id', '=', package_id)]
            quants = quant_obj.search(cr, uid, dom, context=context)
            th_qty = sum([x.qty for x in quant_obj.browse(cr, uid, quants, context=context)])
            if product_id and uom_id and product.uom_id.id != uom_id:
                th_qty = uom_obj._compute_qty(cr, uid, product.uom_id.id, th_qty, uom_id)
            res['value']['theoretical_qty'] = th_qty
            res['value']['product_qty'] = th_qty
        return res

    def _resolve_inventory_line(self, cr, uid, inventory_line, context=None):
        # Turn the delta between theoretical and checked quantity into a
        # confirmed stock move from/to the product's inventory-loss location.
        # Returns the new move id, or None when there is no difference.
        stock_move_obj = self.pool.get('stock.move')
        quant_obj = self.pool.get('stock.quant')
        diff = inventory_line.theoretical_qty - inventory_line.product_qty
        if not diff:
            return
        #each theorical_lines where difference between theoretical and checked quantities is not 0 is a line for which we need to create a stock move
        vals = {
            'name': _('INV:') + (inventory_line.inventory_id.name or ''),
            'product_id': inventory_line.product_id.id,
            'product_uom': inventory_line.product_uom_id.id,
            'date': inventory_line.inventory_id.date,
            'company_id': inventory_line.inventory_id.company_id.id,
            'inventory_id': inventory_line.inventory_id.id,
            'state': 'confirmed',
            'restrict_lot_id': inventory_line.prod_lot_id.id,
            'restrict_partner_id': inventory_line.partner_id.id,
        }
        inventory_location_id = inventory_line.product_id.property_stock_inventory.id
        if diff < 0:
            #found more than expected
            vals['location_id'] = inventory_location_id
            vals['location_dest_id'] = inventory_line.location_id.id
            vals['product_uom_qty'] = -diff
        else:
            #found less than expected
            vals['location_id'] = inventory_line.location_id.id
            vals['location_dest_id'] = inventory_location_id
            vals['product_uom_qty'] = diff
        move_id = stock_move_obj.create(cr, uid, vals, context=context)
        move = stock_move_obj.browse(cr, uid, move_id, context=context)
        if diff > 0:
            # Missing goods: reserve the exact quants being inventoried,
            # preferring unreserved ones or ones reserved by other inventories.
            domain = [('qty', '>', 0.0), ('package_id', '=', inventory_line.package_id.id), ('lot_id', '=', inventory_line.prod_lot_id.id), ('location_id', '=', inventory_line.location_id.id)]
            preferred_domain_list = [[('reservation_id', '=', False)], [('reservation_id.inventory_id', '!=', inventory_line.inventory_id.id)]]
            quants = quant_obj.quants_get_prefered_domain(cr, uid, move.location_id, move.product_id, move.product_qty, domain=domain, prefered_domain_list=preferred_domain_list, restrict_partner_id=move.restrict_partner_id.id, context=context)
            quant_obj.quants_reserve(cr, uid, quants, move, context=context)
        elif inventory_line.package_id:
            # Surplus inside a pack: validate immediately and re-attach the
            # created quants to the pack, reconciling negative quants if any.
            stock_move_obj.action_done(cr, uid, move_id, context=context)
            quants = [x.id for x in move.quant_ids]
            quant_obj.write(cr, uid, quants, {'package_id': inventory_line.package_id.id}, context=context)
            res = quant_obj.search(cr, uid, [('qty', '<', 0.0), ('product_id', '=', move.product_id.id),
                                    ('location_id', '=', move.location_dest_id.id), ('package_id', '!=', False)], limit=1, context=context)
            if res:
                for quant in move.quant_ids:
                    if quant.location_id.id == move.location_dest_id.id: #To avoid we take a quant that was reconcile already
                        quant_obj._quant_reconcile_negative(cr, uid, quant, move, context=context)
        return move_id

    # Should be left out in next version
    def restrict_change(self, cr, uid, ids, theoretical_qty, context=None):
        # Deprecated on-change stub kept for view compatibility.
        return {}

    # Should be left out in next version
    def on_change_product_id(self, cr, uid, ids, product, uom, theoretical_qty, context=None):
        """ Changes UoM
        @param location_id: Location id
        @param product: Changed product_id
        @param uom: UoM product
        @return:  Dictionary of changed values
        """
        if not product:
            return {'value': {'product_uom_id': False}}
        obj_product = self.pool.get('product.product').browse(cr, uid, product, context=context)
        # Keep the current UoM if one is set, else fall back to the product's.
        return {'value': {'product_uom_id': uom or obj_product.uom_id.id}}


#----------------------------------------------------------
# Stock Warehouse
#----------------------------------------------------------
class stock_warehouse(osv.osv):
    _name = "stock.warehouse"
    _description = "Warehouse"

    _columns = {
        'name': fields.char('Warehouse Name', required=True, select=True),
        'company_id': fields.many2one('res.company', 'Company', required=True, readonly=True, select=True),
        'partner_id': fields.many2one('res.partner', 'Address'),
        'view_location_id': fields.many2one('stock.location', 'View Location', required=True, domain=[('usage', '=', 'view')]),
        'lot_stock_id': fields.many2one('stock.location', 'Location Stock', domain=[('usage', '=', 'internal')], required=True),
        'code': fields.char('Short Name', size=5, required=True, help="Short name used to identify your warehouse"),
        'route_ids': fields.many2many('stock.location.route', 'stock_route_warehouse', 'warehouse_id', 'route_id', 'Routes', domain="[('warehouse_selectable', '=', True)]", help='Defaults routes through the warehouse'),
        'reception_steps': fields.selection([
            ('one_step', 'Receive goods directly in stock (1 step)'),
            ('two_steps', 'Unload in input location then go to stock (2 steps)'),
            ('three_steps', 'Unload in input location, go through a quality control before being admitted in stock (3 steps)')], 'Incoming Shipments',
            help="Default incoming route to follow", required=True),
        'delivery_steps': fields.selection([
            ('ship_only', 'Ship directly from stock (Ship only)'),
            ('pick_ship', 'Bring goods to output location before shipping (Pick + Ship)'),
            ('pick_pack_ship', 'Make packages into a dedicated location, then bring them to the output location for shipping (Pick + Pack + Ship)')], 'Outgoing Shippings',
            help="Default outgoing route to follow", required=True),
        'wh_input_stock_loc_id': fields.many2one('stock.location', 'Input Location'),
        'wh_qc_stock_loc_id': fields.many2one('stock.location', 'Quality Control Location'),
        'wh_output_stock_loc_id': fields.many2one('stock.location', 'Output Location'),
        'wh_pack_stock_loc_id': fields.many2one('stock.location', 'Packing Location'),
        'mto_pull_id': fields.many2one('procurement.rule', 'MTO rule'),
        'pick_type_id': fields.many2one('stock.picking.type', 'Pick Type'),
        'pack_type_id': fields.many2one('stock.picking.type', 'Pack Type'),
        'out_type_id': fields.many2one('stock.picking.type', 'Out Type'),
        'in_type_id': fields.many2one('stock.picking.type', 'In Type'),
        'int_type_id': fields.many2one('stock.picking.type', 'Internal Type'),
        'crossdock_route_id': fields.many2one('stock.location.route', 'Crossdock Route'),
        'reception_route_id': fields.many2one('stock.location.route', 'Receipt Route'),
        'delivery_route_id': fields.many2one('stock.location.route', 'Delivery Route'),
        'resupply_from_wh': fields.boolean('Resupply From Other Warehouses'),
        'resupply_wh_ids': fields.many2many('stock.warehouse', 'stock_wh_resupply_table', 'supplied_wh_id', 'supplier_wh_id', 'Resupply Warehouses'),
        'resupply_route_ids': fields.one2many('stock.location.route', 'supplied_wh_id', 'Resupply Routes',
                                              help="Routes will be created for these resupply warehouses and you can select them on products and product categories"),
        'default_resupply_wh_id': fields.many2one('stock.warehouse', 'Default Resupply Warehouse', help="Goods will always be resupplied from this warehouse"),
    }

    def onchange_filter_default_resupply_wh_id(self, cr, uid, ids, default_resupply_wh_id, resupply_wh_ids, context=None):
        # Keep the default resupply warehouse present in the m2m selection.
        resupply_wh_ids = set([x['id'] for x in (self.resolve_2many_commands(cr, uid, 'resupply_wh_ids', resupply_wh_ids, ['id']))])
        if default_resupply_wh_id: #If we are removing the default resupply, we don't have default_resupply_wh_id
            resupply_wh_ids.add(default_resupply_wh_id)
        resupply_wh_ids = list(resupply_wh_ids)
        return {'value': {'resupply_wh_ids': resupply_wh_ids}}

    def _get_external_transit_location(self, cr, uid, warehouse, context=None):
        ''' returns browse record of inter company transit location, if found'''
        data_obj = self.pool.get('ir.model.data')
        location_obj = self.pool.get('stock.location')
        try:
            inter_wh_loc = data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_inter_wh')[1]
        except:
            # XML-ID missing (module data not loaded): no external transit.
            return False
        return location_obj.browse(cr, uid, inter_wh_loc, context=context)

    def _get_inter_wh_route(self, cr, uid, warehouse, wh, context=None):
        # Values for the route that lets `warehouse` be resupplied from `wh`.
        return {
            'name': _('%s: Supply Product from %s') % (warehouse.name, wh.name),
            'warehouse_selectable': False,
            'product_selectable': True,
            'product_categ_selectable': True,
            'supplied_wh_id': warehouse.id,
            'supplier_wh_id': wh.id,
        }

    def _create_resupply_routes(self, cr, uid, warehouse, supplier_warehouses, default_resupply_wh, context=None):
        # Create one resupply route (and its pull rules) per supplying
        # warehouse, going through the internal or inter-company transit
        # location depending on whether the companies match.
        route_obj = self.pool.get('stock.location.route')
        pull_obj = self.pool.get('procurement.rule')
        #create route selectable on the product to resupply the warehouse from another one
        external_transit_location = self._get_external_transit_location(cr, uid, warehouse, context=context)
        internal_transit_location = warehouse.company_id.internal_transit_location_id
        input_loc = warehouse.wh_input_stock_loc_id
        if warehouse.reception_steps == 'one_step':
            input_loc = warehouse.lot_stock_id
        for wh in supplier_warehouses:
            transit_location = wh.company_id.id == warehouse.company_id.id and internal_transit_location or external_transit_location
            if transit_location:
                output_loc = wh.wh_output_stock_loc_id
                if wh.delivery_steps == 'ship_only':
                    output_loc = wh.lot_stock_id
                    # Create extra MTO rule (only for 'ship only' because in the other cases MTO rules already exists)
                    mto_pull_vals = self._get_mto_pull_rule(cr, uid, wh, [(output_loc, transit_location, wh.out_type_id.id)], context=context)[0]
                    pull_obj.create(cr, uid, mto_pull_vals, context=context)
                inter_wh_route_vals = self._get_inter_wh_route(cr, uid, warehouse, wh, context=context)
                inter_wh_route_id = route_obj.create(cr, uid, vals=inter_wh_route_vals, context=context)
                values = [(output_loc, transit_location, wh.out_type_id.id, wh), (transit_location, input_loc, warehouse.in_type_id.id, warehouse)]
                pull_rules_list = self._get_supply_pull_rules(cr, uid, wh.id, values, inter_wh_route_id, context=context)
                for pull_rule in pull_rules_list:
                    pull_obj.create(cr, uid, vals=pull_rule, context=context)
                #if the warehouse is also set as default resupply method, assign this route automatically to the warehouse
                if default_resupply_wh and default_resupply_wh.id == wh.id:
                    self.write(cr, uid, [warehouse.id], {'route_ids': [(4, inter_wh_route_id)]}, context=context)

    _defaults = {
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.inventory', context=c),
        'reception_steps': 'one_step',
        'delivery_steps': 'ship_only',
    }
    _sql_constraints = [
        ('warehouse_name_uniq', 'unique(name, company_id)', 'The name of the warehouse must be unique per company!'),
        ('warehouse_code_uniq', 'unique(code, company_id)', 'The code of the warehouse must be unique per company!'),
    ]

    def _get_partner_locations(self, cr, uid, ids, context=None):
        ''' returns a tuple made of the browse record of customer location and the browse record of supplier location'''
        data_obj = self.pool.get('ir.model.data')
        location_obj = self.pool.get('stock.location')
        try:
            customer_loc = data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_customers')[1]
            supplier_loc = data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_suppliers')[1]
        except:
            # XML-IDs missing: fall back to searching any location by usage.
            customer_loc = location_obj.search(cr, uid, [('usage', '=', 'customer')], context=context)
            customer_loc = customer_loc and customer_loc[0] or False
            supplier_loc = location_obj.search(cr, uid, [('usage', '=', 'supplier')], context=context)
            supplier_loc = supplier_loc and supplier_loc[0] or False
        if not (customer_loc and supplier_loc):
            raise osv.except_osv(_('Error!'), _('Can\'t find any customer or supplier location.'))
        return location_obj.browse(cr, uid, [customer_loc, supplier_loc], context=context)

    def _location_used(self, cr, uid, location_id, warehouse, context=None):
        # True when a pull or push rule OUTSIDE this warehouse's own routes
        # references the location (so it must not be archived).
        pull_obj = self.pool['procurement.rule']
        push_obj = self.pool['stock.location.path']
        pulls = pull_obj.search(cr, uid, ['&', ('route_id', 'not in', [x.id for x in warehouse.route_ids]), '|', ('location_src_id', '=', location_id), ('location_id', '=', location_id)], context=context)
        pushs = push_obj.search(cr, uid, ['&', ('route_id', 'not in', [x.id for x in warehouse.route_ids]), '|', ('location_from_id', '=', location_id), ('location_dest_id', '=', location_id)], context=context)
        if pulls or pushs:
            return True
        return False

    def switch_location(self, cr, uid, ids, warehouse, new_reception_step=False, new_delivery_step=False, context=None):
        # (De)activate the warehouse's intermediate locations to match the new
        # reception/delivery step configuration.
        location_obj = self.pool.get('stock.location')

        new_reception_step = new_reception_step or warehouse.reception_steps
        new_delivery_step = new_delivery_step or warehouse.delivery_steps
        if warehouse.reception_steps != new_reception_step:
            if not self._location_used(cr, uid, warehouse.wh_input_stock_loc_id.id, warehouse, context=context):
                location_obj.write(cr, uid, [warehouse.wh_input_stock_loc_id.id, warehouse.wh_qc_stock_loc_id.id], {'active': False}, context=context)
            if new_reception_step != 'one_step':
                location_obj.write(cr, uid, warehouse.wh_input_stock_loc_id.id, {'active': True}, context=context)
            if new_reception_step == 'three_steps':
                location_obj.write(cr, uid, warehouse.wh_qc_stock_loc_id.id, {'active': True}, context=context)

        if warehouse.delivery_steps != new_delivery_step:
            if not self._location_used(cr, uid, warehouse.wh_output_stock_loc_id.id, warehouse, context=context):
                location_obj.write(cr, uid, [warehouse.wh_output_stock_loc_id.id], {'active': False}, context=context)
            if not self._location_used(cr, uid, warehouse.wh_pack_stock_loc_id.id, warehouse, context=context):
                location_obj.write(cr, uid, [warehouse.wh_pack_stock_loc_id.id], {'active': False}, context=context)
            if new_delivery_step != 'ship_only':
                location_obj.write(cr, uid, warehouse.wh_output_stock_loc_id.id, {'active': True}, context=context)
            if new_delivery_step == 'pick_pack_ship':
                location_obj.write(cr, uid, warehouse.wh_pack_stock_loc_id.id, {'active': True}, context=context)
        return True

    def _get_reception_delivery_route(self, cr, uid, warehouse, route_name, context=None):
        # Values for a warehouse-internal reception or delivery route.
        return {
            'name': self._format_routename(cr, uid, warehouse, route_name, context=context),
            'product_categ_selectable': True,
            'product_selectable': False,
            'sequence': 10,
        }

    def _get_supply_pull_rules(self, cr, uid, supply_warehouse, values, new_route_id, context=None):
        # Build pull-rule value dicts for a resupply route; `values` is a list
        # of (from_loc, dest_loc, picking_type_id, warehouse) tuples.
        pull_rules_list = []
        for from_loc, dest_loc, pick_type_id, warehouse in values:
            pull_rules_list.append({
                'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context),
                'location_src_id': from_loc.id,
                'location_id': dest_loc.id,
                'route_id': new_route_id,
                'action': 'move',
                'picking_type_id': pick_type_id,
                'procure_method': warehouse.lot_stock_id.id != from_loc.id and 'make_to_order' or 'make_to_stock', # first part of the resuply route is MTS
                'warehouse_id': warehouse.id,
                'propagate_warehouse_id': supply_warehouse,
            })
        return pull_rules_list

    def _get_push_pull_rules(self, cr, uid, warehouse, active, values, new_route_id, context=None):
        # Build matching push and pull rule value dicts for each leg of a
        # multi-step route; only the first leg is make_to_stock.
        first_rule = True
        push_rules_list = []
        pull_rules_list = []
        for from_loc, dest_loc, pick_type_id in values:
            push_rules_list.append({
                'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context),
                'location_from_id': from_loc.id,
                'location_dest_id': dest_loc.id,
                'route_id': new_route_id,
                'auto': 'manual',
                'picking_type_id': pick_type_id,
                'active': active,
                'warehouse_id': warehouse.id,
            })
            pull_rules_list.append({
                'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context),
                'location_src_id': from_loc.id,
                'location_id': dest_loc.id,
                'route_id': new_route_id,
                'action': 'move',
                'picking_type_id': pick_type_id,
                'procure_method': first_rule is True and 'make_to_stock' or 'make_to_order',
                'active': active,
                'warehouse_id': warehouse.id,
            })
            first_rule = False
        return push_rules_list, pull_rules_list

    def _get_mto_route(self, cr, uid, context=None):
        # Resolve the generic Make-To-Order route, preferring the XML-ID and
        # falling back to a name search; raises when neither is found.
        route_obj = self.pool.get('stock.location.route')
        data_obj = self.pool.get('ir.model.data')
        try:
            mto_route_id = data_obj.get_object_reference(cr, uid, 'stock', 'route_warehouse0_mto')[1]
        except:
            mto_route_id = route_obj.search(cr, uid, [('name', 'like', _('Make To Order'))], context=context)
            mto_route_id = mto_route_id and mto_route_id[0] or False
        if not mto_route_id:
            raise osv.except_osv(_('Error!'), _('Can\'t find any generic Make To Order route.'))
        return mto_route_id

    def _check_remove_mto_resupply_rules(self, cr, uid, warehouse, context=None):
        """ Checks that the moves from the different """
        # Removes the pull rules pulling from this warehouse's stock into a
        # transit location.  NOTE(review): mto_route_id is computed but not
        # used in the search domain -- looks intentional upstream, but verify.
        pull_obj = self.pool.get('procurement.rule')
        mto_route_id = self._get_mto_route(cr, uid, context=context)
        rules = pull_obj.search(cr, uid, ['&', ('location_src_id', '=', warehouse.lot_stock_id.id), ('location_id.usage', '=', 'transit')], context=context)
        pull_obj.unlink(cr, uid, rules, context=context)

    def _get_mto_pull_rule(self, cr, uid, warehouse, values, context=None):
        # Build MTO pull-rule value dicts on the generic MTO route; `values`
        # is a list of (from_loc, dest_loc, picking_type_id) tuples.
        mto_route_id = self._get_mto_route(cr, uid, context=context)
        res = []
        for value in values:
            from_loc, dest_loc, pick_type_id = value
            res += [{
                'name': self._format_rulename(cr, uid, warehouse, from_loc, dest_loc, context=context) + _(' MTO'),
                'location_src_id': from_loc.id,
                'location_id': dest_loc.id,
                'route_id': mto_route_id,
                'action': 'move',
                'picking_type_id': pick_type_id,
                'procure_method': 'make_to_order',
                'active': True,
                'warehouse_id': warehouse.id,
            }]
        return res

    def _get_crossdock_route(self, cr, uid, warehouse, route_name, context=None):
        # Values for the cross-dock route (only active with multi-step
        # reception AND delivery).
        return {
            'name': self._format_routename(cr, uid, warehouse, route_name, context=context),
            'warehouse_selectable': False,
            'product_selectable': True,
'product_categ_selectable': True, 'active': warehouse.delivery_steps != 'ship_only' and warehouse.reception_steps != 'one_step', 'sequence': 20, } def create_routes(self, cr, uid, ids, warehouse, context=None): wh_route_ids = [] route_obj = self.pool.get('stock.location.route') pull_obj = self.pool.get('procurement.rule') push_obj = self.pool.get('stock.location.path') routes_dict = self.get_routes_dict(cr, uid, ids, warehouse, context=context) #create reception route and rules route_name, values = routes_dict[warehouse.reception_steps] route_vals = self._get_reception_delivery_route(cr, uid, warehouse, route_name, context=context) reception_route_id = route_obj.create(cr, uid, route_vals, context=context) wh_route_ids.append((4, reception_route_id)) push_rules_list, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, reception_route_id, context=context) #create the push/pull rules for push_rule in push_rules_list: push_obj.create(cr, uid, vals=push_rule, context=context) for pull_rule in pull_rules_list: #all pull rules in reception route are mto, because we don't want to wait for the scheduler to trigger an orderpoint on input location pull_rule['procure_method'] = 'make_to_order' pull_obj.create(cr, uid, vals=pull_rule, context=context) #create MTS route and pull rules for delivery and a specific route MTO to be set on the product route_name, values = routes_dict[warehouse.delivery_steps] route_vals = self._get_reception_delivery_route(cr, uid, warehouse, route_name, context=context) #create the route and its pull rules delivery_route_id = route_obj.create(cr, uid, route_vals, context=context) wh_route_ids.append((4, delivery_route_id)) dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, delivery_route_id, context=context) for pull_rule in pull_rules_list: pull_obj.create(cr, uid, vals=pull_rule, context=context) #create MTO pull rule and link it to the generic MTO route mto_pull_vals = 
    def create_routes(self, cr, uid, ids, warehouse, context=None):
        """Create all routes and push/pull rules of a new warehouse.

        Creates, in order: the reception route, the delivery route, the MTO pull
        rule on the generic MTO route, the cross-dock route, and the resupply
        routes from other warehouses.

        :return: dict of values (route ids and mto pull rule id) to be written
                 back on the warehouse record by the caller.
        """
        wh_route_ids = []
        route_obj = self.pool.get('stock.location.route')
        pull_obj = self.pool.get('procurement.rule')
        push_obj = self.pool.get('stock.location.path')
        routes_dict = self.get_routes_dict(cr, uid, ids, warehouse, context=context)
        #create reception route and rules
        route_name, values = routes_dict[warehouse.reception_steps]
        route_vals = self._get_reception_delivery_route(cr, uid, warehouse, route_name, context=context)
        reception_route_id = route_obj.create(cr, uid, route_vals, context=context)
        wh_route_ids.append((4, reception_route_id))
        push_rules_list, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, reception_route_id, context=context)
        #create the push/pull rules
        for push_rule in push_rules_list:
            push_obj.create(cr, uid, vals=push_rule, context=context)
        for pull_rule in pull_rules_list:
            #all pull rules in reception route are mto, because we don't want to
            #wait for the scheduler to trigger an orderpoint on input location
            pull_rule['procure_method'] = 'make_to_order'
            pull_obj.create(cr, uid, vals=pull_rule, context=context)
        #create MTS route and pull rules for delivery and a specific route MTO
        #to be set on the product
        route_name, values = routes_dict[warehouse.delivery_steps]
        route_vals = self._get_reception_delivery_route(cr, uid, warehouse, route_name, context=context)
        #create the route and its pull rules
        delivery_route_id = route_obj.create(cr, uid, route_vals, context=context)
        wh_route_ids.append((4, delivery_route_id))
        dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, delivery_route_id, context=context)
        for pull_rule in pull_rules_list:
            pull_obj.create(cr, uid, vals=pull_rule, context=context)
        #create MTO pull rule and link it to the generic MTO route
        mto_pull_vals = self._get_mto_pull_rule(cr, uid, warehouse, values, context=context)[0]
        mto_pull_id = pull_obj.create(cr, uid, mto_pull_vals, context=context)
        #create a route for cross dock operations, that can be set on products
        #and product categories
        route_name, values = routes_dict['crossdock']
        crossdock_route_vals = self._get_crossdock_route(cr, uid, warehouse, route_name, context=context)
        crossdock_route_id = route_obj.create(cr, uid, vals=crossdock_route_vals, context=context)
        wh_route_ids.append((4, crossdock_route_id))
        dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, warehouse.delivery_steps != 'ship_only' and warehouse.reception_steps != 'one_step', values, crossdock_route_id, context=context)
        for pull_rule in pull_rules_list:
            # Fixed cross-dock is logically mto
            pull_rule['procure_method'] = 'make_to_order'
            pull_obj.create(cr, uid, vals=pull_rule, context=context)
        #create route selectable on the product to resupply the warehouse from
        #another one
        self._create_resupply_routes(cr, uid, warehouse, warehouse.resupply_wh_ids, warehouse.default_resupply_wh_id, context=context)
        #return routes and mto pull rule to store on the warehouse
        return {
            'route_ids': wh_route_ids,
            'mto_pull_id': mto_pull_id,
            'reception_route_id': reception_route_id,
            'delivery_route_id': delivery_route_id,
            'crossdock_route_id': crossdock_route_id,
        }
    def change_route(self, cr, uid, ids, warehouse, new_reception_step=False, new_delivery_step=False, context=None):
        """Rebuild routes and rules when the reception/delivery steps change.

        Updates the default locations and active flags of the picking types,
        then drops and recreates the pull/push rules of the delivery and
        reception routes, toggles the cross-dock route, and rewrites the MTO
        pull rule to match the new delivery configuration.
        """
        picking_type_obj = self.pool.get('stock.picking.type')
        pull_obj = self.pool.get('procurement.rule')
        push_obj = self.pool.get('stock.location.path')
        route_obj = self.pool.get('stock.location.route')
        new_reception_step = new_reception_step or warehouse.reception_steps
        new_delivery_step = new_delivery_step or warehouse.delivery_steps
        #change the default source and destination location and (de)activate
        #picking types
        input_loc = warehouse.wh_input_stock_loc_id
        if new_reception_step == 'one_step':
            input_loc = warehouse.lot_stock_id
        output_loc = warehouse.wh_output_stock_loc_id
        if new_delivery_step == 'ship_only':
            output_loc = warehouse.lot_stock_id
        picking_type_obj.write(cr, uid, warehouse.in_type_id.id, {'default_location_dest_id': input_loc.id}, context=context)
        picking_type_obj.write(cr, uid, warehouse.out_type_id.id, {'default_location_src_id': output_loc.id}, context=context)
        picking_type_obj.write(cr, uid, warehouse.pick_type_id.id, {
            'active': new_delivery_step != 'ship_only',
            'default_location_dest_id': output_loc.id if new_delivery_step == 'pick_ship' else warehouse.wh_pack_stock_loc_id.id,
        }, context=context)
        picking_type_obj.write(cr, uid, warehouse.pack_type_id.id, {'active': new_delivery_step == 'pick_pack_ship'}, context=context)

        routes_dict = self.get_routes_dict(cr, uid, ids, warehouse, context=context)
        #update delivery route and rules: unlink the existing rules of the
        #warehouse delivery route and recreate it
        pull_obj.unlink(cr, uid, [pu.id for pu in warehouse.delivery_route_id.pull_ids], context=context)
        route_name, values = routes_dict[new_delivery_step]
        route_obj.write(cr, uid, warehouse.delivery_route_id.id, {'name': self._format_routename(cr, uid, warehouse, route_name, context=context)}, context=context)
        dummy, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, warehouse.delivery_route_id.id, context=context)
        #create the pull rules
        for pull_rule in pull_rules_list:
            pull_obj.create(cr, uid, vals=pull_rule, context=context)
        #update receipt route and rules: unlink the existing rules of the
        #warehouse receipt route and recreate it
        pull_obj.unlink(cr, uid, [pu.id for pu in warehouse.reception_route_id.pull_ids], context=context)
        push_obj.unlink(cr, uid, [pu.id for pu in warehouse.reception_route_id.push_ids], context=context)
        route_name, values = routes_dict[new_reception_step]
        route_obj.write(cr, uid, warehouse.reception_route_id.id, {'name': self._format_routename(cr, uid, warehouse, route_name, context=context)}, context=context)
        push_rules_list, pull_rules_list = self._get_push_pull_rules(cr, uid, warehouse, True, values, warehouse.reception_route_id.id, context=context)
        #create the push/pull rules
        for push_rule in push_rules_list:
            push_obj.create(cr, uid, vals=push_rule, context=context)
        for pull_rule in pull_rules_list:
            #all pull rules in receipt route are mto, because we don't want to
            #wait for the scheduler to trigger an orderpoint on input location
            pull_rule['procure_method'] = 'make_to_order'
            pull_obj.create(cr, uid, vals=pull_rule, context=context)

        # cross-dock only makes sense with multi-step reception AND delivery
        route_obj.write(cr, uid, warehouse.crossdock_route_id.id, {'active': new_reception_step != 'one_step' and new_delivery_step != 'ship_only'}, context=context)

        #change MTO rule
        dummy, values = routes_dict[new_delivery_step]
        mto_pull_vals = self._get_mto_pull_rule(cr, uid, warehouse, values, context=context)[0]
        pull_obj.write(cr, uid, warehouse.mto_pull_id.id, mto_pull_vals, context=context)
        return True
    def create_sequences_and_picking_types(self, cr, uid, warehouse, context=None):
        """Create the ir.sequence records and the in/out/int/pack/pick picking
        types of a new warehouse, and write their ids on the warehouse.

        Sequences are created as SUPERUSER_ID because regular users may not
        have create rights on ir.sequence.
        """
        seq_obj = self.pool.get('ir.sequence')
        picking_type_obj = self.pool.get('stock.picking.type')
        #create new sequences
        in_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence in'), 'prefix': warehouse.code + '/IN/', 'padding': 5}, context=context)
        out_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence out'), 'prefix': warehouse.code + '/OUT/', 'padding': 5}, context=context)
        pack_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence packing'), 'prefix': warehouse.code + '/PACK/', 'padding': 5}, context=context)
        pick_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence picking'), 'prefix': warehouse.code + '/PICK/', 'padding': 5}, context=context)
        int_seq_id = seq_obj.create(cr, SUPERUSER_ID, values={'name': warehouse.name + _(' Sequence internal'), 'prefix': warehouse.code + '/INT/', 'padding': 5}, context=context)

        wh_stock_loc = warehouse.lot_stock_id
        wh_input_stock_loc = warehouse.wh_input_stock_loc_id
        wh_output_stock_loc = warehouse.wh_output_stock_loc_id
        wh_pack_stock_loc = warehouse.wh_pack_stock_loc_id

        #fetch customer and supplier locations, for references
        customer_loc, supplier_loc = self._get_partner_locations(cr, uid, warehouse.id, context=context)

        #create in, out, internal picking types for warehouse
        input_loc = wh_input_stock_loc
        if warehouse.reception_steps == 'one_step':
            input_loc = wh_stock_loc
        output_loc = wh_output_stock_loc
        if warehouse.delivery_steps == 'ship_only':
            output_loc = wh_stock_loc

        #choose the next available color for the picking types of this warehouse
        color = 0
        available_colors = [c%9 for c in range(3, 12)]  # put flashy colors first
        all_used_colors = self.pool.get('stock.picking.type').search_read(cr, uid, [('warehouse_id', '!=', False), ('color', '!=', False)], ['color'], order='color')
        #don't use sets to preserve the list order
        for x in all_used_colors:
            if x['color'] in available_colors:
                available_colors.remove(x['color'])
        if available_colors:
            color = available_colors[0]

        #order the picking types with a sequence allowing to have the following
        #suit for each warehouse: reception, internal, pick, pack, ship.
        max_sequence = self.pool.get('stock.picking.type').search_read(cr, uid, [], ['sequence'], order='sequence desc')
        max_sequence = max_sequence and max_sequence[0]['sequence'] or 0

        in_type_id = picking_type_obj.create(cr, uid, vals={
            'name': _('Receipts'),
            'warehouse_id': warehouse.id,
            'code': 'incoming',
            'sequence_id': in_seq_id,
            'default_location_src_id': supplier_loc.id,
            'default_location_dest_id': input_loc.id,
            'sequence': max_sequence + 1,
            'color': color}, context=context)
        out_type_id = picking_type_obj.create(cr, uid, vals={
            'name': _('Delivery Orders'),
            'warehouse_id': warehouse.id,
            'code': 'outgoing',
            'sequence_id': out_seq_id,
            'return_picking_type_id': in_type_id,
            'default_location_src_id': output_loc.id,
            'default_location_dest_id': customer_loc.id,
            'sequence': max_sequence + 4,
            'color': color}, context=context)
        # back-link the receipt type to the delivery type for returns
        picking_type_obj.write(cr, uid, [in_type_id], {'return_picking_type_id': out_type_id}, context=context)
        int_type_id = picking_type_obj.create(cr, uid, vals={
            'name': _('Internal Transfers'),
            'warehouse_id': warehouse.id,
            'code': 'internal',
            'sequence_id': int_seq_id,
            'default_location_src_id': wh_stock_loc.id,
            'default_location_dest_id': wh_stock_loc.id,
            'active': True,
            'sequence': max_sequence + 2,
            'color': color}, context=context)
        pack_type_id = picking_type_obj.create(cr, uid, vals={
            'name': _('Pack'),
            'warehouse_id': warehouse.id,
            'code': 'internal',
            'sequence_id': pack_seq_id,
            'default_location_src_id': wh_pack_stock_loc.id,
            'default_location_dest_id': output_loc.id,
            'active': warehouse.delivery_steps == 'pick_pack_ship',
            'sequence': max_sequence + 3,
            'color': color}, context=context)
        pick_type_id = picking_type_obj.create(cr, uid, vals={
            'name': _('Pick'),
            'warehouse_id': warehouse.id,
            'code': 'internal',
            'sequence_id': pick_seq_id,
            'default_location_src_id': wh_stock_loc.id,
            'default_location_dest_id': output_loc.id if warehouse.delivery_steps == 'pick_ship' else wh_pack_stock_loc.id,
            'active': warehouse.delivery_steps != 'ship_only',
            'sequence': max_sequence + 2,
            'color': color}, context=context)

        #write picking types on WH
        vals = {
            'in_type_id': in_type_id,
            'out_type_id': out_type_id,
            'pack_type_id': pack_type_id,
            'pick_type_id': pick_type_id,
            'int_type_id': int_type_id,
        }
        # bypass this model's own write() to avoid re-triggering route logic
        super(stock_warehouse, self).write(cr, uid, warehouse.id, vals=vals, context=context)
    def create(self, cr, uid, vals, context=None):
        """Create a warehouse together with its view location, sub-locations,
        sequences, picking types and routes.

        :return: id of the new stock.warehouse record
        """
        if context is None:
            context = {}
        if vals is None:
            vals = {}
        data_obj = self.pool.get('ir.model.data')
        # NOTE(review): seq_obj and picking_type_obj are fetched but unused in
        # this method (the work happens in create_sequences_and_picking_types).
        seq_obj = self.pool.get('ir.sequence')
        picking_type_obj = self.pool.get('stock.picking.type')
        location_obj = self.pool.get('stock.location')

        #create view location for warehouse
        loc_vals = {
            'name': _(vals.get('code')),
            'usage': 'view',
            'location_id': data_obj.get_object_reference(cr, uid, 'stock', 'stock_location_locations')[1],
        }
        if vals.get('company_id'):
            loc_vals['company_id'] = vals.get('company_id')
        wh_loc_id = location_obj.create(cr, uid, loc_vals, context=context)
        vals['view_location_id'] = wh_loc_id
        #create all location
        def_values = self.default_get(cr, uid, {'reception_steps', 'delivery_steps'})
        reception_steps = vals.get('reception_steps', def_values['reception_steps'])
        delivery_steps = vals.get('delivery_steps', def_values['delivery_steps'])
        # inactive sub-locations must still be created, hence active_test off
        context_with_inactive = context.copy()
        context_with_inactive['active_test'] = False
        sub_locations = [
            {'name': _('Stock'), 'active': True, 'field': 'lot_stock_id'},
            {'name': _('Input'), 'active': reception_steps != 'one_step', 'field': 'wh_input_stock_loc_id'},
            {'name': _('Quality Control'), 'active': reception_steps == 'three_steps', 'field': 'wh_qc_stock_loc_id'},
            {'name': _('Output'), 'active': delivery_steps != 'ship_only', 'field': 'wh_output_stock_loc_id'},
            {'name': _('Packing Zone'), 'active': delivery_steps == 'pick_pack_ship', 'field': 'wh_pack_stock_loc_id'},
        ]
        for values in sub_locations:
            loc_vals = {
                'name': values['name'],
                'usage': 'internal',
                'location_id': wh_loc_id,
                'active': values['active'],
            }
            if vals.get('company_id'):
                loc_vals['company_id'] = vals.get('company_id')
            location_id = location_obj.create(cr, uid, loc_vals, context=context_with_inactive)
            vals[values['field']] = location_id

        #create WH
        new_id = super(stock_warehouse, self).create(cr, uid, vals=vals, context=context)
        warehouse = self.browse(cr, uid, new_id, context=context)
        self.create_sequences_and_picking_types(cr, uid, warehouse, context=context)

        #create routes and push/pull rules
        new_objects_dict = self.create_routes(cr, uid, new_id, warehouse, context=context)
        self.write(cr, uid, warehouse.id, new_objects_dict, context=context)
        return new_id

    def _format_rulename(self, cr, uid, obj, from_loc, dest_loc, context=None):
        """Default display name of a push/pull rule: '<code>: <from> -> <to>'."""
        return obj.code + ': ' + from_loc.name + ' -> ' + dest_loc.name

    def _format_routename(self, cr, uid, obj, name, context=None):
        """Default display name of a route: '<warehouse name>: <route name>'."""
        return obj.name + ': ' + name
[(warehouse.lot_stock_id, warehouse.wh_pack_stock_loc_id, warehouse.pick_type_id.id), (warehouse.wh_pack_stock_loc_id, warehouse.wh_output_stock_loc_id, warehouse.pack_type_id.id), (warehouse.wh_output_stock_loc_id, customer_loc, warehouse.out_type_id.id)]), } def _handle_renaming(self, cr, uid, warehouse, name, code, context=None): location_obj = self.pool.get('stock.location') route_obj = self.pool.get('stock.location.route') pull_obj = self.pool.get('procurement.rule') push_obj = self.pool.get('stock.location.path') #rename location location_id = warehouse.lot_stock_id.location_id.id location_obj.write(cr, uid, location_id, {'name': code}, context=context) #rename route and push-pull rules for route in warehouse.route_ids: route_obj.write(cr, uid, route.id, {'name': route.name.replace(warehouse.name, name, 1)}, context=context) for pull in route.pull_ids: pull_obj.write(cr, uid, pull.id, {'name': pull.name.replace(warehouse.name, name, 1)}, context=context) for push in route.push_ids: push_obj.write(cr, uid, push.id, {'name': pull.name.replace(warehouse.name, name, 1)}, context=context) #change the mto pull rule name if warehouse.mto_pull_id.id: pull_obj.write(cr, uid, warehouse.mto_pull_id.id, {'name': warehouse.mto_pull_id.name.replace(warehouse.name, name, 1)}, context=context) def _check_delivery_resupply(self, cr, uid, warehouse, new_location, change_to_multiple, context=None): """ Will check if the resupply routes from this warehouse follow the changes of number of delivery steps """ #Check routes that are being delivered by this warehouse and change the rule going to transit location route_obj = self.pool.get("stock.location.route") pull_obj = self.pool.get("procurement.rule") routes = route_obj.search(cr, uid, [('supplier_wh_id','=', warehouse.id)], context=context) pulls = pull_obj.search(cr, uid, ['&', ('route_id', 'in', routes), ('location_id.usage', '=', 'transit')], context=context) if pulls: pull_obj.write(cr, uid, pulls, {'location_src_id': 
    def _check_delivery_resupply(self, cr, uid, warehouse, new_location, change_to_multiple, context=None):
        """ Will check if the resupply routes from this warehouse follow the changes of number of delivery steps """
        #Check routes that are being delivered by this warehouse and change the
        #rule going to transit location
        route_obj = self.pool.get("stock.location.route")
        pull_obj = self.pool.get("procurement.rule")
        routes = route_obj.search(cr, uid, [('supplier_wh_id','=', warehouse.id)], context=context)
        pulls = pull_obj.search(cr, uid, ['&', ('route_id', 'in', routes), ('location_id.usage', '=', 'transit')], context=context)
        if pulls:
            pull_obj.write(cr, uid, pulls, {'location_src_id': new_location, 'procure_method': change_to_multiple and "make_to_order" or "make_to_stock"}, context=context)
        # Create or clean MTO rules
        mto_route_id = self._get_mto_route(cr, uid, context=context)
        if not change_to_multiple:
            # If single delivery we should create the necessary MTO rules for the resupply
            # pulls = pull_obj.search(cr, uid, ['&', ('route_id', '=', mto_route_id), ('location_id.usage', '=', 'transit'), ('location_src_id', '=', warehouse.lot_stock_id.id)], context=context)
            pull_recs = pull_obj.browse(cr, uid, pulls, context=context)
            transfer_locs = list(set([x.location_id for x in pull_recs]))
            vals = [(warehouse.lot_stock_id , x, warehouse.out_type_id.id) for x in transfer_locs]
            mto_pull_vals = self._get_mto_pull_rule(cr, uid, warehouse, vals, context=context)
            for mto_pull_val in mto_pull_vals:
                pull_obj.create(cr, uid, mto_pull_val, context=context)
        else:
            # We need to delete all the MTO pull rules, otherwise they risk to
            # be used in the system
            pulls = pull_obj.search(cr, uid, ['&', ('route_id', '=', mto_route_id), ('location_id.usage', '=', 'transit'), ('location_src_id', '=', warehouse.lot_stock_id.id)], context=context)
            if pulls:
                pull_obj.unlink(cr, uid, pulls, context=context)

    def _check_reception_resupply(self, cr, uid, warehouse, new_location, context=None):
        """ Will check if the resupply routes to this warehouse follow the changes of number of receipt steps """
        #Check routes that are being delivered by this warehouse and change the
        #rule coming from transit location
        route_obj = self.pool.get("stock.location.route")
        pull_obj = self.pool.get("procurement.rule")
        routes = route_obj.search(cr, uid, [('supplied_wh_id','=', warehouse.id)], context=context)
        # NOTE(review): this search does not pass context -- presumably
        # harmless, but inconsistent with the sibling method; confirm.
        pulls = pull_obj.search(cr, uid, ['&', ('route_id', 'in', routes), ('location_src_id.usage', '=', 'transit')])
        if pulls:
            pull_obj.write(cr, uid, pulls, {'location_id': new_location}, context=context)
old_val = warehouse.reception_steps new_val = reception_new change_to_one = (old_val != 'one_step' and new_val == 'one_step') change_to_multiple = (old_val == 'one_step' and new_val != 'one_step') if change_to_one or change_to_multiple: new_location = change_to_one and warehouse.lot_stock_id.id or warehouse.wh_input_stock_loc_id.id self._check_reception_resupply(cr, uid, warehouse, new_location, context=context) if delivery_new: old_val = warehouse.delivery_steps new_val = delivery_new change_to_one = (old_val != 'ship_only' and new_val == 'ship_only') change_to_multiple = (old_val == 'ship_only' and new_val != 'ship_only') if change_to_one or change_to_multiple: new_location = change_to_one and warehouse.lot_stock_id.id or warehouse.wh_output_stock_loc_id.id self._check_delivery_resupply(cr, uid, warehouse, new_location, change_to_multiple, context=context) def write(self, cr, uid, ids, vals, context=None): if context is None: context = {} if isinstance(ids, (int, long)): ids = [ids] seq_obj = self.pool.get('ir.sequence') route_obj = self.pool.get('stock.location.route') context_with_inactive = context.copy() context_with_inactive['active_test'] = False for warehouse in self.browse(cr, uid, ids, context=context_with_inactive): #first of all, check if we need to delete and recreate route if vals.get('reception_steps') or vals.get('delivery_steps'): #activate and deactivate location according to reception and delivery option self.switch_location(cr, uid, warehouse.id, warehouse, vals.get('reception_steps', False), vals.get('delivery_steps', False), context=context) # switch between route self.change_route(cr, uid, ids, warehouse, vals.get('reception_steps', False), vals.get('delivery_steps', False), context=context_with_inactive) # Check if we need to change something to resupply warehouses and associated MTO rules self._check_resupply(cr, uid, warehouse, vals.get('reception_steps'), vals.get('delivery_steps'), context=context) if vals.get('code') or 
vals.get('name'): name = warehouse.name #rename sequence if vals.get('name'): name = vals.get('name', warehouse.name) self._handle_renaming(cr, uid, warehouse, name, vals.get('code', warehouse.code), context=context_with_inactive) if warehouse.in_type_id: seq_obj.write(cr, uid, warehouse.in_type_id.sequence_id.id, {'name': name + _(' Sequence in'), 'prefix': vals.get('code', warehouse.code) + '\IN\\'}, context=context) seq_obj.write(cr, uid, warehouse.out_type_id.sequence_id.id, {'name': name + _(' Sequence out'), 'prefix': vals.get('code', warehouse.code) + '\OUT\\'}, context=context) seq_obj.write(cr, uid, warehouse.pack_type_id.sequence_id.id, {'name': name + _(' Sequence packing'), 'prefix': vals.get('code', warehouse.code) + '\PACK\\'}, context=context) seq_obj.write(cr, uid, warehouse.pick_type_id.sequence_id.id, {'name': name + _(' Sequence picking'), 'prefix': vals.get('code', warehouse.code) + '\PICK\\'}, context=context) seq_obj.write(cr, uid, warehouse.int_type_id.sequence_id.id, {'name': name + _(' Sequence internal'), 'prefix': vals.get('code', warehouse.code) + '\INT\\'}, context=context) if vals.get('resupply_wh_ids') and not vals.get('resupply_route_ids'): for cmd in vals.get('resupply_wh_ids'): if cmd[0] == 6: new_ids = set(cmd[2]) old_ids = set([wh.id for wh in warehouse.resupply_wh_ids]) to_add_wh_ids = new_ids - old_ids if to_add_wh_ids: supplier_warehouses = self.browse(cr, uid, list(to_add_wh_ids), context=context) self._create_resupply_routes(cr, uid, warehouse, supplier_warehouses, warehouse.default_resupply_wh_id, context=context) to_remove_wh_ids = old_ids - new_ids if to_remove_wh_ids: to_remove_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', 'in', list(to_remove_wh_ids))], context=context) if to_remove_route_ids: route_obj.unlink(cr, uid, to_remove_route_ids, context=context) else: #not implemented pass if 'default_resupply_wh_id' in vals: if vals.get('default_resupply_wh_id') == 
warehouse.id: raise osv.except_osv(_('Warning'),_('The default resupply warehouse should be different than the warehouse itself!')) if warehouse.default_resupply_wh_id: #remove the existing resupplying route on the warehouse to_remove_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', '=', warehouse.default_resupply_wh_id.id)], context=context) for inter_wh_route_id in to_remove_route_ids: self.write(cr, uid, [warehouse.id], {'route_ids': [(3, inter_wh_route_id)]}) if vals.get('default_resupply_wh_id'): #assign the new resupplying route on all products to_assign_route_ids = route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id), ('supplier_wh_id', '=', vals.get('default_resupply_wh_id'))], context=context) for inter_wh_route_id in to_assign_route_ids: self.write(cr, uid, [warehouse.id], {'route_ids': [(4, inter_wh_route_id)]}) return super(stock_warehouse, self).write(cr, uid, ids, vals=vals, context=context) def get_all_routes_for_wh(self, cr, uid, warehouse, context=None): route_obj = self.pool.get("stock.location.route") all_routes = [route.id for route in warehouse.route_ids] all_routes += route_obj.search(cr, uid, [('supplied_wh_id', '=', warehouse.id)], context=context) all_routes += [warehouse.mto_pull_id.route_id.id] return all_routes def view_all_routes_for_wh(self, cr, uid, ids, context=None): all_routes = [] for wh in self.browse(cr, uid, ids, context=context): all_routes += self.get_all_routes_for_wh(cr, uid, wh, context=context) domain = [('id', 'in', all_routes)] return { 'name': _('Warehouse\'s Routes'), 'domain': domain, 'res_model': 'stock.location.route', 'type': 'ir.actions.act_window', 'view_id': False, 'view_mode': 'tree,form', 'view_type': 'form', 'limit': 20 } class stock_location_path(osv.osv): _name = "stock.location.path" _description = "Pushed Flows" _order = "name" def _get_rules(self, cr, uid, ids, context=None): res = [] for route in self.browse(cr, uid, ids, context=context): res 
class stock_location_path(osv.osv):
    """Push rule: when a move reaches location_from_id, chain a new move (or
    redirect the original one) towards location_dest_id."""
    _name = "stock.location.path"
    _description = "Pushed Flows"
    _order = "name"

    def _get_rules(self, cr, uid, ids, context=None):
        # store trigger helper: push rule ids impacted by a route change
        res = []
        for route in self.browse(cr, uid, ids, context=context):
            res += [x.id for x in route.push_ids]
        return res

    _columns = {
        'name': fields.char('Operation Name', required=True),
        'company_id': fields.many2one('res.company', 'Company'),
        'route_id': fields.many2one('stock.location.route', 'Route'),
        'location_from_id': fields.many2one('stock.location', 'Source Location', ondelete='cascade', select=1, required=True),
        'location_dest_id': fields.many2one('stock.location', 'Destination Location', ondelete='cascade', select=1, required=True),
        'delay': fields.integer('Delay (days)', help="Number of days to do this transition"),
        'picking_type_id': fields.many2one('stock.picking.type', 'Type of the new Operation', required=True, help="This is the picking type associated with the different pickings"),
        'auto': fields.selection(
            [('auto','Automatic Move'), ('manual','Manual Operation'),('transparent','Automatic No Step Added')],
            'Automatic Move',
            required=True, select=1,
            help="This is used to define paths the product has to follow within the location tree.\n" \
                "The 'Automatic Move' value will create a stock move after the current one that will be "\
                "validated automatically. With 'Manual Operation', the stock move has to be validated "\
                "by a worker. With 'Automatic No Step Added', the location is replaced in the original move."
        ),
        'propagate': fields.boolean('Propagate cancel and split', help='If checked, when the previous move is cancelled or split, the move generated by this move will too'),
        'active': fields.boolean('Active', help="If unchecked, it will allow you to hide the rule without removing it."),
        'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse'),
        # denormalized route sequence, recomputed when the route's sequence or
        # the rule's route changes
        'route_sequence': fields.related('route_id', 'sequence', string='Route Sequence',
            store={
                'stock.location.route': (_get_rules, ['sequence'], 10),
                'stock.location.path': (lambda self, cr, uid, ids, c={}: ids, ['route_id'], 10),
            }),
        'sequence': fields.integer('Sequence'),
    }
    _defaults = {
        'auto': 'auto',
        'delay': 0,
        'company_id': lambda self, cr, uid, c: self.pool.get('res.company')._company_default_get(cr, uid, 'procurement.order', context=c),
        'propagate': True,
        'active': True,
    }

    def _prepare_push_apply(self, cr, uid, rule, move, context=None):
        """Values of the chained move created when a push rule fires; the new
        move starts where *move* ends and is delayed by rule.delay days."""
        newdate = (datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT) + relativedelta.relativedelta(days=rule.delay or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
        return {
            'origin': move.origin or move.picking_id.name or "/",
            'location_id': move.location_dest_id.id,
            'location_dest_id': rule.location_dest_id.id,
            'date': newdate,
            'company_id': rule.company_id and rule.company_id.id or False,
            'date_expected': newdate,
            'picking_id': False,
            'picking_type_id': rule.picking_type_id and rule.picking_type_id.id or False,
            'propagate': rule.propagate,
            'push_rule_id': rule.id,
            'warehouse_id': rule.warehouse_id and rule.warehouse_id.id or False,
        }

    def _apply(self, cr, uid, rule, move, context=None):
        """Apply the push rule *rule* on *move*.

        'transparent' rules redirect the move in place (no extra step); other
        rules copy the move into a new chained step and confirm it.
        """
        move_obj = self.pool.get('stock.move')
        newdate = (datetime.strptime(move.date_expected, DEFAULT_SERVER_DATETIME_FORMAT) + relativedelta.relativedelta(days=rule.delay or 0)).strftime(DEFAULT_SERVER_DATETIME_FORMAT)
        if rule.auto == 'transparent':
            old_dest_location = move.location_dest_id.id
            move_obj.write(cr, uid, [move.id], {
                'date': newdate,
                'date_expected': newdate,
                'location_dest_id': rule.location_dest_id.id
            })
            #avoid looping if a push rule is not well configured
            if rule.location_dest_id.id != old_dest_location:
                #call again push_apply to see if a next step is defined
                move_obj._push_apply(cr, uid, [move], context=context)
        else:
            vals = self._prepare_push_apply(cr, uid, rule, move, context=context)
            move_id = move_obj.copy(cr, uid, move.id, vals, context=context)
            move_obj.write(cr, uid, [move.id], {
                'move_dest_id': move_id,
            })
            # NOTE(review): confirms with context=None instead of the incoming
            # context -- looks deliberate but worth confirming.
            move_obj.action_confirm(cr, uid, [move_id], context=None)
# -------------------------
# Packaging related stuff
# -------------------------

from openerp.report import report_sxw

class stock_package(osv.osv):
    """ These are the packages, containing quants and/or other packages """
    _name = "stock.quant.package"
    _description = "Physical Packages"
    _parent_name = "parent_id"
    _parent_store = True
    _parent_order = 'name'
    _order = 'parent_left'

    def name_get(self, cr, uid, ids, context=None):
        # display name is the full parent path computed by _complete_name
        res = self._complete_name(cr, uid, ids, 'complete_name', None, context=context)
        return res.items()

    def _complete_name(self, cr, uid, ids, name, args, context=None):
        """ Forms complete name of location from parent location to child location.
        @return: Dictionary of values
        """
        res = {}
        for m in self.browse(cr, uid, ids, context=context):
            res[m.id] = m.name
            parent = m.parent_id
            while parent:
                res[m.id] = parent.name + ' / ' + res[m.id]
                parent = parent.parent_id
        return res

    def _get_packages(self, cr, uid, ids, context=None):
        """Returns packages from quants for store"""
        # walks up the package hierarchy so parent packages are recomputed too
        res = set()
        for quant in self.browse(cr, uid, ids, context=context):
            pack = quant.package_id
            while pack:
                res.add(pack.id)
                pack = pack.parent_id
        return list(res)

    def _get_package_info(self, cr, uid, ids, name, args, context=None):
        """Compute location, owner and company of a package from the first
        quant found inside it (or its sub-packages)."""
        quant_obj = self.pool.get("stock.quant")
        default_company_id = self.pool.get('res.users').browse(cr, uid, uid, context=context).company_id.id
        res = dict((res_id, {'location_id': False, 'company_id': default_company_id, 'owner_id': False}) for res_id in ids)
        for pack in self.browse(cr, uid, ids, context=context):
            quants = quant_obj.search(cr, uid, [('package_id', 'child_of', pack.id)], context=context)
            if quants:
                quant = quant_obj.browse(cr, uid, quants[0], context=context)
                res[pack.id]['location_id'] = quant.location_id.id
                res[pack.id]['owner_id'] = quant.owner_id.id
                res[pack.id]['company_id'] = quant.company_id.id
            else:
                res[pack.id]['location_id'] = False
                res[pack.id]['owner_id'] = False
                res[pack.id]['company_id'] = False
        return res

    def _get_packages_to_relocate(self, cr, uid, ids, context=None):
        # store trigger helper: the changed packages plus their direct parents
        res = set()
        for pack in self.browse(cr, uid, ids, context=context):
            res.add(pack.id)
            if pack.parent_id:
                res.add(pack.parent_id.id)
        return list(res)

    _columns = {
        'name': fields.char('Package Reference', select=True, copy=False),
        'complete_name': fields.function(_complete_name, type='char', string="Package Name",),
        'parent_left': fields.integer('Left Parent', select=1),
        'parent_right': fields.integer('Right Parent', select=1),
        'packaging_id': fields.many2one('product.packaging', 'Packaging', help="This field should be completed only if everything inside the package share the same product, otherwise it doesn't really makes sense.", select=True),
        'ul_id': fields.many2one('product.ul', 'Logistic Unit'),
        'location_id': fields.function(_get_package_info, type='many2one', relation='stock.location', string='Location', multi="package",
                                    store={
                                       'stock.quant': (_get_packages, ['location_id'], 10),
                                       'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10),
                                    }, readonly=True, select=True),
        'quant_ids': fields.one2many('stock.quant', 'package_id', 'Bulk Content', readonly=True),
        'parent_id': fields.many2one('stock.quant.package', 'Parent Package', help="The package containing this item", ondelete='restrict', readonly=True),
        'children_ids': fields.one2many('stock.quant.package', 'parent_id', 'Contained Packages', readonly=True),
        'company_id': fields.function(_get_package_info, type="many2one", relation='res.company', string='Company', multi="package",
                                    store={
                                       'stock.quant': (_get_packages, ['company_id'], 10),
                                       'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10),
                                    }, readonly=True, select=True),
        'owner_id': fields.function(_get_package_info, type='many2one', relation='res.partner', string='Owner', multi="package",
                                    store={
                                       'stock.quant': (_get_packages, ['owner_id'], 10),
                                       'stock.quant.package': (_get_packages_to_relocate, ['quant_ids', 'children_ids', 'parent_id'], 10),
                                    }, readonly=True, select=True),
    }
    _defaults = {
        'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').get(cr, uid, 'stock.quant.package') or _('Unknown Pack')
    }

    def _check_location_constraint(self, cr, uid, packs, context=None):
        '''checks that all quants in a package are stored in the same location.
This function cannot be used as a constraint because it needs to be checked on pack operations (they may not call write on the package) ''' quant_obj = self.pool.get('stock.quant') for pack in packs: parent = pack while parent.parent_id: parent = parent.parent_id quant_ids = self.get_content(cr, uid, [parent.id], context=context) quants = [x for x in quant_obj.browse(cr, uid, quant_ids, context=context) if x.qty > 0] location_id = quants and quants[0].location_id.id or False if not [quant.location_id.id == location_id for quant in quants]: raise osv.except_osv(_('Error'), _('Everything inside a package should be in the same location')) return True def action_print(self, cr, uid, ids, context=None): context = dict(context or {}, active_ids=ids) return self.pool.get("report").get_action(cr, uid, ids, 'stock.report_package_barcode_small', context=context) def unpack(self, cr, uid, ids, context=None): quant_obj = self.pool.get('stock.quant') for package in self.browse(cr, uid, ids, context=context): quant_ids = [quant.id for quant in package.quant_ids] quant_obj.write(cr, uid, quant_ids, {'package_id': package.parent_id.id or False}, context=context) children_package_ids = [child_package.id for child_package in package.children_ids] self.write(cr, uid, children_package_ids, {'parent_id': package.parent_id.id or False}, context=context) #delete current package since it contains nothing anymore self.unlink(cr, uid, ids, context=context) return self.pool.get('ir.actions.act_window').for_xml_id(cr, uid, 'stock', 'action_package_view', context=context) def get_content(self, cr, uid, ids, context=None): child_package_ids = self.search(cr, uid, [('id', 'child_of', ids)], context=context) return self.pool.get('stock.quant').search(cr, uid, [('package_id', 'in', child_package_ids)], context=context) def get_content_package(self, cr, uid, ids, context=None): quants_ids = self.get_content(cr, uid, ids, context=context) res = self.pool.get('ir.actions.act_window').for_xml_id(cr, 
uid, 'stock', 'quantsact', context=context) res['domain'] = [('id', 'in', quants_ids)] return res def _get_product_total_qty(self, cr, uid, package_record, product_id, context=None): ''' find the total of given product 'product_id' inside the given package 'package_id''' quant_obj = self.pool.get('stock.quant') all_quant_ids = self.get_content(cr, uid, [package_record.id], context=context) total = 0 for quant in quant_obj.browse(cr, uid, all_quant_ids, context=context): if quant.product_id.id == product_id: total += quant.qty return total def _get_all_products_quantities(self, cr, uid, package_id, context=None): '''This function computes the different product quantities for the given package ''' quant_obj = self.pool.get('stock.quant') res = {} for quant in quant_obj.browse(cr, uid, self.get_content(cr, uid, package_id, context=context)): if quant.product_id.id not in res: res[quant.product_id.id] = 0 res[quant.product_id.id] += quant.qty return res def copy_pack(self, cr, uid, id, default_pack_values=None, default=None, context=None): stock_pack_operation_obj = self.pool.get('stock.pack.operation') if default is None: default = {} new_package_id = self.copy(cr, uid, id, default_pack_values, context=context) default['result_package_id'] = new_package_id op_ids = stock_pack_operation_obj.search(cr, uid, [('result_package_id', '=', id)], context=context) for op_id in op_ids: stock_pack_operation_obj.copy(cr, uid, op_id, default, context=context) class stock_pack_operation(osv.osv): _name = "stock.pack.operation" _description = "Packing Operation" def _get_remaining_prod_quantities(self, cr, uid, operation, context=None): '''Get the remaining quantities per product on an operation with a package. 
This function returns a dictionary''' #if the operation doesn't concern a package, it's not relevant to call this function if not operation.package_id or operation.product_id: return {operation.product_id.id: operation.remaining_qty} #get the total of products the package contains res = self.pool.get('stock.quant.package')._get_all_products_quantities(cr, uid, operation.package_id.id, context=context) #reduce by the quantities linked to a move for record in operation.linked_move_operation_ids: if record.move_id.product_id.id not in res: res[record.move_id.product_id.id] = 0 res[record.move_id.product_id.id] -= record.qty return res def _get_remaining_qty(self, cr, uid, ids, name, args, context=None): uom_obj = self.pool.get('product.uom') res = {} for ops in self.browse(cr, uid, ids, context=context): res[ops.id] = 0 if ops.package_id and not ops.product_id: #dont try to compute the remaining quantity for packages because it's not relevant (a package could include different products). #should use _get_remaining_prod_quantities instead continue else: qty = ops.product_qty if ops.product_uom_id: qty = uom_obj._compute_qty_obj(cr, uid, ops.product_uom_id, ops.product_qty, ops.product_id.uom_id, context=context) for record in ops.linked_move_operation_ids: qty -= record.qty res[ops.id] = float_round(qty, precision_rounding=ops.product_id.uom_id.rounding) return res def product_id_change(self, cr, uid, ids, product_id, product_uom_id, product_qty, context=None): res = self.on_change_tests(cr, uid, ids, product_id, product_uom_id, product_qty, context=context) if product_id and not product_uom_id: product = self.pool.get('product.product').browse(cr, uid, product_id, context=context) res['value']['product_uom_id'] = product.uom_id.id return res def on_change_tests(self, cr, uid, ids, product_id, product_uom_id, product_qty, context=None): res = {'value': {}} uom_obj = self.pool.get('product.uom') if product_id: product = self.pool.get('product.product').browse(cr, uid, 
product_id, context=context) product_uom_id = product_uom_id or product.uom_id.id selected_uom = uom_obj.browse(cr, uid, product_uom_id, context=context) if selected_uom.category_id.id != product.uom_id.category_id.id: res['warning'] = { 'title': _('Warning: wrong UoM!'), 'message': _('The selected UoM for product %s is not compatible with the UoM set on the product form. \nPlease choose an UoM within the same UoM category.') % (product.name) } if product_qty and 'warning' not in res: rounded_qty = uom_obj._compute_qty(cr, uid, product_uom_id, product_qty, product_uom_id, round=True) if rounded_qty != product_qty: res['warning'] = { 'title': _('Warning: wrong quantity!'), 'message': _('The chosen quantity for product %s is not compatible with the UoM rounding. It will be automatically converted at confirmation') % (product.name) } return res _columns = { 'picking_id': fields.many2one('stock.picking', 'Stock Picking', help='The stock operation where the packing has been made', required=True), 'product_id': fields.many2one('product.product', 'Product', ondelete="CASCADE"), # 1 'product_uom_id': fields.many2one('product.uom', 'Product Unit of Measure'), 'product_qty': fields.float('Quantity', digits_compute=dp.get_precision('Product Unit of Measure'), required=True), 'qty_done': fields.float('Quantity Processed', digits_compute=dp.get_precision('Product Unit of Measure')), 'package_id': fields.many2one('stock.quant.package', 'Source Package'), # 2 'lot_id': fields.many2one('stock.production.lot', 'Lot/Serial Number'), 'result_package_id': fields.many2one('stock.quant.package', 'Destination Package', help="If set, the operations are packed into this package", required=False, ondelete='cascade'), 'date': fields.datetime('Date', required=True), 'owner_id': fields.many2one('res.partner', 'Owner', help="Owner of the quants"), #'update_cost': fields.boolean('Need cost update'), 'cost': fields.float("Cost", help="Unit Cost for this product line"), 'currency': 
fields.many2one('res.currency', string="Currency", help="Currency in which Unit cost is expressed", ondelete='CASCADE'), 'linked_move_operation_ids': fields.one2many('stock.move.operation.link', 'operation_id', string='Linked Moves', readonly=True, help='Moves impacted by this operation for the computation of the remaining quantities'), 'remaining_qty': fields.function(_get_remaining_qty, type='float', digits = 0, string="Remaining Qty", help="Remaining quantity in default UoM according to moves matched with this operation. "), 'location_id': fields.many2one('stock.location', 'Source Location', required=True), 'location_dest_id': fields.many2one('stock.location', 'Destination Location', required=True), 'processed': fields.selection([('true','Yes'), ('false','No')],'Has been processed?', required=True), } _defaults = { 'date': fields.date.context_today, 'qty_done': 0, 'processed': lambda *a: 'false', } def write(self, cr, uid, ids, vals, context=None): context = context or {} res = super(stock_pack_operation, self).write(cr, uid, ids, vals, context=context) if isinstance(ids, (int, long)): ids = [ids] if not context.get("no_recompute"): pickings = vals.get('picking_id') and [vals['picking_id']] or list(set([x.picking_id.id for x in self.browse(cr, uid, ids, context=context)])) self.pool.get("stock.picking").do_recompute_remaining_quantities(cr, uid, pickings, context=context) return res def create(self, cr, uid, vals, context=None): context = context or {} res_id = super(stock_pack_operation, self).create(cr, uid, vals, context=context) if vals.get("picking_id") and not context.get("no_recompute"): self.pool.get("stock.picking").do_recompute_remaining_quantities(cr, uid, [vals['picking_id']], context=context) return res_id def action_drop_down(self, cr, uid, ids, context=None): ''' Used by barcode interface to say that pack_operation has been moved from src location to destination location, if qty_done is less than product_qty than we have to split the operation in 
two to process the one with the qty moved ''' processed_ids = [] move_obj = self.pool.get("stock.move") for pack_op in self.browse(cr, uid, ids, context=None): if pack_op.product_id and pack_op.location_id and pack_op.location_dest_id: move_obj.check_tracking_product(cr, uid, pack_op.product_id, pack_op.lot_id.id, pack_op.location_id, pack_op.location_dest_id, context=context) op = pack_op.id if pack_op.qty_done < pack_op.product_qty: # we split the operation in two op = self.copy(cr, uid, pack_op.id, {'product_qty': pack_op.qty_done, 'qty_done': pack_op.qty_done}, context=context) self.write(cr, uid, [pack_op.id], {'product_qty': pack_op.product_qty - pack_op.qty_done, 'qty_done': 0, 'lot_id': False}, context=context) processed_ids.append(op) self.write(cr, uid, processed_ids, {'processed': 'true'}, context=context) def create_and_assign_lot(self, cr, uid, id, name, context=None): ''' Used by barcode interface to create a new lot and assign it to the operation ''' obj = self.browse(cr,uid,id,context) product_id = obj.product_id.id val = {'product_id': product_id} new_lot_id = False if name: lots = self.pool.get('stock.production.lot').search(cr, uid, ['&', ('name', '=', name), ('product_id', '=', product_id)], context=context) if lots: new_lot_id = lots[0] val.update({'name': name}) if not new_lot_id: new_lot_id = self.pool.get('stock.production.lot').create(cr, uid, val, context=context) self.write(cr, uid, id, {'lot_id': new_lot_id}, context=context) def _search_and_increment(self, cr, uid, picking_id, domain, filter_visible=False, visible_op_ids=False, increment=True, context=None): '''Search for an operation with given 'domain' in a picking, if it exists increment the qty (+1) otherwise create it :param domain: list of tuple directly reusable as a domain context can receive a key 'current_package_id' with the package to consider for this operation returns True ''' if context is None: context = {} #if current_package_id is given in the context, we increase the 
number of items in this package package_clause = [('result_package_id', '=', context.get('current_package_id', False))] existing_operation_ids = self.search(cr, uid, [('picking_id', '=', picking_id)] + domain + package_clause, context=context) todo_operation_ids = [] if existing_operation_ids: if filter_visible: todo_operation_ids = [val for val in existing_operation_ids if val in visible_op_ids] else: todo_operation_ids = existing_operation_ids if todo_operation_ids: #existing operation found for the given domain and picking => increment its quantity operation_id = todo_operation_ids[0] op_obj = self.browse(cr, uid, operation_id, context=context) qty = op_obj.qty_done if increment: qty += 1 else: qty -= 1 if qty >= 1 else 0 if qty == 0 and op_obj.product_qty == 0: #we have a line with 0 qty set, so delete it self.unlink(cr, uid, [operation_id], context=context) return False self.write(cr, uid, [operation_id], {'qty_done': qty}, context=context) else: #no existing operation found for the given domain and picking => create a new one picking_obj = self.pool.get("stock.picking") picking = picking_obj.browse(cr, uid, picking_id, context=context) values = { 'picking_id': picking_id, 'product_qty': 0, 'location_id': picking.location_id.id, 'location_dest_id': picking.location_dest_id.id, 'qty_done': 1, } for key in domain: var_name, dummy, value = key uom_id = False if var_name == 'product_id': uom_id = self.pool.get('product.product').browse(cr, uid, value, context=context).uom_id.id update_dict = {var_name: value} if uom_id: update_dict['product_uom_id'] = uom_id values.update(update_dict) operation_id = self.create(cr, uid, values, context=context) return operation_id class stock_move_operation_link(osv.osv): """ Table making the link between stock.moves and stock.pack.operations to compute the remaining quantities on each of these objects """ _name = "stock.move.operation.link" _description = "Link between stock moves and pack operations" _columns = { 'qty': 
fields.float('Quantity', help="Quantity of products to consider when talking about the contribution of this pack operation towards the remaining quantity of the move (and inverse). Given in the product main uom."), 'operation_id': fields.many2one('stock.pack.operation', 'Operation', required=True, ondelete="cascade"), 'move_id': fields.many2one('stock.move', 'Move', required=True, ondelete="cascade"), 'reserved_quant_id': fields.many2one('stock.quant', 'Reserved Quant', help="Technical field containing the quant that created this link between an operation and a stock move. Used at the stock_move_obj.action_done() time to avoid seeking a matching quant again"), } def get_specific_domain(self, cr, uid, record, context=None): '''Returns the specific domain to consider for quant selection in action_assign() or action_done() of stock.move, having the record given as parameter making the link between the stock move and a pack operation''' op = record.operation_id domain = [] if op.package_id and op.product_id: #if removing a product from a box, we restrict the choice of quants to this box domain.append(('package_id', '=', op.package_id.id)) elif op.package_id: #if moving a box, we allow to take everything from inside boxes as well domain.append(('package_id', 'child_of', [op.package_id.id])) else: #if not given any information about package, we don't open boxes domain.append(('package_id', '=', False)) #if lot info is given, we restrict choice to this lot otherwise we can take any if op.lot_id: domain.append(('lot_id', '=', op.lot_id.id)) #if owner info is given, we restrict to this owner otherwise we restrict to no owner if op.owner_id: domain.append(('owner_id', '=', op.owner_id.id)) else: domain.append(('owner_id', '=', False)) return domain class stock_warehouse_orderpoint(osv.osv): """ Defines Minimum stock rules. 
""" _name = "stock.warehouse.orderpoint" _description = "Minimum Inventory Rule" def subtract_procurements(self, cr, uid, orderpoint, context=None): '''This function returns quantity of product that needs to be deducted from the orderpoint computed quantity because there's already a procurement created with aim to fulfill it. ''' qty = 0 uom_obj = self.pool.get("product.uom") for procurement in orderpoint.procurement_ids: if procurement.state in ('cancel', 'done'): continue procurement_qty = uom_obj._compute_qty_obj(cr, uid, procurement.product_uom, procurement.product_qty, procurement.product_id.uom_id, context=context) for move in procurement.move_ids: #need to add the moves in draft as they aren't in the virtual quantity + moves that have not been created yet if move.state not in ('draft'): #if move is already confirmed, assigned or done, the virtual stock is already taking this into account so it shouldn't be deducted procurement_qty -= move.product_qty qty += procurement_qty return qty def _check_product_uom(self, cr, uid, ids, context=None): ''' Check if the UoM has the same category as the product standard UoM ''' if not context: context = {} for rule in self.browse(cr, uid, ids, context=context): if rule.product_id.uom_id.category_id.id != rule.product_uom.category_id.id: return False return True def action_view_proc_to_process(self, cr, uid, ids, context=None): act_obj = self.pool.get('ir.actions.act_window') mod_obj = self.pool.get('ir.model.data') proc_ids = self.pool.get('procurement.order').search(cr, uid, [('orderpoint_id', 'in', ids), ('state', 'not in', ('done', 'cancel'))], context=context) result = mod_obj.get_object_reference(cr, uid, 'procurement', 'do_view_procurements') if not result: return False result = act_obj.read(cr, uid, [result[1]], context=context)[0] result['domain'] = "[('id', 'in', [" + ','.join(map(str, proc_ids)) + "])]" return result _columns = { 'name': fields.char('Name', required=True, copy=False), 'active': 
fields.boolean('Active', help="If the active field is set to False, it will allow you to hide the orderpoint without removing it."), 'logic': fields.selection([('max', 'Order to Max'), ('price', 'Best price (not yet active!)')], 'Reordering Mode', required=True), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', required=True, ondelete="cascade"), 'location_id': fields.many2one('stock.location', 'Location', required=True, ondelete="cascade"), 'product_id': fields.many2one('product.product', 'Product', required=True, ondelete='cascade', domain=[('type', '=', 'product')]), 'product_uom': fields.related('product_id', 'uom_id', type='many2one', relation='product.uom', string='Product Unit of Measure', readonly=True, required=True), 'product_min_qty': fields.float('Minimum Quantity', required=True, digits_compute=dp.get_precision('Product Unit of Measure'), help="When the virtual stock goes below the Min Quantity specified for this field, Odoo generates "\ "a procurement to bring the forecasted quantity to the Max Quantity."), 'product_max_qty': fields.float('Maximum Quantity', required=True, digits_compute=dp.get_precision('Product Unit of Measure'), help="When the virtual stock goes below the Min Quantity, Odoo generates "\ "a procurement to bring the forecasted quantity to the Quantity specified as Max Quantity."), 'qty_multiple': fields.float('Qty Multiple', required=True, digits_compute=dp.get_precision('Product Unit of Measure'), help="The procurement quantity will be rounded up to this multiple. If it is 0, the exact quantity will be used. "), 'procurement_ids': fields.one2many('procurement.order', 'orderpoint_id', 'Created Procurements'), 'group_id': fields.many2one('procurement.group', 'Procurement Group', help="Moves created through this orderpoint will be put in this procurement group. 
If none is given, the moves generated by procurement rules will be grouped into one big picking.", copy=False), 'company_id': fields.many2one('res.company', 'Company', required=True), } _defaults = { 'active': lambda *a: 1, 'logic': lambda *a: 'max', 'qty_multiple': lambda *a: 1, 'name': lambda self, cr, uid, context: self.pool.get('ir.sequence').get(cr, uid, 'stock.orderpoint') or '', 'product_uom': lambda self, cr, uid, context: context.get('product_uom', False), 'company_id': lambda self, cr, uid, context: self.pool.get('res.company')._company_default_get(cr, uid, 'stock.warehouse.orderpoint', context=context) } _sql_constraints = [ ('qty_multiple_check', 'CHECK( qty_multiple >= 0 )', 'Qty Multiple must be greater than or equal to zero.'), ] _constraints = [ (_check_product_uom, 'You have to select a product unit of measure in the same category than the default unit of measure of the product', ['product_id', 'product_uom']), ] def default_get(self, cr, uid, fields, context=None): warehouse_obj = self.pool.get('stock.warehouse') res = super(stock_warehouse_orderpoint, self).default_get(cr, uid, fields, context) # default 'warehouse_id' and 'location_id' if 'warehouse_id' not in res: warehouse_ids = res.get('company_id') and warehouse_obj.search(cr, uid, [('company_id', '=', res['company_id'])], limit=1, context=context) or [] res['warehouse_id'] = warehouse_ids and warehouse_ids[0] or False if 'location_id' not in res: res['location_id'] = res.get('warehouse_id') and warehouse_obj.browse(cr, uid, res['warehouse_id'], context).lot_stock_id.id or False return res def onchange_warehouse_id(self, cr, uid, ids, warehouse_id, context=None): """ Finds location id for changed warehouse. @param warehouse_id: Changed id of warehouse. @return: Dictionary of values. 
""" if warehouse_id: w = self.pool.get('stock.warehouse').browse(cr, uid, warehouse_id, context=context) v = {'location_id': w.lot_stock_id.id} return {'value': v} return {} def onchange_product_id(self, cr, uid, ids, product_id, context=None): """ Finds UoM for changed product. @param product_id: Changed id of product. @return: Dictionary of values. """ if product_id: prod = self.pool.get('product.product').browse(cr, uid, product_id, context=context) d = {'product_uom': [('category_id', '=', prod.uom_id.category_id.id)]} v = {'product_uom': prod.uom_id.id} return {'value': v, 'domain': d} return {'domain': {'product_uom': []}} class stock_picking_type(osv.osv): _name = "stock.picking.type" _description = "The picking type determines the picking view" _order = 'sequence' def open_barcode_interface(self, cr, uid, ids, context=None): final_url = "/barcode/web/#action=stock.ui&picking_type_id=" + str(ids[0]) if len(ids) else '0' return {'type': 'ir.actions.act_url', 'url': final_url, 'target': 'self'} def _get_tristate_values(self, cr, uid, ids, field_name, arg, context=None): picking_obj = self.pool.get('stock.picking') res = {} for picking_type_id in ids: #get last 10 pickings of this type picking_ids = picking_obj.search(cr, uid, [('picking_type_id', '=', picking_type_id), ('state', '=', 'done')], order='date_done desc', limit=10, context=context) tristates = [] for picking in picking_obj.browse(cr, uid, picking_ids, context=context): if picking.date_done > picking.date: tristates.insert(0, {'tooltip': picking.name or '' + ": " + _('Late'), 'value': -1}) elif picking.backorder_id: tristates.insert(0, {'tooltip': picking.name or '' + ": " + _('Backorder exists'), 'value': 0}) else: tristates.insert(0, {'tooltip': picking.name or '' + ": " + _('OK'), 'value': 1}) res[picking_type_id] = json.dumps(tristates) return res def _get_picking_count(self, cr, uid, ids, field_names, arg, context=None): obj = self.pool.get('stock.picking') domains = { 'count_picking_draft': 
[('state', '=', 'draft')], 'count_picking_waiting': [('state', '=', 'confirmed')], 'count_picking_ready': [('state', 'in', ('assigned', 'partially_available'))], 'count_picking': [('state', 'in', ('assigned', 'waiting', 'confirmed', 'partially_available'))], 'count_picking_late': [('min_date', '<', time.strftime(DEFAULT_SERVER_DATETIME_FORMAT)), ('state', 'in', ('assigned', 'waiting', 'confirmed', 'partially_available'))], 'count_picking_backorders': [('backorder_id', '!=', False), ('state', 'in', ('confirmed', 'assigned', 'waiting', 'partially_available'))], } result = {} for field in domains: data = obj.read_group(cr, uid, domains[field] + [('state', 'not in', ('done', 'cancel')), ('picking_type_id', 'in', ids)], ['picking_type_id'], ['picking_type_id'], context=context) count = dict(map(lambda x: (x['picking_type_id'] and x['picking_type_id'][0], x['picking_type_id_count']), data)) for tid in ids: result.setdefault(tid, {})[field] = count.get(tid, 0) for tid in ids: if result[tid]['count_picking']: result[tid]['rate_picking_late'] = result[tid]['count_picking_late'] * 100 / result[tid]['count_picking'] result[tid]['rate_picking_backorders'] = result[tid]['count_picking_backorders'] * 100 / result[tid]['count_picking'] else: result[tid]['rate_picking_late'] = 0 result[tid]['rate_picking_backorders'] = 0 return result def onchange_picking_code(self, cr, uid, ids, picking_code=False): if not picking_code: return False obj_data = self.pool.get('ir.model.data') stock_loc = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_stock') result = { 'default_location_src_id': stock_loc, 'default_location_dest_id': stock_loc, } if picking_code == 'incoming': result['default_location_src_id'] = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_suppliers') elif picking_code == 'outgoing': result['default_location_dest_id'] = obj_data.xmlid_to_res_id(cr, uid, 'stock.stock_location_customers') return {'value': result} def _get_name(self, cr, uid, ids, field_names, arg, 
context=None): return dict(self.name_get(cr, uid, ids, context=context)) def name_get(self, cr, uid, ids, context=None): """Overides orm name_get method to display 'Warehouse_name: PickingType_name' """ if context is None: context = {} if not isinstance(ids, list): ids = [ids] res = [] if not ids: return res for record in self.browse(cr, uid, ids, context=context): name = record.name if record.warehouse_id: name = record.warehouse_id.name + ': ' +name if context.get('special_shortened_wh_name'): if record.warehouse_id: name = record.warehouse_id.name else: name = _('Customer') + ' (' + record.name + ')' res.append((record.id, name)) return res def _default_warehouse(self, cr, uid, context=None): user = self.pool.get('res.users').browse(cr, uid, uid, context) res = self.pool.get('stock.warehouse').search(cr, uid, [('company_id', '=', user.company_id.id)], limit=1, context=context) return res and res[0] or False _columns = { 'name': fields.char('Picking Type Name', translate=True, required=True), 'complete_name': fields.function(_get_name, type='char', string='Name'), 'color': fields.integer('Color'), 'sequence': fields.integer('Sequence', help="Used to order the 'All Operations' kanban view"), 'sequence_id': fields.many2one('ir.sequence', 'Reference Sequence', required=True), 'default_location_src_id': fields.many2one('stock.location', 'Default Source Location'), 'default_location_dest_id': fields.many2one('stock.location', 'Default Destination Location'), 'code': fields.selection([('incoming', 'Suppliers'), ('outgoing', 'Customers'), ('internal', 'Internal')], 'Type of Operation', required=True), 'return_picking_type_id': fields.many2one('stock.picking.type', 'Picking Type for Returns'), 'warehouse_id': fields.many2one('stock.warehouse', 'Warehouse', ondelete='cascade'), 'active': fields.boolean('Active'), # Statistics for the kanban view 'last_done_picking': fields.function(_get_tristate_values, type='char', string='Last 10 Done Pickings'), 'count_picking_draft': 
fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'count_picking_ready': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'count_picking': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'count_picking_waiting': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'count_picking_late': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'count_picking_backorders': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'rate_picking_late': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), 'rate_picking_backorders': fields.function(_get_picking_count, type='integer', multi='_get_picking_count'), } _defaults = { 'warehouse_id': _default_warehouse, 'active': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
white-wolf-17/hero-instantcms
templates/hero/node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/ninja_syntax.py
2485
5536
# This file comes from # https://github.com/martine/ninja/blob/master/misc/ninja_syntax.py # Do not edit! Edit the upstream one instead. """Python module for generating .ninja files. Note that this is emphatically not a required piece of Ninja; it's just a helpful utility for build-file-generation systems that already use Python. """ import textwrap import re def escape_path(word): return word.replace('$ ','$$ ').replace(' ','$ ').replace(':', '$:') class Writer(object): def __init__(self, output, width=78): self.output = output self.width = width def newline(self): self.output.write('\n') def comment(self, text): for line in textwrap.wrap(text, self.width - 2): self.output.write('# ' + line + '\n') def variable(self, key, value, indent=0): if value is None: return if isinstance(value, list): value = ' '.join(filter(None, value)) # Filter out empty strings. self._line('%s = %s' % (key, value), indent) def pool(self, name, depth): self._line('pool %s' % name) self.variable('depth', depth, indent=1) def rule(self, name, command, description=None, depfile=None, generator=False, pool=None, restat=False, rspfile=None, rspfile_content=None, deps=None): self._line('rule %s' % name) self.variable('command', command, indent=1) if description: self.variable('description', description, indent=1) if depfile: self.variable('depfile', depfile, indent=1) if generator: self.variable('generator', '1', indent=1) if pool: self.variable('pool', pool, indent=1) if restat: self.variable('restat', '1', indent=1) if rspfile: self.variable('rspfile', rspfile, indent=1) if rspfile_content: self.variable('rspfile_content', rspfile_content, indent=1) if deps: self.variable('deps', deps, indent=1) def build(self, outputs, rule, inputs=None, implicit=None, order_only=None, variables=None): outputs = self._as_list(outputs) all_inputs = self._as_list(inputs)[:] out_outputs = list(map(escape_path, outputs)) all_inputs = list(map(escape_path, all_inputs)) if implicit: implicit = map(escape_path, 
self._as_list(implicit)) all_inputs.append('|') all_inputs.extend(implicit) if order_only: order_only = map(escape_path, self._as_list(order_only)) all_inputs.append('||') all_inputs.extend(order_only) self._line('build %s: %s' % (' '.join(out_outputs), ' '.join([rule] + all_inputs))) if variables: if isinstance(variables, dict): iterator = iter(variables.items()) else: iterator = iter(variables) for key, val in iterator: self.variable(key, val, indent=1) return outputs def include(self, path): self._line('include %s' % path) def subninja(self, path): self._line('subninja %s' % path) def default(self, paths): self._line('default %s' % ' '.join(self._as_list(paths))) def _count_dollars_before_index(self, s, i): """Returns the number of '$' characters right in front of s[i].""" dollar_count = 0 dollar_index = i - 1 while dollar_index > 0 and s[dollar_index] == '$': dollar_count += 1 dollar_index -= 1 return dollar_count def _line(self, text, indent=0): """Write 'text' word-wrapped at self.width characters.""" leading_space = ' ' * indent while len(leading_space) + len(text) > self.width: # The text is too wide; wrap if possible. # Find the rightmost space that would obey our width constraint and # that's not an escaped space. available_space = self.width - len(leading_space) - len(' $') space = available_space while True: space = text.rfind(' ', 0, space) if space < 0 or \ self._count_dollars_before_index(text, space) % 2 == 0: break if space < 0: # No such space; just use the first unescaped space we can find. space = available_space - 1 while True: space = text.find(' ', space + 1) if space < 0 or \ self._count_dollars_before_index(text, space) % 2 == 0: break if space < 0: # Give up on breaking. break self.output.write(leading_space + text[0:space] + ' $\n') text = text[space+1:] # Subsequent lines are continuations, so indent them. 
leading_space = ' ' * (indent+2) self.output.write(leading_space + text + '\n') def _as_list(self, input): if input is None: return [] if isinstance(input, list): return input return [input] def escape(string): """Escape a string such that it can be embedded into a Ninja file without further interpretation.""" assert '\n' not in string, 'Ninja syntax does not allow newlines' # We only have one special metacharacter: '$'. return string.replace('$', '$$')
gpl-2.0
merc-devel/merc
merc/errors.py
1
2503
from merc import message class BaseError(Exception, message.Reply): pass class Error(Exception, message.Message): NAME = "ERROR" FORCE_TRAILING = True def __init__(self, reason): self.reason = reason def as_params(self, client): return [self.reason] class LinkError(Error): NAME = "ERROR" FORCE_TRAILING = True def as_params(self, client): host, *_ = client.protocol.transport.get_extra_info("peername") return ["Closing link: {} ({})".format(host, self.reason)] class SimpleError(BaseError): def as_reply_params(self): return [self.REASON] class ParametrizedError(BaseError): def __init__(self, *params): self.params = params def as_reply_params(self): return list(self.params) + [self.REASON] class NoSuchNick(ParametrizedError): NAME = "401" REASON = "No such nick/channel" class NoSuchServer(ParametrizedError): NAME = "402" REASON = "No such server" class NoSuchChannel(ParametrizedError): NAME = "403" REASON = "No such channel" class CannotSendToChan(ParametrizedError): NAME = "404" REASON = "Cannot send to channel" class ErroneousNickname(SimpleError): NAME = "432" REASON = "Erroneous nickname" class NicknameInUse(ParametrizedError): NAME = "433" REASON = "Nickname in use" class NotRegistered(SimpleError): NAME = "451" REASON = "You have not registered" class NeedMoreParams(ParametrizedError): NAME = "461" REASON = "Not enough parameters" class UnknownCommand(ParametrizedError): NAME = "421" REASON = "Unknown command" class UnknownMode(ParametrizedError): NAME = "472" REASON = "is an unknown mode char to me" class UmodeUnknownFlag(SimpleError): NAME = "501" REASON = "Unknown MODE flag" class UsersDontMatch(SimpleError): NAME = "502" REASON = "Can't change mode for other users" class ChanOpPrivsNeeded(ParametrizedError): NAME = "482" REASON = "You're not a channel operator" class NotOnChannel(ParametrizedError): NAME = "442" REASON = "You're not on that channel" class AlreadyOnChannel(ParametrizedError): NAME = "443" REASON = "is already on channel" class 
PasswordMismatch(SimpleError): NAME = "464" REASON = "Password mismatch" class NoPrivileges(SimpleError): NAME = "481" REASON = "You're not an IRC operator" class BannedFromChannel(ParametrizedError): NAME = "474" REASON = "You are banned from the channel" class AlreadyRegistered(SimpleError): NAME = "462" REASON = "You may not reregister"
mit
fitermay/intellij-community
python/helpers/py2only/docutils/utils/math/unichar2tex.py
130
17590
# LaTeX math to Unicode symbols translation table # for use with the translate() method of unicode objects. # Generated with ``write_unichar2tex.py`` from the data in # http://milde.users.sourceforge.net/LUCR/Math/ # Includes commands from: standard LaTeX, amssymb, amsmath uni2tex_table = { 160: u'~', 163: u'\\pounds ', 165: u'\\yen ', 172: u'\\neg ', 174: u'\\circledR ', 177: u'\\pm ', 215: u'\\times ', 240: u'\\eth ', 247: u'\\div ', 305: u'\\imath ', 567: u'\\jmath ', 915: u'\\Gamma ', 916: u'\\Delta ', 920: u'\\Theta ', 923: u'\\Lambda ', 926: u'\\Xi ', 928: u'\\Pi ', 931: u'\\Sigma ', 933: u'\\Upsilon ', 934: u'\\Phi ', 936: u'\\Psi ', 937: u'\\Omega ', 945: u'\\alpha ', 946: u'\\beta ', 947: u'\\gamma ', 948: u'\\delta ', 949: u'\\varepsilon ', 950: u'\\zeta ', 951: u'\\eta ', 952: u'\\theta ', 953: u'\\iota ', 954: u'\\kappa ', 955: u'\\lambda ', 956: u'\\mu ', 957: u'\\nu ', 958: u'\\xi ', 960: u'\\pi ', 961: u'\\rho ', 962: u'\\varsigma ', 963: u'\\sigma ', 964: u'\\tau ', 965: u'\\upsilon ', 966: u'\\varphi ', 967: u'\\chi ', 968: u'\\psi ', 969: u'\\omega ', 977: u'\\vartheta ', 981: u'\\phi ', 982: u'\\varpi ', 989: u'\\digamma ', 1014: u'\\backepsilon ', 8193: u'\\quad ', 8214: u'\\| ', 8224: u'\\dagger ', 8225: u'\\ddagger ', 8230: u'\\ldots ', 8242: u'\\prime ', 8245: u'\\backprime ', 8287: u'\\: ', 8450: u'\\mathbb{C}', 8459: u'\\mathcal{H}', 8460: u'\\mathfrak{H}', 8461: u'\\mathbb{H}', 8463: u'\\hslash ', 8464: u'\\mathcal{I}', 8465: u'\\Im ', 8466: u'\\mathcal{L}', 8467: u'\\ell ', 8469: u'\\mathbb{N}', 8472: u'\\wp ', 8473: u'\\mathbb{P}', 8474: u'\\mathbb{Q}', 8475: u'\\mathcal{R}', 8476: u'\\Re ', 8477: u'\\mathbb{R}', 8484: u'\\mathbb{Z}', 8487: u'\\mho ', 8488: u'\\mathfrak{Z}', 8492: u'\\mathcal{B}', 8493: u'\\mathfrak{C}', 8496: u'\\mathcal{E}', 8497: u'\\mathcal{F}', 8498: u'\\Finv ', 8499: u'\\mathcal{M}', 8501: u'\\aleph ', 8502: u'\\beth ', 8503: u'\\gimel ', 8504: u'\\daleth ', 8592: u'\\leftarrow ', 8593: u'\\uparrow ', 8594: 
u'\\rightarrow ', 8595: u'\\downarrow ', 8596: u'\\leftrightarrow ', 8597: u'\\updownarrow ', 8598: u'\\nwarrow ', 8599: u'\\nearrow ', 8600: u'\\searrow ', 8601: u'\\swarrow ', 8602: u'\\nleftarrow ', 8603: u'\\nrightarrow ', 8606: u'\\twoheadleftarrow ', 8608: u'\\twoheadrightarrow ', 8610: u'\\leftarrowtail ', 8611: u'\\rightarrowtail ', 8614: u'\\mapsto ', 8617: u'\\hookleftarrow ', 8618: u'\\hookrightarrow ', 8619: u'\\looparrowleft ', 8620: u'\\looparrowright ', 8621: u'\\leftrightsquigarrow ', 8622: u'\\nleftrightarrow ', 8624: u'\\Lsh ', 8625: u'\\Rsh ', 8630: u'\\curvearrowleft ', 8631: u'\\curvearrowright ', 8634: u'\\circlearrowleft ', 8635: u'\\circlearrowright ', 8636: u'\\leftharpoonup ', 8637: u'\\leftharpoondown ', 8638: u'\\upharpoonright ', 8639: u'\\upharpoonleft ', 8640: u'\\rightharpoonup ', 8641: u'\\rightharpoondown ', 8642: u'\\downharpoonright ', 8643: u'\\downharpoonleft ', 8644: u'\\rightleftarrows ', 8646: u'\\leftrightarrows ', 8647: u'\\leftleftarrows ', 8648: u'\\upuparrows ', 8649: u'\\rightrightarrows ', 8650: u'\\downdownarrows ', 8651: u'\\leftrightharpoons ', 8652: u'\\rightleftharpoons ', 8653: u'\\nLeftarrow ', 8654: u'\\nLeftrightarrow ', 8655: u'\\nRightarrow ', 8656: u'\\Leftarrow ', 8657: u'\\Uparrow ', 8658: u'\\Rightarrow ', 8659: u'\\Downarrow ', 8660: u'\\Leftrightarrow ', 8661: u'\\Updownarrow ', 8666: u'\\Lleftarrow ', 8667: u'\\Rrightarrow ', 8669: u'\\rightsquigarrow ', 8672: u'\\dashleftarrow ', 8674: u'\\dashrightarrow ', 8704: u'\\forall ', 8705: u'\\complement ', 8706: u'\\partial ', 8707: u'\\exists ', 8708: u'\\nexists ', 8709: u'\\varnothing ', 8711: u'\\nabla ', 8712: u'\\in ', 8713: u'\\notin ', 8715: u'\\ni ', 8719: u'\\prod ', 8720: u'\\coprod ', 8721: u'\\sum ', 8722: u'-', 8723: u'\\mp ', 8724: u'\\dotplus ', 8725: u'\\slash ', 8726: u'\\smallsetminus ', 8727: u'\\ast ', 8728: u'\\circ ', 8729: u'\\bullet ', 8730: u'\\sqrt ', 8731: u'\\sqrt[3] ', 8732: u'\\sqrt[4] ', 8733: u'\\propto ', 8734: u'\\infty 
', 8736: u'\\angle ', 8737: u'\\measuredangle ', 8738: u'\\sphericalangle ', 8739: u'\\mid ', 8740: u'\\nmid ', 8741: u'\\parallel ', 8742: u'\\nparallel ', 8743: u'\\wedge ', 8744: u'\\vee ', 8745: u'\\cap ', 8746: u'\\cup ', 8747: u'\\int ', 8748: u'\\iint ', 8749: u'\\iiint ', 8750: u'\\oint ', 8756: u'\\therefore ', 8757: u'\\because ', 8758: u':', 8764: u'\\sim ', 8765: u'\\backsim ', 8768: u'\\wr ', 8769: u'\\nsim ', 8770: u'\\eqsim ', 8771: u'\\simeq ', 8773: u'\\cong ', 8775: u'\\ncong ', 8776: u'\\approx ', 8778: u'\\approxeq ', 8781: u'\\asymp ', 8782: u'\\Bumpeq ', 8783: u'\\bumpeq ', 8784: u'\\doteq ', 8785: u'\\Doteq ', 8786: u'\\fallingdotseq ', 8787: u'\\risingdotseq ', 8790: u'\\eqcirc ', 8791: u'\\circeq ', 8796: u'\\triangleq ', 8800: u'\\neq ', 8801: u'\\equiv ', 8804: u'\\leq ', 8805: u'\\geq ', 8806: u'\\leqq ', 8807: u'\\geqq ', 8808: u'\\lneqq ', 8809: u'\\gneqq ', 8810: u'\\ll ', 8811: u'\\gg ', 8812: u'\\between ', 8814: u'\\nless ', 8815: u'\\ngtr ', 8816: u'\\nleq ', 8817: u'\\ngeq ', 8818: u'\\lesssim ', 8819: u'\\gtrsim ', 8822: u'\\lessgtr ', 8823: u'\\gtrless ', 8826: u'\\prec ', 8827: u'\\succ ', 8828: u'\\preccurlyeq ', 8829: u'\\succcurlyeq ', 8830: u'\\precsim ', 8831: u'\\succsim ', 8832: u'\\nprec ', 8833: u'\\nsucc ', 8834: u'\\subset ', 8835: u'\\supset ', 8838: u'\\subseteq ', 8839: u'\\supseteq ', 8840: u'\\nsubseteq ', 8841: u'\\nsupseteq ', 8842: u'\\subsetneq ', 8843: u'\\supsetneq ', 8846: u'\\uplus ', 8847: u'\\sqsubset ', 8848: u'\\sqsupset ', 8849: u'\\sqsubseteq ', 8850: u'\\sqsupseteq ', 8851: u'\\sqcap ', 8852: u'\\sqcup ', 8853: u'\\oplus ', 8854: u'\\ominus ', 8855: u'\\otimes ', 8856: u'\\oslash ', 8857: u'\\odot ', 8858: u'\\circledcirc ', 8859: u'\\circledast ', 8861: u'\\circleddash ', 8862: u'\\boxplus ', 8863: u'\\boxminus ', 8864: u'\\boxtimes ', 8865: u'\\boxdot ', 8866: u'\\vdash ', 8867: u'\\dashv ', 8868: u'\\top ', 8869: u'\\bot ', 8871: u'\\models ', 8872: u'\\vDash ', 8873: u'\\Vdash ', 8874: 
u'\\Vvdash ', 8876: u'\\nvdash ', 8877: u'\\nvDash ', 8878: u'\\nVdash ', 8879: u'\\nVDash ', 8882: u'\\vartriangleleft ', 8883: u'\\vartriangleright ', 8884: u'\\trianglelefteq ', 8885: u'\\trianglerighteq ', 8888: u'\\multimap ', 8890: u'\\intercal ', 8891: u'\\veebar ', 8892: u'\\barwedge ', 8896: u'\\bigwedge ', 8897: u'\\bigvee ', 8898: u'\\bigcap ', 8899: u'\\bigcup ', 8900: u'\\diamond ', 8901: u'\\cdot ', 8902: u'\\star ', 8903: u'\\divideontimes ', 8904: u'\\bowtie ', 8905: u'\\ltimes ', 8906: u'\\rtimes ', 8907: u'\\leftthreetimes ', 8908: u'\\rightthreetimes ', 8909: u'\\backsimeq ', 8910: u'\\curlyvee ', 8911: u'\\curlywedge ', 8912: u'\\Subset ', 8913: u'\\Supset ', 8914: u'\\Cap ', 8915: u'\\Cup ', 8916: u'\\pitchfork ', 8918: u'\\lessdot ', 8919: u'\\gtrdot ', 8920: u'\\lll ', 8921: u'\\ggg ', 8922: u'\\lesseqgtr ', 8923: u'\\gtreqless ', 8926: u'\\curlyeqprec ', 8927: u'\\curlyeqsucc ', 8928: u'\\npreceq ', 8929: u'\\nsucceq ', 8934: u'\\lnsim ', 8935: u'\\gnsim ', 8936: u'\\precnsim ', 8937: u'\\succnsim ', 8938: u'\\ntriangleleft ', 8939: u'\\ntriangleright ', 8940: u'\\ntrianglelefteq ', 8941: u'\\ntrianglerighteq ', 8942: u'\\vdots ', 8943: u'\\cdots ', 8945: u'\\ddots ', 8968: u'\\lceil ', 8969: u'\\rceil ', 8970: u'\\lfloor ', 8971: u'\\rfloor ', 8988: u'\\ulcorner ', 8989: u'\\urcorner ', 8990: u'\\llcorner ', 8991: u'\\lrcorner ', 8994: u'\\frown ', 8995: u'\\smile ', 9182: u'\\overbrace ', 9183: u'\\underbrace ', 9651: u'\\bigtriangleup ', 9655: u'\\rhd ', 9661: u'\\bigtriangledown ', 9665: u'\\lhd ', 9671: u'\\Diamond ', 9674: u'\\lozenge ', 9723: u'\\square ', 9724: u'\\blacksquare ', 9733: u'\\bigstar ', 9824: u'\\spadesuit ', 9825: u'\\heartsuit ', 9826: u'\\diamondsuit ', 9827: u'\\clubsuit ', 9837: u'\\flat ', 9838: u'\\natural ', 9839: u'\\sharp ', 10003: u'\\checkmark ', 10016: u'\\maltese ', 10178: u'\\perp ', 10216: u'\\langle ', 10217: u'\\rangle ', 10222: u'\\lgroup ', 10223: u'\\rgroup ', 10229: u'\\longleftarrow ', 10230: 
u'\\longrightarrow ', 10231: u'\\longleftrightarrow ', 10232: u'\\Longleftarrow ', 10233: u'\\Longrightarrow ', 10234: u'\\Longleftrightarrow ', 10236: u'\\longmapsto ', 10731: u'\\blacklozenge ', 10741: u'\\setminus ', 10752: u'\\bigodot ', 10753: u'\\bigoplus ', 10754: u'\\bigotimes ', 10756: u'\\biguplus ', 10758: u'\\bigsqcup ', 10764: u'\\iiiint ', 10781: u'\\Join ', 10815: u'\\amalg ', 10846: u'\\doublebarwedge ', 10877: u'\\leqslant ', 10878: u'\\geqslant ', 10885: u'\\lessapprox ', 10886: u'\\gtrapprox ', 10887: u'\\lneq ', 10888: u'\\gneq ', 10889: u'\\lnapprox ', 10890: u'\\gnapprox ', 10891: u'\\lesseqqgtr ', 10892: u'\\gtreqqless ', 10901: u'\\eqslantless ', 10902: u'\\eqslantgtr ', 10927: u'\\preceq ', 10928: u'\\succeq ', 10935: u'\\precapprox ', 10936: u'\\succapprox ', 10937: u'\\precnapprox ', 10938: u'\\succnapprox ', 10949: u'\\subseteqq ', 10950: u'\\supseteqq ', 10955: u'\\subsetneqq ', 10956: u'\\supsetneqq ', 119808: u'\\mathbf{A}', 119809: u'\\mathbf{B}', 119810: u'\\mathbf{C}', 119811: u'\\mathbf{D}', 119812: u'\\mathbf{E}', 119813: u'\\mathbf{F}', 119814: u'\\mathbf{G}', 119815: u'\\mathbf{H}', 119816: u'\\mathbf{I}', 119817: u'\\mathbf{J}', 119818: u'\\mathbf{K}', 119819: u'\\mathbf{L}', 119820: u'\\mathbf{M}', 119821: u'\\mathbf{N}', 119822: u'\\mathbf{O}', 119823: u'\\mathbf{P}', 119824: u'\\mathbf{Q}', 119825: u'\\mathbf{R}', 119826: u'\\mathbf{S}', 119827: u'\\mathbf{T}', 119828: u'\\mathbf{U}', 119829: u'\\mathbf{V}', 119830: u'\\mathbf{W}', 119831: u'\\mathbf{X}', 119832: u'\\mathbf{Y}', 119833: u'\\mathbf{Z}', 119834: u'\\mathbf{a}', 119835: u'\\mathbf{b}', 119836: u'\\mathbf{c}', 119837: u'\\mathbf{d}', 119838: u'\\mathbf{e}', 119839: u'\\mathbf{f}', 119840: u'\\mathbf{g}', 119841: u'\\mathbf{h}', 119842: u'\\mathbf{i}', 119843: u'\\mathbf{j}', 119844: u'\\mathbf{k}', 119845: u'\\mathbf{l}', 119846: u'\\mathbf{m}', 119847: u'\\mathbf{n}', 119848: u'\\mathbf{o}', 119849: u'\\mathbf{p}', 119850: u'\\mathbf{q}', 119851: 
u'\\mathbf{r}', 119852: u'\\mathbf{s}', 119853: u'\\mathbf{t}', 119854: u'\\mathbf{u}', 119855: u'\\mathbf{v}', 119856: u'\\mathbf{w}', 119857: u'\\mathbf{x}', 119858: u'\\mathbf{y}', 119859: u'\\mathbf{z}', 119860: u'A', 119861: u'B', 119862: u'C', 119863: u'D', 119864: u'E', 119865: u'F', 119866: u'G', 119867: u'H', 119868: u'I', 119869: u'J', 119870: u'K', 119871: u'L', 119872: u'M', 119873: u'N', 119874: u'O', 119875: u'P', 119876: u'Q', 119877: u'R', 119878: u'S', 119879: u'T', 119880: u'U', 119881: u'V', 119882: u'W', 119883: u'X', 119884: u'Y', 119885: u'Z', 119886: u'a', 119887: u'b', 119888: u'c', 119889: u'd', 119890: u'e', 119891: u'f', 119892: u'g', 119894: u'i', 119895: u'j', 119896: u'k', 119897: u'l', 119898: u'm', 119899: u'n', 119900: u'o', 119901: u'p', 119902: u'q', 119903: u'r', 119904: u's', 119905: u't', 119906: u'u', 119907: u'v', 119908: u'w', 119909: u'x', 119910: u'y', 119911: u'z', 119964: u'\\mathcal{A}', 119966: u'\\mathcal{C}', 119967: u'\\mathcal{D}', 119970: u'\\mathcal{G}', 119973: u'\\mathcal{J}', 119974: u'\\mathcal{K}', 119977: u'\\mathcal{N}', 119978: u'\\mathcal{O}', 119979: u'\\mathcal{P}', 119980: u'\\mathcal{Q}', 119982: u'\\mathcal{S}', 119983: u'\\mathcal{T}', 119984: u'\\mathcal{U}', 119985: u'\\mathcal{V}', 119986: u'\\mathcal{W}', 119987: u'\\mathcal{X}', 119988: u'\\mathcal{Y}', 119989: u'\\mathcal{Z}', 120068: u'\\mathfrak{A}', 120069: u'\\mathfrak{B}', 120071: u'\\mathfrak{D}', 120072: u'\\mathfrak{E}', 120073: u'\\mathfrak{F}', 120074: u'\\mathfrak{G}', 120077: u'\\mathfrak{J}', 120078: u'\\mathfrak{K}', 120079: u'\\mathfrak{L}', 120080: u'\\mathfrak{M}', 120081: u'\\mathfrak{N}', 120082: u'\\mathfrak{O}', 120083: u'\\mathfrak{P}', 120084: u'\\mathfrak{Q}', 120086: u'\\mathfrak{S}', 120087: u'\\mathfrak{T}', 120088: u'\\mathfrak{U}', 120089: u'\\mathfrak{V}', 120090: u'\\mathfrak{W}', 120091: u'\\mathfrak{X}', 120092: u'\\mathfrak{Y}', 120094: u'\\mathfrak{a}', 120095: u'\\mathfrak{b}', 120096: u'\\mathfrak{c}', 
120097: u'\\mathfrak{d}', 120098: u'\\mathfrak{e}', 120099: u'\\mathfrak{f}', 120100: u'\\mathfrak{g}', 120101: u'\\mathfrak{h}', 120102: u'\\mathfrak{i}', 120103: u'\\mathfrak{j}', 120104: u'\\mathfrak{k}', 120105: u'\\mathfrak{l}', 120106: u'\\mathfrak{m}', 120107: u'\\mathfrak{n}', 120108: u'\\mathfrak{o}', 120109: u'\\mathfrak{p}', 120110: u'\\mathfrak{q}', 120111: u'\\mathfrak{r}', 120112: u'\\mathfrak{s}', 120113: u'\\mathfrak{t}', 120114: u'\\mathfrak{u}', 120115: u'\\mathfrak{v}', 120116: u'\\mathfrak{w}', 120117: u'\\mathfrak{x}', 120118: u'\\mathfrak{y}', 120119: u'\\mathfrak{z}', 120120: u'\\mathbb{A}', 120121: u'\\mathbb{B}', 120123: u'\\mathbb{D}', 120124: u'\\mathbb{E}', 120125: u'\\mathbb{F}', 120126: u'\\mathbb{G}', 120128: u'\\mathbb{I}', 120129: u'\\mathbb{J}', 120130: u'\\mathbb{K}', 120131: u'\\mathbb{L}', 120132: u'\\mathbb{M}', 120134: u'\\mathbb{O}', 120138: u'\\mathbb{S}', 120139: u'\\mathbb{T}', 120140: u'\\mathbb{U}', 120141: u'\\mathbb{V}', 120142: u'\\mathbb{W}', 120143: u'\\mathbb{X}', 120144: u'\\mathbb{Y}', 120156: u'\\Bbbk ', 120224: u'\\mathsf{A}', 120225: u'\\mathsf{B}', 120226: u'\\mathsf{C}', 120227: u'\\mathsf{D}', 120228: u'\\mathsf{E}', 120229: u'\\mathsf{F}', 120230: u'\\mathsf{G}', 120231: u'\\mathsf{H}', 120232: u'\\mathsf{I}', 120233: u'\\mathsf{J}', 120234: u'\\mathsf{K}', 120235: u'\\mathsf{L}', 120236: u'\\mathsf{M}', 120237: u'\\mathsf{N}', 120238: u'\\mathsf{O}', 120239: u'\\mathsf{P}', 120240: u'\\mathsf{Q}', 120241: u'\\mathsf{R}', 120242: u'\\mathsf{S}', 120243: u'\\mathsf{T}', 120244: u'\\mathsf{U}', 120245: u'\\mathsf{V}', 120246: u'\\mathsf{W}', 120247: u'\\mathsf{X}', 120248: u'\\mathsf{Y}', 120249: u'\\mathsf{Z}', 120250: u'\\mathsf{a}', 120251: u'\\mathsf{b}', 120252: u'\\mathsf{c}', 120253: u'\\mathsf{d}', 120254: u'\\mathsf{e}', 120255: u'\\mathsf{f}', 120256: u'\\mathsf{g}', 120257: u'\\mathsf{h}', 120258: u'\\mathsf{i}', 120259: u'\\mathsf{j}', 120260: u'\\mathsf{k}', 120261: u'\\mathsf{l}', 120262: 
u'\\mathsf{m}', 120263: u'\\mathsf{n}', 120264: u'\\mathsf{o}', 120265: u'\\mathsf{p}', 120266: u'\\mathsf{q}', 120267: u'\\mathsf{r}', 120268: u'\\mathsf{s}', 120269: u'\\mathsf{t}', 120270: u'\\mathsf{u}', 120271: u'\\mathsf{v}', 120272: u'\\mathsf{w}', 120273: u'\\mathsf{x}', 120274: u'\\mathsf{y}', 120275: u'\\mathsf{z}', 120432: u'\\mathtt{A}', 120433: u'\\mathtt{B}', 120434: u'\\mathtt{C}', 120435: u'\\mathtt{D}', 120436: u'\\mathtt{E}', 120437: u'\\mathtt{F}', 120438: u'\\mathtt{G}', 120439: u'\\mathtt{H}', 120440: u'\\mathtt{I}', 120441: u'\\mathtt{J}', 120442: u'\\mathtt{K}', 120443: u'\\mathtt{L}', 120444: u'\\mathtt{M}', 120445: u'\\mathtt{N}', 120446: u'\\mathtt{O}', 120447: u'\\mathtt{P}', 120448: u'\\mathtt{Q}', 120449: u'\\mathtt{R}', 120450: u'\\mathtt{S}', 120451: u'\\mathtt{T}', 120452: u'\\mathtt{U}', 120453: u'\\mathtt{V}', 120454: u'\\mathtt{W}', 120455: u'\\mathtt{X}', 120456: u'\\mathtt{Y}', 120457: u'\\mathtt{Z}', 120458: u'\\mathtt{a}', 120459: u'\\mathtt{b}', 120460: u'\\mathtt{c}', 120461: u'\\mathtt{d}', 120462: u'\\mathtt{e}', 120463: u'\\mathtt{f}', 120464: u'\\mathtt{g}', 120465: u'\\mathtt{h}', 120466: u'\\mathtt{i}', 120467: u'\\mathtt{j}', 120468: u'\\mathtt{k}', 120469: u'\\mathtt{l}', 120470: u'\\mathtt{m}', 120471: u'\\mathtt{n}', 120472: u'\\mathtt{o}', 120473: u'\\mathtt{p}', 120474: u'\\mathtt{q}', 120475: u'\\mathtt{r}', 120476: u'\\mathtt{s}', 120477: u'\\mathtt{t}', 120478: u'\\mathtt{u}', 120479: u'\\mathtt{v}', 120480: u'\\mathtt{w}', 120481: u'\\mathtt{x}', 120482: u'\\mathtt{y}', 120483: u'\\mathtt{z}', 120484: u'\\imath ', 120485: u'\\jmath ', 120490: u'\\mathbf{\\Gamma}', 120491: u'\\mathbf{\\Delta}', 120495: u'\\mathbf{\\Theta}', 120498: u'\\mathbf{\\Lambda}', 120501: u'\\mathbf{\\Xi}', 120503: u'\\mathbf{\\Pi}', 120506: u'\\mathbf{\\Sigma}', 120508: u'\\mathbf{\\Upsilon}', 120509: u'\\mathbf{\\Phi}', 120511: u'\\mathbf{\\Psi}', 120512: u'\\mathbf{\\Omega}', 120548: u'\\mathit{\\Gamma}', 120549: 
u'\\mathit{\\Delta}', 120553: u'\\mathit{\\Theta}', 120556: u'\\mathit{\\Lambda}', 120559: u'\\mathit{\\Xi}', 120561: u'\\mathit{\\Pi}', 120564: u'\\mathit{\\Sigma}', 120566: u'\\mathit{\\Upsilon}', 120567: u'\\mathit{\\Phi}', 120569: u'\\mathit{\\Psi}', 120570: u'\\mathit{\\Omega}', 120572: u'\\alpha ', 120573: u'\\beta ', 120574: u'\\gamma ', 120575: u'\\delta ', 120576: u'\\varepsilon ', 120577: u'\\zeta ', 120578: u'\\eta ', 120579: u'\\theta ', 120580: u'\\iota ', 120581: u'\\kappa ', 120582: u'\\lambda ', 120583: u'\\mu ', 120584: u'\\nu ', 120585: u'\\xi ', 120587: u'\\pi ', 120588: u'\\rho ', 120589: u'\\varsigma ', 120590: u'\\sigma ', 120591: u'\\tau ', 120592: u'\\upsilon ', 120593: u'\\varphi ', 120594: u'\\chi ', 120595: u'\\psi ', 120596: u'\\omega ', 120597: u'\\partial ', 120598: u'\\epsilon ', 120599: u'\\vartheta ', 120600: u'\\varkappa ', 120601: u'\\phi ', 120602: u'\\varrho ', 120603: u'\\varpi ', 120782: u'\\mathbf{0}', 120783: u'\\mathbf{1}', 120784: u'\\mathbf{2}', 120785: u'\\mathbf{3}', 120786: u'\\mathbf{4}', 120787: u'\\mathbf{5}', 120788: u'\\mathbf{6}', 120789: u'\\mathbf{7}', 120790: u'\\mathbf{8}', 120791: u'\\mathbf{9}', 120802: u'\\mathsf{0}', 120803: u'\\mathsf{1}', 120804: u'\\mathsf{2}', 120805: u'\\mathsf{3}', 120806: u'\\mathsf{4}', 120807: u'\\mathsf{5}', 120808: u'\\mathsf{6}', 120809: u'\\mathsf{7}', 120810: u'\\mathsf{8}', 120811: u'\\mathsf{9}', 120822: u'\\mathtt{0}', 120823: u'\\mathtt{1}', 120824: u'\\mathtt{2}', 120825: u'\\mathtt{3}', 120826: u'\\mathtt{4}', 120827: u'\\mathtt{5}', 120828: u'\\mathtt{6}', 120829: u'\\mathtt{7}', 120830: u'\\mathtt{8}', 120831: u'\\mathtt{9}', }
apache-2.0
andmos/ansible
lib/ansible/plugins/action/net_lldp.py
756
1058
# (c) 2017, Ansible Inc, # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) __metaclass__ = type from ansible.plugins.action.net_base import ActionModule as _ActionModule class ActionModule(_ActionModule): def run(self, tmp=None, task_vars=None): result = super(ActionModule, self).run(tmp, task_vars) del tmp # tmp no longer has any effect return result
gpl-3.0
ypcs/bearded-adventure
bearded_adventure/webvm/api.py
1
1904
from tastypie.resources import ModelResource, ALL, ALL_WITH_RELATIONS from webvm.models import Slave, Snapshot, VirtualMachine, MachineImage, HwConfiguration, JobQueueItem from bearded_adventure.common import CamelCaseJSONSerializer from tastypie import fields from tastypie.authentication import ApiKeyAuthentication class SlaveResource(ModelResource): class Meta: queryset = Slave.objects.all() resource_name = 'slave' excludes = [ 'id', 'ssh_public_key', ] filtering = { 'uuid': ALL, } serializer = CamelCaseJSONSerializer() authentication = ApiKeyAuthentication() # detail_uri_name = 'uuid' # def prepend_urls(self): # return [url(r'^(?P<resource_name>%s)/(?P<uuid>[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12})$' % self._meta.resource_name, self.wrap_view('dispatch_detail'), name='api_dispatch_detail')] class MachineImageResource(ModelResource): class Meta: queryset = MachineImage.objects.all() resource_name = 'machine-image' serializer = CamelCaseJSONSerializer() excludes = ['id',] authentication = ApiKeyAuthentication() class VirtualMachineResource(ModelResource): machine_image = fields.ForeignKey(MachineImageResource, 'machine_image') class Meta: queryset = VirtualMachine.objects.all() resource_name = 'virtual_machine' serializer = CamelCaseJSONSerializer() authentication = ApiKeyAuthentication() class JobQueueResource(ModelResource): virtual_machine = fields.ForeignKey(VirtualMachineResource, 'vm') class Meta: queryset = JobQueueItem.objects.all().order_by('-priority', 'created') resource_name = 'queue' serializer = CamelCaseJSONSerializer() excludes = ['id',] authentication = ApiKeyAuthentication()
gpl-3.0
zenoss/ZenPacks.community.IBMSystemxIMM
ZenPacks/community/IBMSystemxIMM/modeler/plugins/community/snmp/IBMIMMMemVpdMap.py
1
3309
# ============================================================================== # IBMIMMMemVpdMap modeler plugin # # Zenoss community Zenpack for IBM SystemX Integrated Management Module # version: 0.3 # # (C) Copyright IBM Corp. 2011. All Rights Reserved. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License along # with this program; if not, write to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # ============================================================================== __doc__="""IBMIMMMemVpdMap maps Memory DIMM VPD entries associated with an IMM""" __author__ = "IBM" __copyright__ = "(C) Copyright IBM Corp. 2011. All Rights Reserved." 
__license__ = "GPL" __version__ = "0.3.0" from Products.DataCollector.plugins.CollectorPlugin import SnmpPlugin, GetTableMap, GetMap from Products.DataCollector.plugins.DataMaps import ObjectMap class IBMIMMMemVpdMap(SnmpPlugin): relname = "IMMMEMVPD" modname = "ZenPacks.community.IBMSystemxIMM.IMMMemVpd" columns = { '.1': 'memoryVpdIndex', '.2': 'memoryVpdDescription', '.3': 'memoryVpdPartNumber', '.4': 'memoryVpdFRUSerialNumber', '.5': 'memoryVpdManufactureDate', '.6': 'memoryVpdType', '.7': 'memoryVpdSize', } # snmpGetTableMaps gets tabular data snmpGetTableMaps = ( # Memory VPD table GetTableMap('systemMemoryVpdEntry', '.1.3.6.1.4.1.2.3.51.3.1.5.21.1', columns), ) def process(self, device, results, log): """collect snmp information from this device""" log.info('processing %s for device %s', self.name(), device.id) # Collect the data from device getdata, tabledata = results # Debug: print data retrieved from device. log.warn( "Get data = %s", getdata ) log.warn( "Table data = %s", tabledata ) VpdTable = tabledata.get("systemMemoryVpdEntry") # If no data retrieved return nothing. if not VpdTable: log.warn( 'No data collected from %s for the %s plugin', device.id, self.name() ) log.warn( "Data = %s", getdata ) log.warn( "Columns = %s", self.columns ) return rm = self.relMap() for oid, data in VpdTable.items(): om = self.objectMap(data) om.id = self.prepId(om.memoryVpdDescription) # om.snmpindex = int(om.memoryVpdIndex) om.memoryVpdIndex = int(om.memoryVpdIndex) # Debug: print values of object map. # for key,value in om.__dict__.items(): # log.warn("om key=value: %s = %s", key,value) rm.append(om) return rm
gpl-2.0
llvm-mirror/llvm
test/CodeGen/SystemZ/Large/branch-range-09.py
16
3627
# Test 32-bit COMPARE LOGICAL AND BRANCH in cases where the sheer number of # instructions causes some branches to be out of range. # RUN: python %s | llc -mtriple=s390x-linux-gnu | FileCheck %s # Construct: # # before0: # conditional branch to after0 # ... # beforeN: # conditional branch to after0 # main: # 0xffcc bytes, from MVIY instructions # conditional branch to main # after0: # ... # conditional branch to main # afterN: # # Each conditional branch sequence occupies 12 bytes if it uses a short # branch and 14 if it uses a long one. The ones before "main:" have to # take the branch length into account, which is 6 for short branches, # so the final (0x34 - 6) / 12 == 3 blocks can use short branches. # The ones after "main:" do not, so the first 0x34 / 12 == 4 blocks # can use short branches. # # CHECK: lb [[REG:%r[0-5]]], 0(%r3) # CHECK: clr %r4, [[REG]] # CHECK: jgl [[LABEL:\.L[^ ]*]] # CHECK: lb [[REG:%r[0-5]]], 1(%r3) # CHECK: clr %r4, [[REG]] # CHECK: jgl [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 2(%r3) # CHECK: clr %r4, [[REG]] # CHECK: jgl [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 3(%r3) # CHECK: clr %r4, [[REG]] # CHECK: jgl [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 4(%r3) # CHECK: clr %r4, [[REG]] # CHECK: jgl [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 5(%r3) # CHECK: clrjl %r4, [[REG]], [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 6(%r3) # CHECK: clrjl %r4, [[REG]], [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 7(%r3) # CHECK: clrjl %r4, [[REG]], [[LABEL]] # ...main goes here... 
# CHECK: lb [[REG:%r[0-5]]], 25(%r3) # CHECK: clrjl %r4, [[REG]], [[LABEL:\.L[^ ]*]] # CHECK: lb [[REG:%r[0-5]]], 26(%r3) # CHECK: clrjl %r4, [[REG]], [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 27(%r3) # CHECK: clrjl %r4, [[REG]], [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 28(%r3) # CHECK: clrjl %r4, [[REG]], [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 29(%r3) # CHECK: clr %r4, [[REG]] # CHECK: jgl [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 30(%r3) # CHECK: clr %r4, [[REG]] # CHECK: jgl [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 31(%r3) # CHECK: clr %r4, [[REG]] # CHECK: jgl [[LABEL]] # CHECK: lb [[REG:%r[0-5]]], 32(%r3) # CHECK: clr %r4, [[REG]] # CHECK: jgl [[LABEL]] from __future__ import print_function branch_blocks = 8 main_size = 0xffcc print('@global = global i32 0') print('define void @f1(i8 *%base, i8 *%stop, i32 %limit) {') print('entry:') print(' br label %before0') print('') for i in range(branch_blocks): next = 'before%d' % (i + 1) if i + 1 < branch_blocks else 'main' print('before%d:' % i) print(' %%bstop%d = getelementptr i8, i8 *%%stop, i64 %d' % (i, i)) print(' %%bcur%d = load i8 , i8 *%%bstop%d' % (i, i)) print(' %%bext%d = sext i8 %%bcur%d to i32' % (i, i)) print(' %%btest%d = icmp ult i32 %%limit, %%bext%d' % (i, i)) print(' br i1 %%btest%d, label %%after0, label %%%s' % (i, next)) print('') print('%s:' % next) a, b = 1, 1 for i in range(0, main_size, 6): a, b = b, a + b offset = 4096 + b % 500000 value = a % 256 print(' %%ptr%d = getelementptr i8, i8 *%%base, i64 %d' % (i, offset)) print(' store volatile i8 %d, i8 *%%ptr%d' % (value, i)) for i in range(branch_blocks): print(' %%astop%d = getelementptr i8, i8 *%%stop, i64 %d' % (i, i + 25)) print(' %%acur%d = load i8 , i8 *%%astop%d' % (i, i)) print(' %%aext%d = sext i8 %%acur%d to i32' % (i, i)) print(' %%atest%d = icmp ult i32 %%limit, %%aext%d' % (i, i)) print(' br i1 %%atest%d, label %%main, label %%after%d' % (i, i)) print('') print('after%d:' % i) print(' %dummy = load volatile i32, i32 *@global') print(' ret 
void') print('}')
apache-2.0
OpenDSA/OpenDSA-stable
SourceCode/Python/Sorting/Bubblesort.py
5
2991
def sorttime(B): A = [randrange(1,1000) for _ in range(testsize)] # To make it create a real A for copying numruns = 5 for i in range(len(B)): A[i] = B[i] time1 = datetime.now() bubblesort(A) time2 = datetime.now() checkorder(A) print "Standard Bubble Sort: Size ", testsize, ", Time: ", millis(time2 - time1) for i in range(len(B)): A[i] = B[i] time1 = datetime.now() bubblesortcheck(A) time2 = datetime.now() checkorder(A) print "Swap Check Bubble Sort: Size ", testsize, ", Time: ", millis(time2 - time1) for i in range(len(B)): A[i] = B[i] time1 = datetime.now() bubblesortcheck2(A) time2 = datetime.now() checkorder(A) print "Swap Check Bubble Sort 2: Size ", testsize, ", Time: ", millis(time2 - time1) totaltime = 0 for runs in range(1, numruns): for i in range(len(B)): A[i] = B[i] time1 = datetime.now() wikipedia(A) time2 = datetime.now() checkorder(A) totaltime += millis(time2-time1) print "Wikipedia Bubble Sort: Size ", testsize, ", for ", numruns, " runs, Time: ", totaltime totaltime = 0 for runs in range(1, numruns): for i in range(len(B)): A[i] = B[i] time1 = datetime.now() unwikipedia(A) time2 = datetime.now() checkorder(A) totaltime += millis(time2-time1) print "Wikipedia-compatible Bubble Sort without swap checking: Size ", testsize, " for ", numruns, "runs, Time: ", totaltime # A flag check if a pass did not have any swaps, which lets us quit def bubblesortcheck(A): for i in range(len(A)): # Insert i'th record swaps = False for j in range (1, len(A) - i): if (A[j-1] > A[j]): swap(A, j-1, j) swaps = True if (not swaps): print "Quit at ", i break # Can quit early # Modify the flag to check position of last swap taken def bubblesortcheck2(A): for i in range(len(A)-1): # Insert i'th record lastseen = 0; top = len(A) for j in range(1, top-1): if (A[j - 1] > A[j]): swap(A, j - 1, j) lastseen = j - 1 top = lastseen if (top == 0): print "Quit at ", i break # Can quit early # Wikipedia article "optimization" to only swap up to the last swap seen def wikipedia(A): n = 
len(A) - 1; while (n > 0): newn = 0 for i in range(n): # if this pair is out of order if (A[i] > A[i + 1]): swap(A, i, i + 1) newn = i n = newn # Wikipedia article-compatible version without swap checking def unwikipedia(A): n = len(A) - 1; while (n > 0): for i in range(n): # if this pair is out of order if (A[i] > A[i + 1]): swap(A, i, i + 1) n -= 1 def success(): print "Success! (Need to define this)" def sorttest(A): bubblesort(A) #/* *** ODSATag: Bubblesort *** */ def bubblesort(A): for i in range (len(A)): # Insert i'th record for j in range(1, len(A) - i): if (A[j - 1] > A[j]): swap(A, j - 1, j) #/* *** ODSAendTag: Bubblesort *** */
mit
saukrIppl/seahub
tests/api/test_dir.py
1
1041
import json import os from django.core.urlresolvers import reverse from seahub.test_utils import BaseTestCase class DirTest(BaseTestCase): def setUp(self): self.login_as(self.user) self.endpoint = reverse('DirView', args=[self.repo.id]) self.folder_name = os.path.basename(self.folder) def tearDown(self): self.remove_repo() def test_can_list(self): resp = self.client.get(self.endpoint) json_resp = json.loads(resp.content) self.assertEqual(200, resp.status_code) assert len(json_resp) == 1 assert self.folder_name == json_resp[0]['name'] def test_can_create(self): resp = self.client.post(self.endpoint + '?p=/new_dir', { 'operation': 'mkdir' }) self.assertEqual(201, resp.status_code) def test_create_with_nonexistent_parent(self): resp = self.client.post(self.endpoint + '?p=/new_parent/new_dir', { 'operation': 'mkdir' }) self.assertEqual(400, resp.status_code)
apache-2.0
BoltzmannBrain/nupic.research
tests/sensorimotor/unit/one_d_universe_test.py
9
2621
#!/usr/bin/env python # ---------------------------------------------------------------------- # Numenta Platform for Intelligent Computing (NuPIC) # Copyright (C) 2014, Numenta, Inc. Unless you have an agreement # with Numenta, Inc., for a separate license for this software code, the # following terms and conditions apply: # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero Public License version 3 as # published by the Free Software Foundation. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # See the GNU Affero Public License for more details. # # You should have received a copy of the GNU Affero Public License # along with this program. If not, see http://www.gnu.org/licenses. # # http://numenta.org/licenses/ # ---------------------------------------------------------------------- import unittest2 as unittest from sensorimotor.one_d_universe import OneDUniverse class OneDUniverseTest(unittest.TestCase): def testEncodeSensorValue(self): universe = OneDUniverse(debugSensor=True, nSensor=105, wSensor=5, nMotor=105, wMotor=5) self.assertEqual(universe.encodeSensorValue(0), set(xrange(0, 5))) self.assertEqual(universe.encodeSensorValue(19), set(xrange(95, 100))) self.assertEqual(universe.encodeSensorValue(20), set(xrange(100, 105))) universe = OneDUniverse(debugSensor=False, nSensor=105, wSensor=5, nMotor=105, wMotor=5) self.assertNotEqual(universe.encodeSensorValue(0), set(xrange(0, 5))) def testEncodeMotorValue(self): universe = OneDUniverse(debugMotor=True, nSensor=105, wSensor=5, nMotor=48*21, wMotor=48) self.assertEqual(universe.encodeMotorValue(-10), set(xrange(0, 48))) self.assertEqual(universe.encodeMotorValue(0), set(xrange(480, 528))) self.assertEqual(universe.encodeMotorValue(10), set(xrange(960, 1008))) universe = OneDUniverse(debugMotor=False, nSensor=105, 
wSensor=5, nMotor=48*21, wMotor=48) self.assertNotEqual(universe.encodeMotorValue(-10), set(xrange(0, 48))) def testDecodeSensorValue(self): universe = OneDUniverse() for i in range(universe.numDecodedElements): self.assertIsNotNone(universe.decodeSensorValue(i)) self.assertRaises(IndexError, universe.decodeSensorValue, universe.numDecodedElements) if __name__ == "__main__": unittest.main()
agpl-3.0
sdague/home-assistant
tests/components/homekit_controller/test_fan.py
8
12536
"""Basic checks for HomeKit motion sensors and contact sensors.""" from aiohomekit.model.characteristics import CharacteristicsTypes from aiohomekit.model.services import ServicesTypes from tests.components.homekit_controller.common import setup_test_component V1_ON = ("fan", "on") V1_ROTATION_DIRECTION = ("fan", "rotation.direction") V1_ROTATION_SPEED = ("fan", "rotation.speed") V2_ACTIVE = ("fanv2", "active") V2_ROTATION_DIRECTION = ("fanv2", "rotation.direction") V2_ROTATION_SPEED = ("fanv2", "rotation.speed") V2_SWING_MODE = ("fanv2", "swing-mode") def create_fan_service(accessory): """ Define fan v1 characteristics as per HAP spec. This service is no longer documented in R2 of the public HAP spec but existing devices out there use it (like the SIMPLEconnect fan) """ service = accessory.add_service(ServicesTypes.FAN) cur_state = service.add_char(CharacteristicsTypes.ON) cur_state.value = 0 direction = service.add_char(CharacteristicsTypes.ROTATION_DIRECTION) direction.value = 0 speed = service.add_char(CharacteristicsTypes.ROTATION_SPEED) speed.value = 0 def create_fanv2_service(accessory): """Define fan v2 characteristics as per HAP spec.""" service = accessory.add_service(ServicesTypes.FAN_V2) cur_state = service.add_char(CharacteristicsTypes.ACTIVE) cur_state.value = 0 direction = service.add_char(CharacteristicsTypes.ROTATION_DIRECTION) direction.value = 0 speed = service.add_char(CharacteristicsTypes.ROTATION_SPEED) speed.value = 0 swing_mode = service.add_char(CharacteristicsTypes.SWING_MODE) swing_mode.value = 0 async def test_fan_read_state(hass, utcnow): """Test that we can read the state of a HomeKit fan accessory.""" helper = await setup_test_component(hass, create_fan_service) helper.characteristics[V1_ON].value = False state = await helper.poll_and_get_state() assert state.state == "off" helper.characteristics[V1_ON].value = True state = await helper.poll_and_get_state() assert state.state == "on" async def test_turn_on(hass, utcnow): """Test that 
we can turn a fan on.""" helper = await setup_test_component(hass, create_fan_service) await hass.services.async_call( "fan", "turn_on", {"entity_id": "fan.testdevice", "speed": "high"}, blocking=True, ) assert helper.characteristics[V1_ON].value == 1 assert helper.characteristics[V1_ROTATION_SPEED].value == 100 await hass.services.async_call( "fan", "turn_on", {"entity_id": "fan.testdevice", "speed": "medium"}, blocking=True, ) assert helper.characteristics[V1_ON].value == 1 assert helper.characteristics[V1_ROTATION_SPEED].value == 50 await hass.services.async_call( "fan", "turn_on", {"entity_id": "fan.testdevice", "speed": "low"}, blocking=True, ) assert helper.characteristics[V1_ON].value == 1 assert helper.characteristics[V1_ROTATION_SPEED].value == 25 async def test_turn_off(hass, utcnow): """Test that we can turn a fan off.""" helper = await setup_test_component(hass, create_fan_service) helper.characteristics[V1_ON].value = 1 await hass.services.async_call( "fan", "turn_off", {"entity_id": "fan.testdevice"}, blocking=True, ) assert helper.characteristics[V1_ON].value == 0 async def test_set_speed(hass, utcnow): """Test that we set fan speed.""" helper = await setup_test_component(hass, create_fan_service) helper.characteristics[V1_ON].value = 1 await hass.services.async_call( "fan", "set_speed", {"entity_id": "fan.testdevice", "speed": "high"}, blocking=True, ) assert helper.characteristics[V1_ROTATION_SPEED].value == 100 await hass.services.async_call( "fan", "set_speed", {"entity_id": "fan.testdevice", "speed": "medium"}, blocking=True, ) assert helper.characteristics[V1_ROTATION_SPEED].value == 50 await hass.services.async_call( "fan", "set_speed", {"entity_id": "fan.testdevice", "speed": "low"}, blocking=True, ) assert helper.characteristics[V1_ROTATION_SPEED].value == 25 await hass.services.async_call( "fan", "set_speed", {"entity_id": "fan.testdevice", "speed": "off"}, blocking=True, ) assert helper.characteristics[V1_ON].value == 0 async def 
test_speed_read(hass, utcnow): """Test that we can read a fans oscillation.""" helper = await setup_test_component(hass, create_fan_service) helper.characteristics[V1_ON].value = 1 helper.characteristics[V1_ROTATION_SPEED].value = 100 state = await helper.poll_and_get_state() assert state.attributes["speed"] == "high" helper.characteristics[V1_ROTATION_SPEED].value = 50 state = await helper.poll_and_get_state() assert state.attributes["speed"] == "medium" helper.characteristics[V1_ROTATION_SPEED].value = 25 state = await helper.poll_and_get_state() assert state.attributes["speed"] == "low" helper.characteristics[V1_ON].value = 0 helper.characteristics[V1_ROTATION_SPEED].value = 0 state = await helper.poll_and_get_state() assert state.attributes["speed"] == "off" async def test_set_direction(hass, utcnow): """Test that we can set fan spin direction.""" helper = await setup_test_component(hass, create_fan_service) await hass.services.async_call( "fan", "set_direction", {"entity_id": "fan.testdevice", "direction": "reverse"}, blocking=True, ) assert helper.characteristics[V1_ROTATION_DIRECTION].value == 1 await hass.services.async_call( "fan", "set_direction", {"entity_id": "fan.testdevice", "direction": "forward"}, blocking=True, ) assert helper.characteristics[V1_ROTATION_DIRECTION].value == 0 async def test_direction_read(hass, utcnow): """Test that we can read a fans oscillation.""" helper = await setup_test_component(hass, create_fan_service) helper.characteristics[V1_ROTATION_DIRECTION].value = 0 state = await helper.poll_and_get_state() assert state.attributes["direction"] == "forward" helper.characteristics[V1_ROTATION_DIRECTION].value = 1 state = await helper.poll_and_get_state() assert state.attributes["direction"] == "reverse" async def test_fanv2_read_state(hass, utcnow): """Test that we can read the state of a HomeKit fan accessory.""" helper = await setup_test_component(hass, create_fanv2_service) helper.characteristics[V2_ACTIVE].value = False state = 
await helper.poll_and_get_state() assert state.state == "off" helper.characteristics[V2_ACTIVE].value = True state = await helper.poll_and_get_state() assert state.state == "on" async def test_v2_turn_on(hass, utcnow): """Test that we can turn a fan on.""" helper = await setup_test_component(hass, create_fanv2_service) await hass.services.async_call( "fan", "turn_on", {"entity_id": "fan.testdevice", "speed": "high"}, blocking=True, ) assert helper.characteristics[V2_ACTIVE].value == 1 assert helper.characteristics[V2_ROTATION_SPEED].value == 100 await hass.services.async_call( "fan", "turn_on", {"entity_id": "fan.testdevice", "speed": "medium"}, blocking=True, ) assert helper.characteristics[V2_ACTIVE].value == 1 assert helper.characteristics[V2_ROTATION_SPEED].value == 50 await hass.services.async_call( "fan", "turn_on", {"entity_id": "fan.testdevice", "speed": "low"}, blocking=True, ) assert helper.characteristics[V2_ACTIVE].value == 1 assert helper.characteristics[V2_ROTATION_SPEED].value == 25 async def test_v2_turn_off(hass, utcnow): """Test that we can turn a fan off.""" helper = await setup_test_component(hass, create_fanv2_service) helper.characteristics[V2_ACTIVE].value = 1 await hass.services.async_call( "fan", "turn_off", {"entity_id": "fan.testdevice"}, blocking=True, ) assert helper.characteristics[V2_ACTIVE].value == 0 async def test_v2_set_speed(hass, utcnow): """Test that we set fan speed.""" helper = await setup_test_component(hass, create_fanv2_service) helper.characteristics[V2_ACTIVE].value = 1 await hass.services.async_call( "fan", "set_speed", {"entity_id": "fan.testdevice", "speed": "high"}, blocking=True, ) assert helper.characteristics[V2_ROTATION_SPEED].value == 100 await hass.services.async_call( "fan", "set_speed", {"entity_id": "fan.testdevice", "speed": "medium"}, blocking=True, ) assert helper.characteristics[V2_ROTATION_SPEED].value == 50 await hass.services.async_call( "fan", "set_speed", {"entity_id": "fan.testdevice", "speed": 
"low"}, blocking=True, ) assert helper.characteristics[V2_ROTATION_SPEED].value == 25 await hass.services.async_call( "fan", "set_speed", {"entity_id": "fan.testdevice", "speed": "off"}, blocking=True, ) assert helper.characteristics[V2_ACTIVE].value == 0 async def test_v2_speed_read(hass, utcnow): """Test that we can read a fans oscillation.""" helper = await setup_test_component(hass, create_fanv2_service) helper.characteristics[V2_ACTIVE].value = 1 helper.characteristics[V2_ROTATION_SPEED].value = 100 state = await helper.poll_and_get_state() assert state.attributes["speed"] == "high" helper.characteristics[V2_ROTATION_SPEED].value = 50 state = await helper.poll_and_get_state() assert state.attributes["speed"] == "medium" helper.characteristics[V2_ROTATION_SPEED].value = 25 state = await helper.poll_and_get_state() assert state.attributes["speed"] == "low" helper.characteristics[V2_ACTIVE].value = 0 helper.characteristics[V2_ROTATION_SPEED].value = 0 state = await helper.poll_and_get_state() assert state.attributes["speed"] == "off" async def test_v2_set_direction(hass, utcnow): """Test that we can set fan spin direction.""" helper = await setup_test_component(hass, create_fanv2_service) await hass.services.async_call( "fan", "set_direction", {"entity_id": "fan.testdevice", "direction": "reverse"}, blocking=True, ) assert helper.characteristics[V2_ROTATION_DIRECTION].value == 1 await hass.services.async_call( "fan", "set_direction", {"entity_id": "fan.testdevice", "direction": "forward"}, blocking=True, ) assert helper.characteristics[V2_ROTATION_DIRECTION].value == 0 async def test_v2_direction_read(hass, utcnow): """Test that we can read a fans oscillation.""" helper = await setup_test_component(hass, create_fanv2_service) helper.characteristics[V2_ROTATION_DIRECTION].value = 0 state = await helper.poll_and_get_state() assert state.attributes["direction"] == "forward" helper.characteristics[V2_ROTATION_DIRECTION].value = 1 state = await 
helper.poll_and_get_state() assert state.attributes["direction"] == "reverse" async def test_v2_oscillate(hass, utcnow): """Test that we can control a fans oscillation.""" helper = await setup_test_component(hass, create_fanv2_service) await hass.services.async_call( "fan", "oscillate", {"entity_id": "fan.testdevice", "oscillating": True}, blocking=True, ) assert helper.characteristics[V2_SWING_MODE].value == 1 await hass.services.async_call( "fan", "oscillate", {"entity_id": "fan.testdevice", "oscillating": False}, blocking=True, ) assert helper.characteristics[V2_SWING_MODE].value == 0 async def test_v2_oscillate_read(hass, utcnow): """Test that we can read a fans oscillation.""" helper = await setup_test_component(hass, create_fanv2_service) helper.characteristics[V2_SWING_MODE].value = 0 state = await helper.poll_and_get_state() assert state.attributes["oscillating"] is False helper.characteristics[V2_SWING_MODE].value = 1 state = await helper.poll_and_get_state() assert state.attributes["oscillating"] is True
apache-2.0
davidmueller13/david_kernel_aosp_flo_6.0
scripts/rt-tester/rt-tester.py
11005
5307
#!/usr/bin/python # # rt-mutex tester # # (C) 2006 Thomas Gleixner <tglx@linutronix.de> # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License version 2 as # published by the Free Software Foundation. # import os import sys import getopt import shutil import string # Globals quiet = 0 test = 0 comments = 0 sysfsprefix = "/sys/devices/system/rttest/rttest" statusfile = "/status" commandfile = "/command" # Command opcodes cmd_opcodes = { "schedother" : "1", "schedfifo" : "2", "lock" : "3", "locknowait" : "4", "lockint" : "5", "lockintnowait" : "6", "lockcont" : "7", "unlock" : "8", "signal" : "11", "resetevent" : "98", "reset" : "99", } test_opcodes = { "prioeq" : ["P" , "eq" , None], "priolt" : ["P" , "lt" , None], "priogt" : ["P" , "gt" , None], "nprioeq" : ["N" , "eq" , None], "npriolt" : ["N" , "lt" , None], "npriogt" : ["N" , "gt" , None], "unlocked" : ["M" , "eq" , 0], "trylock" : ["M" , "eq" , 1], "blocked" : ["M" , "eq" , 2], "blockedwake" : ["M" , "eq" , 3], "locked" : ["M" , "eq" , 4], "opcodeeq" : ["O" , "eq" , None], "opcodelt" : ["O" , "lt" , None], "opcodegt" : ["O" , "gt" , None], "eventeq" : ["E" , "eq" , None], "eventlt" : ["E" , "lt" , None], "eventgt" : ["E" , "gt" , None], } # Print usage information def usage(): print "rt-tester.py <-c -h -q -t> <testfile>" print " -c display comments after first command" print " -h help" print " -q quiet mode" print " -t test mode (syntax check)" print " testfile: read test specification from testfile" print " otherwise from stdin" return # Print progress when not in quiet mode def progress(str): if not quiet: print str # Analyse a status value def analyse(val, top, arg): intval = int(val) if top[0] == "M": intval = intval / (10 ** int(arg)) intval = intval % 10 argval = top[2] elif top[0] == "O": argval = int(cmd_opcodes.get(arg, arg)) else: argval = int(arg) # progress("%d %s %d" %(intval, top[1], argval)) if top[1] == "eq" and intval == 
argval: return 1 if top[1] == "lt" and intval < argval: return 1 if top[1] == "gt" and intval > argval: return 1 return 0 # Parse the commandline try: (options, arguments) = getopt.getopt(sys.argv[1:],'chqt') except getopt.GetoptError, ex: usage() sys.exit(1) # Parse commandline options for option, value in options: if option == "-c": comments = 1 elif option == "-q": quiet = 1 elif option == "-t": test = 1 elif option == '-h': usage() sys.exit(0) # Select the input source if arguments: try: fd = open(arguments[0]) except Exception,ex: sys.stderr.write("File not found %s\n" %(arguments[0])) sys.exit(1) else: fd = sys.stdin linenr = 0 # Read the test patterns while 1: linenr = linenr + 1 line = fd.readline() if not len(line): break line = line.strip() parts = line.split(":") if not parts or len(parts) < 1: continue if len(parts[0]) == 0: continue if parts[0].startswith("#"): if comments > 1: progress(line) continue if comments == 1: comments = 2 progress(line) cmd = parts[0].strip().lower() opc = parts[1].strip().lower() tid = parts[2].strip() dat = parts[3].strip() try: # Test or wait for a status value if cmd == "t" or cmd == "w": testop = test_opcodes[opc] fname = "%s%s%s" %(sysfsprefix, tid, statusfile) if test: print fname continue while 1: query = 1 fsta = open(fname, 'r') status = fsta.readline().strip() fsta.close() stat = status.split(",") for s in stat: s = s.strip() if s.startswith(testop[0]): # Separate status value val = s[2:].strip() query = analyse(val, testop, dat) break if query or cmd == "t": break progress(" " + status) if not query: sys.stderr.write("Test failed in line %d\n" %(linenr)) sys.exit(1) # Issue a command to the tester elif cmd == "c": cmdnr = cmd_opcodes[opc] # Build command string and sys filename cmdstr = "%s:%s" %(cmdnr, dat) fname = "%s%s%s" %(sysfsprefix, tid, commandfile) if test: print fname continue fcmd = open(fname, 'w') fcmd.write(cmdstr) fcmd.close() except Exception,ex: sys.stderr.write(str(ex)) sys.stderr.write("\nSyntax 
error in line %d\n" %(linenr)) if not test: fd.close() sys.exit(1) # Normal exit pass print "Pass" sys.exit(0)
gpl-2.0
lgarren/spack
var/spack/repos/builtin/packages/r-yaml/package.py
1
1687
############################################################################## # Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC. # Produced at the Lawrence Livermore National Laboratory. # # This file is part of Spack. # Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved. # LLNL-CODE-647188 # # For details, see https://github.com/llnl/spack # Please also see the NOTICE and LICENSE files for our notice and the LGPL. # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU Lesser General Public License (as # published by the Free Software Foundation) version 2.1, February 1999. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and # conditions of the GNU Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this program; if not, write to the Free Software # Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA ############################################################################## from spack import * class RYaml(RPackage): """This package implements the libyaml YAML 1.1 parser and emitter (http://pyyaml.org/wiki/LibYAML) for R.""" homepage = "https://cran.r-project.org/web/packages/yaml/index.html" url = "https://cran.r-project.org/src/contrib/yaml_2.1.13.tar.gz" list_url = homepage version('2.1.14', '2de63248e6a122c368f8e4537426e35c') version('2.1.13', 'f2203ea395adaff6bd09134666191d9a')
lgpl-2.1
sujithshankar/systemd-work
tools/xml_helper.py
119
1281
# -*- Mode: python; coding: utf-8; indent-tabs-mode: nil -*- */ # # This file is part of systemd. # # Copyright 2012-2013 Zbigniew Jędrzejewski-Szmek # # systemd is free software; you can redistribute it and/or modify it # under the terms of the GNU Lesser General Public License as published by # the Free Software Foundation; either version 2.1 of the License, or # (at your option) any later version. # # systemd is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public License # along with systemd; If not, see <http://www.gnu.org/licenses/>. from lxml import etree as tree class CustomResolver(tree.Resolver): def resolve(self, url, id, context): if 'custom-entities.ent' in url: return self.resolve_filename('man/custom-entities.ent', context) _parser = tree.XMLParser() _parser.resolvers.add(CustomResolver()) def xml_parse(page): doc = tree.parse(page, _parser) doc.xinclude() return doc def xml_print(xml): return tree.tostring(xml, pretty_print=True, encoding='utf-8')
gpl-2.0
gpitel/pyjs
pygtkweb/demos/049-filelisting-gtm.py
6
5884
#!/usr/bin/env python import os, stat, time import pygtk pygtk.require('2.0') import gtk folderxpm = [ "17 16 7 1", " c #000000", ". c #808000", "X c yellow", "o c #808080", "O c #c0c0c0", "+ c white", "@ c None", "@@@@@@@@@@@@@@@@@", "@@@@@@@@@@@@@@@@@", "@@+XXXX.@@@@@@@@@", "@+OOOOOO.@@@@@@@@", "@+OXOXOXOXOXOXO. ", "@+XOXOXOXOXOXOX. ", "@+OXOXOXOXOXOXO. ", "@+XOXOXOXOXOXOX. ", "@+OXOXOXOXOXOXO. ", "@+XOXOXOXOXOXOX. ", "@+OXOXOXOXOXOXO. ", "@+XOXOXOXOXOXOX. ", "@+OOOOOOOOOOOOO. ", "@ ", "@@@@@@@@@@@@@@@@@", "@@@@@@@@@@@@@@@@@" ] folderpb = gtk.gdk.pixbuf_new_from_xpm_data(folderxpm) filexpm = [ "12 12 3 1", " c #000000", ". c #ffff04", "X c #b2c0dc", "X XXX", "X ...... XXX", "X ...... X", "X . ... X", "X ........ X", "X . .... X", "X ........ X", "X . .. X", "X ........ X", "X . .. X", "X ........ X", "X X" ] filepb = gtk.gdk.pixbuf_new_from_xpm_data(filexpm) class FileListModel(gtk.GenericTreeModel): column_types = (gtk.gdk.Pixbuf, str, long, str, str) column_names = ['Name', 'Size', 'Mode', 'Last Changed'] def __init__(self, dname=None): gtk.GenericTreeModel.__init__(self) if not dname: self.dirname = os.path.expanduser('~') else: self.dirname = os.path.abspath(dname) self.files = [f for f in os.listdir(self.dirname) if f[0] <> '.'] self.files.sort() self.files = ['..'] + self.files return def get_pathname(self, path): filename = self.files[path[0]] return os.path.join(self.dirname, filename) def is_folder(self, path): filename = self.files[path[0]] pathname = os.path.join(self.dirname, filename) filestat = os.stat(pathname) if stat.S_ISDIR(filestat.st_mode): return True return False def get_column_names(self): return self.column_names[:] def on_get_flags(self): return gtk.TREE_MODEL_LIST_ONLY|gtk.TREE_MODEL_ITERS_PERSIST def on_get_n_columns(self): return len(self.column_types) def on_get_column_type(self, n): return self.column_types[n] def on_get_iter(self, path): return self.files[path[0]] def on_get_path(self, rowref): return self.files.index(rowref) def 
on_get_value(self, rowref, column): fname = os.path.join(self.dirname, rowref) try: filestat = os.stat(fname) except OSError: return None mode = filestat.st_mode if column is 0: if stat.S_ISDIR(mode): return folderpb else: return filepb elif column is 1: return rowref elif column is 2: return filestat.st_size elif column is 3: return oct(stat.S_IMODE(mode)) return time.ctime(filestat.st_mtime) def on_iter_next(self, rowref): try: i = self.files.index(rowref)+1 return self.files[i] except IndexError: return None def on_iter_children(self, rowref): if rowref: return None return self.files[0] def on_iter_has_child(self, rowref): return False def on_iter_n_children(self, rowref): if rowref: return 0 return len(self.files) def on_iter_nth_child(self, rowref, n): if rowref: return None try: return self.files[n] except IndexError: return None def on_iter_parent(child): return None class GenericTreeModelExample: def delete_event(self, widget, event, data=None): gtk.main_quit() return False def __init__(self): # Create a new window self.window = gtk.Window(gtk.WINDOW_TOPLEVEL) self.window.set_size_request(300, 200) self.window.connect("delete_event", self.delete_event) self.listmodel = FileListModel() # create the TreeView self.treeview = gtk.TreeView() # create the TreeViewColumns to display the data column_names = self.listmodel.get_column_names() self.tvcolumn = [None] * len(column_names) cellpb = gtk.CellRendererPixbuf() self.tvcolumn[0] = gtk.TreeViewColumn(column_names[0], cellpb, pixbuf=0) cell = gtk.CellRendererText() self.tvcolumn[0].pack_start(cell, False) self.tvcolumn[0].add_attribute(cell, 'text', 1) self.treeview.append_column(self.tvcolumn[0]) for n in range(1, len(column_names)): cell = gtk.CellRendererText() if n == 1: cell.set_property('xalign', 1.0) self.tvcolumn[n] = gtk.TreeViewColumn(column_names[n], cell, text=n+1) self.treeview.append_column(self.tvcolumn[n]) self.treeview.connect('row-activated', self.open_file) self.scrolledwindow = 
gtk.ScrolledWindow() self.scrolledwindow.add(self.treeview) self.window.add(self.scrolledwindow) self.treeview.set_model(self.listmodel) self.window.set_title(self.listmodel.dirname) self.window.show_all() def open_file(self, treeview, path, column): model = treeview.get_model() if model.is_folder(path): pathname = model.get_pathname(path) new_model = FileListModel(pathname) self.window.set_title(new_model.dirname) treeview.set_model(new_model) return def main(): gtk.main() if __name__ == "__main__": gtmexample = GenericTreeModelExample() main()
apache-2.0
peterfpeterson/mantid
Framework/PythonInterface/plugins/algorithms/WorkflowAlgorithms/USANSReduction.py
3
15000
# Mantid Repository : https://github.com/mantidproject/mantid # # Copyright &copy; 2018 ISIS Rutherford Appleton Laboratory UKRI, # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + #pylint: disable=no-init,invalid-name from mantid.simpleapi import * from mantid.api import * from mantid.kernel import * import math import numpy import sys import os import json class USANSReduction(PythonAlgorithm): wl_list = None data_files = None total_points = None q_output = None iq_output = None iq_err_output = None def category(self): return "SANS" def seeAlso(self): return [ "USANSSimulation" ] def name(self): return "USANSReduction" def summary(self): return "Perform USANS data reduction" def PyInit(self): arrvalidator = IntArrayBoundedValidator(lower=0) self.declareProperty(IntArrayProperty("RunNumbers", values=[0], validator=arrvalidator, direction=Direction.Input), "Runs to reduce") self.declareProperty("EmptyRun", '', "Run number for the empty run") #TODO: Mask workspace self.declareProperty(MatrixWorkspaceProperty("OutputWorkspace", "", Direction.Output), "Output workspace") #pylint: disable= too-few-public-methods class DataFile(object): #pylint: disable= too-many-arguments def __init__(self, workspace, monitor, empty, empty_monitor, is_scan=False, max_index=1): self.workspace = workspace self.monitor = monitor self.empty = empty self.empty_monitor = empty_monitor self.is_scan = is_scan self.max_index = max_index def _find_monitors(self, run): """ Find a monitor file for testing purposes. 
@param run: run number """ f_list = FileFinder.findRuns("USANS_%s" % run) if len(f_list)>0: root, ext = os.path.splitext(f_list[0]) return "%s_monitors%s" % (root, ext) else: Logger("USANSReduction").error("Could not find monitors for run %s" % run) return None def _load_data(self): """ Load data and go through each file to determine how many points will have to be dealt with. """ # Load the empty run empty_run = self.getProperty("EmptyRun").value Load(Filename='USANS_%s' % empty_run, LoadMonitors=True, OutputWorkspace='__empty') # A simple Load doesn't load the instrument properties correctly with our test file # Reload the instrument for now LoadInstrument(Workspace='__empty', InstrumentName='USANS', RewriteSpectraMap=False) # For testing, we may have to load the monitors by hand if not mtd.doesExist('__empty_monitors'): Load(Filename=self._find_monitors(empty_run), OutputWorkspace='__empty_monitors') # Get the wavelength peak positions wl_cfg_str = mtd['__empty'].getInstrument().getStringParameter("wavelength_config")[0] self.wl_list = json.loads(wl_cfg_str) # Get the runs to reduce run_list = self.getProperty("RunNumbers").value # Total number of measurements per wavelength peak total_points = 0 # Load all files so we can determine how many points we have self.data_files = [] for item in run_list: ws_name = '__sample_%s' % item Load(Filename='USANS_%s' % item, LoadMonitors=True, OutputWorkspace=ws_name) # For testing, we may have to load the monitors by hand if not mtd.doesExist(ws_name+'_monitors'): Load(Filename=self._find_monitors(empty_run), OutputWorkspace=ws_name+'_monitors') # Determine whether we are putting together multiple files or whether # we will be looking for scan_index markers. 
is_scan = False max_index = 1 if mtd[ws_name].getRun().hasProperty('scan_index'): scan_index = mtd[ws_name].getRun().getProperty("scan_index").value if len(scan_index)>0: _max_index = scan_index.getStatistics().maximum if _max_index>0: max_index = _max_index is_scan = True # Append the info for when we do the reduction self.data_files.append(self.DataFile(workspace=ws_name, monitor=ws_name+'_monitors', empty='__empty', empty_monitor='__empty_monitors', is_scan=is_scan, max_index=max_index)) total_points += max_index return total_points def _process_data_file(self, file_info, index_offset): # Go through each point for point in range(file_info.max_index): # If we are in a scan, select the current scan point if file_info.is_scan: ws=FilterByLogValue(InputWorkspace=mtd[file_info.workspace], LogName='scan_index', MinimumValue=point, MaximumValue=point, LogBoundary='Left') else: ws = mtd[file_info.workspace] # Get the two-theta value for this point if ws.getRun().getProperty("two_theta").type=='number': two_theta = ws.getRun().getProperty("two_theta").value else: two_theta = ws.getRun().getProperty("two_theta").timeAverageValue() # Loop through the wavelength peaks for this point for i_wl in range(len(self.wl_list)): wl = self.wl_list[i_wl]['wavelength'] # Note: TOF value is given by tof = 30.0/0.0039560*wl q = 6.28*math.sin(two_theta)/wl # Get I(q) for each wavelength peak i_q = self._get_intensity(mtd[file_info.workspace], mtd[file_info.empty], mtd[file_info.monitor], mtd[file_info.empty_monitor], tof_min=self.wl_list[i_wl]['t_min'], tof_max=self.wl_list[i_wl]['t_max']) # Store the reduced data try: self.q_output[i_wl][point+index_offset] = q self.iq_output[i_wl][point+index_offset] = i_q.dataY(0)[0] self.iq_err_output[i_wl][point+index_offset] = i_q.dataE(0)[0] except: Logger("USANSReduction").error("Exception caught for " + "%s on peak %s, point %s. 
Offset=%s" % (file_info.workspace, i_wl, point, index_offset)) Logger("USANSReduction").error("Array: " + "%s x %s Data: %s" % (len(self.wl_list), self.total_points, file_info.max_index)) Logger("USANSReduction").error(sys.exc_info()[1]) return file_info.max_index def PyExec(self): # Placeholder for the data file information self.data_files = [] # Total number of measurements per wavelength peak self.total_points = self._load_data() # Create an array to store the I(q) points n_wl = len(self.wl_list) Logger("USANSReduction").notice("USANS reduction for %g peaks with %g point(s) each" % (n_wl, self.total_points)) self.q_output = numpy.zeros(shape=(n_wl, self.total_points)) self.iq_output = numpy.zeros(shape=(n_wl, self.total_points)) self.iq_err_output = numpy.zeros(shape=(n_wl, self.total_points)) index_offset = 0 for item in self.data_files: index_offset += self._process_data_file(item, index_offset) # Create a workspace for each peak self._aggregate() def _aggregate(self): """ Create a workspace for each peak #TODO: stitch the data instead of just dumping them in a workspace """ x_all = [] y_all = [] e_all = [] def compare(p1,p2): if p2[0]==p1[0]: return 0 return -1 if p2[0]>p1[0] else 1 for i_wl in range(len(self.wl_list)): x_all.extend(self.q_output[i_wl]) y_all.extend(self.iq_output[i_wl]) e_all.extend(self.iq_err_output[i_wl]) x = self.q_output[i_wl] y = self.iq_output[i_wl] e = self.iq_err_output[i_wl] # Sort the I(q) point just in case we got them in the wrong order zipped = list(zip(x,y,e)) combined = sorted(zipped, compare) x,y,e = list(zip(*combined)) wl = self.wl_list[i_wl]['wavelength'] CreateWorkspace(DataX=x, DataY=y, DataE=e, NSpec=1, UnitX='MomentumTransfer', OutputWorkspace='iq_%1.2f' % wl) # Sort the I(q) point just in case we got them in the wrong order zipped = list(zip(x_all,y_all,e_all)) combined = sorted(zipped, compare) x,y,e = list(zip(*combined)) # Create the combined output workspace output_ws_name = 
self.getPropertyValue("OutputWorkspace") out_ws = CreateWorkspace(DataX=x, DataY=y, DataE=e, NSpec=1, UnitX='MomentumTransfer', OutputWorkspace=output_ws_name) self.setProperty("OutputWorkspace", out_ws) #pylint: disable=too-many-arguments def _get_intensity(self, sample, empty, sample_monitor, empty_monitor, tof_min, tof_max): # Number of pixels we are dealing with nspecs = sample.getNumberHistograms() # Apply mask # Get the normalized empty run counts in the transmission detector __empty_summed = _execute('SumSpectra', InputWorkspace=str(empty), StartWorkspaceIndex=nspecs/2, EndWorkspaceIndex=nspecs-1, OutputWorkspace='__empty_summed') __point = _execute('CropWorkspace', InputWorkspace=__empty_summed, XMin=tof_min, XMax=tof_max, OutputWorkspace='__point') __empty_count = _execute('Integration', InputWorkspace=__point, OutputWorkspace='__empty_count') __point = _execute('CropWorkspace', InputWorkspace=str(empty_monitor), XMin=tof_min, XMax=tof_max, OutputWorkspace='__point') __empty_monitor_count = _execute('Integration', InputWorkspace=__point, OutputWorkspace='__empty_monitor_count') __normalized_empty = _execute('Divide', LHSWorkspace=__empty_count, RHSWorkspace=__empty_monitor_count, OutputWorkspace='__normalized_empty') # Get the normalized sample counts in the transmission detector __trans_summed = _execute('SumSpectra', InputWorkspace=sample, StartWorkspaceIndex=nspecs/2, EndWorkspaceIndex=nspecs-1, OutputWorkspace='__trans_summed') __point = _execute('CropWorkspace', InputWorkspace=__trans_summed, XMin=tof_min, XMax=tof_max, OutputWorkspace='__point') __trans_count = _execute('Integration', InputWorkspace=__point, OutputWorkspace='__trans_count') __point = _execute('CropWorkspace', InputWorkspace=sample_monitor, XMin=tof_min, XMax=tof_max, OutputWorkspace='__point') #__monitor_count = _execute('Integration', InputWorkspace=__point, # OutputWorkspace='__monitor_count') # The monitor count normalization cancels out when doing the transmission correction # of 
the scattering signal below __normalized_sample_trans = __trans_count#/__monitor_count # Transmission workspace transmission = _execute('Divide', LHSWorkspace=__normalized_sample_trans, RHSWorkspace=__normalized_empty, OutputWorkspace='transmission') # Scattering signal __signal_summed = _execute('SumSpectra', InputWorkspace=sample, StartWorkspaceIndex=0, EndWorkspaceIndex=nspecs/2, OutputWorkspace='__signal_summed') __point = _execute('CropWorkspace', InputWorkspace=__signal_summed, XMin=tof_min, XMax=tof_max, OutputWorkspace='__point') __signal_count = _execute('Integration', InputWorkspace=__point, OutputWorkspace='__signal_count') # The monitor count normalization cancels out when doing the transmission correction __signal = __signal_count#/__monitor_count intensity = _execute('Divide', LHSWorkspace=__signal, RHSWorkspace=transmission, OutputWorkspace='intensity') return intensity def _execute(algorithm_name, **parameters): alg = AlgorithmManager.create(algorithm_name) alg.initialize() alg.setChild(True) for key, value in parameters.items(): if value is None: Logger("USANSReduction").error("Trying to set %s=None" % key) if alg.existsProperty(key): if isinstance(value, str): alg.setPropertyValue(key, value) else: alg.setProperty(key, value) try: alg.execute() if alg.existsProperty("OutputWorkspace"): return alg.getProperty("OutputWorkspace").value except: Logger("USANSReduction").error("Error executing [%s]" % str(alg)) Logger("USANSReduction").error(str(sys.exc_info()[1])) return alg ############################################################################################# AlgorithmFactory.subscribe(USANSReduction())
gpl-3.0
ganeshrn/ansible
lib/ansible/module_utils/facts/network/generic_bsd.py
58
12273
# This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. from __future__ import (absolute_import, division, print_function) __metaclass__ = type import re import socket import struct from ansible.module_utils.facts.network.base import Network class GenericBsdIfconfigNetwork(Network): """ This is a generic BSD subclass of Network using the ifconfig command. It defines - interfaces (a list of interface names) - interface_<name> dictionary of ipv4, ipv6, and mac address information. - all_ipv4_addresses and all_ipv6_addresses: lists of all configured addresses. 
""" platform = 'Generic_BSD_Ifconfig' def populate(self, collected_facts=None): network_facts = {} ifconfig_path = self.module.get_bin_path('ifconfig') if ifconfig_path is None: return network_facts route_path = self.module.get_bin_path('route') if route_path is None: return network_facts default_ipv4, default_ipv6 = self.get_default_interfaces(route_path) interfaces, ips = self.get_interfaces_info(ifconfig_path) interfaces = self.detect_type_media(interfaces) self.merge_default_interface(default_ipv4, interfaces, 'ipv4') self.merge_default_interface(default_ipv6, interfaces, 'ipv6') network_facts['interfaces'] = sorted(list(interfaces.keys())) for iface in interfaces: network_facts[iface] = interfaces[iface] network_facts['default_ipv4'] = default_ipv4 network_facts['default_ipv6'] = default_ipv6 network_facts['all_ipv4_addresses'] = ips['all_ipv4_addresses'] network_facts['all_ipv6_addresses'] = ips['all_ipv6_addresses'] return network_facts def detect_type_media(self, interfaces): for iface in interfaces: if 'media' in interfaces[iface]: if 'ether' in interfaces[iface]['media'].lower(): interfaces[iface]['type'] = 'ether' return interfaces def get_default_interfaces(self, route_path): # Use the commands: # route -n get default # route -n get -inet6 default # to find out the default outgoing interface, address, and gateway command = dict(v4=[route_path, '-n', 'get', 'default'], v6=[route_path, '-n', 'get', '-inet6', 'default']) interface = dict(v4={}, v6={}) for v in 'v4', 'v6': if v == 'v6' and not socket.has_ipv6: continue rc, out, err = self.module.run_command(command[v]) if not out: # v6 routing may result in # RTNETLINK answers: Invalid argument continue for line in out.splitlines(): words = line.strip().split(': ') # Collect output from route command if len(words) > 1: if words[0] == 'interface': interface[v]['interface'] = words[1] if words[0] == 'gateway': interface[v]['gateway'] = words[1] # help pick the right interface address on OpenBSD if words[0] == 
'if address': interface[v]['address'] = words[1] # help pick the right interface address on NetBSD if words[0] == 'local addr': interface[v]['address'] = words[1] return interface['v4'], interface['v6'] def get_interfaces_info(self, ifconfig_path, ifconfig_options='-a'): interfaces = {} current_if = {} ips = dict( all_ipv4_addresses=[], all_ipv6_addresses=[], ) # FreeBSD, DragonflyBSD, NetBSD, OpenBSD and macOS all implicitly add '-a' # when running the command 'ifconfig'. # Solaris must explicitly run the command 'ifconfig -a'. rc, out, err = self.module.run_command([ifconfig_path, ifconfig_options]) for line in out.splitlines(): if line: words = line.split() if words[0] == 'pass': continue elif re.match(r'^\S', line) and len(words) > 3: current_if = self.parse_interface_line(words) interfaces[current_if['device']] = current_if elif words[0].startswith('options='): self.parse_options_line(words, current_if, ips) elif words[0] == 'nd6': self.parse_nd6_line(words, current_if, ips) elif words[0] == 'ether': self.parse_ether_line(words, current_if, ips) elif words[0] == 'media:': self.parse_media_line(words, current_if, ips) elif words[0] == 'status:': self.parse_status_line(words, current_if, ips) elif words[0] == 'lladdr': self.parse_lladdr_line(words, current_if, ips) elif words[0] == 'inet': self.parse_inet_line(words, current_if, ips) elif words[0] == 'inet6': self.parse_inet6_line(words, current_if, ips) elif words[0] == 'tunnel': self.parse_tunnel_line(words, current_if, ips) else: self.parse_unknown_line(words, current_if, ips) return interfaces, ips def parse_interface_line(self, words): device = words[0][0:-1] current_if = {'device': device, 'ipv4': [], 'ipv6': [], 'type': 'unknown'} current_if['flags'] = self.get_options(words[1]) if 'LOOPBACK' in current_if['flags']: current_if['type'] = 'loopback' current_if['macaddress'] = 'unknown' # will be overwritten later if len(words) >= 5: # Newer FreeBSD versions current_if['metric'] = words[3] current_if['mtu'] 
= words[5] else: current_if['mtu'] = words[3] return current_if def parse_options_line(self, words, current_if, ips): # Mac has options like this... current_if['options'] = self.get_options(words[0]) def parse_nd6_line(self, words, current_if, ips): # FreeBSD has options like this... current_if['options'] = self.get_options(words[1]) def parse_ether_line(self, words, current_if, ips): current_if['macaddress'] = words[1] current_if['type'] = 'ether' def parse_media_line(self, words, current_if, ips): # not sure if this is useful - we also drop information current_if['media'] = words[1] if len(words) > 2: current_if['media_select'] = words[2] if len(words) > 3: current_if['media_type'] = words[3][1:] if len(words) > 4: current_if['media_options'] = self.get_options(words[4]) def parse_status_line(self, words, current_if, ips): current_if['status'] = words[1] def parse_lladdr_line(self, words, current_if, ips): current_if['lladdr'] = words[1] def parse_inet_line(self, words, current_if, ips): # netbsd show aliases like this # lo0: flags=8049<UP,LOOPBACK,RUNNING,MULTICAST> mtu 33184 # inet 127.0.0.1 netmask 0xff000000 # inet alias 127.1.1.1 netmask 0xff000000 if words[1] == 'alias': del words[1] address = {'address': words[1]} # cidr style ip address (eg, 127.0.0.1/24) in inet line # used in netbsd ifconfig -e output after 7.1 if '/' in address['address']: ip_address, cidr_mask = address['address'].split('/') address['address'] = ip_address netmask_length = int(cidr_mask) netmask_bin = (1 << 32) - (1 << 32 >> int(netmask_length)) address['netmask'] = socket.inet_ntoa(struct.pack('!L', netmask_bin)) if len(words) > 5: address['broadcast'] = words[3] else: # deal with hex netmask if re.match('([0-9a-f]){8}', words[3]) and len(words[3]) == 8: words[3] = '0x' + words[3] if words[3].startswith('0x'): address['netmask'] = socket.inet_ntoa(struct.pack('!L', int(words[3], base=16))) else: # otherwise assume this is a dotted quad address['netmask'] = words[3] # calculate the 
network address_bin = struct.unpack('!L', socket.inet_aton(address['address']))[0] netmask_bin = struct.unpack('!L', socket.inet_aton(address['netmask']))[0] address['network'] = socket.inet_ntoa(struct.pack('!L', address_bin & netmask_bin)) if 'broadcast' not in address: # broadcast may be given or we need to calculate if len(words) > 5: address['broadcast'] = words[5] else: address['broadcast'] = socket.inet_ntoa(struct.pack('!L', address_bin | (~netmask_bin & 0xffffffff))) # add to our list of addresses if not words[1].startswith('127.'): ips['all_ipv4_addresses'].append(address['address']) current_if['ipv4'].append(address) def parse_inet6_line(self, words, current_if, ips): address = {'address': words[1]} # using cidr style addresses, ala NetBSD ifconfig post 7.1 if '/' in address['address']: ip_address, cidr_mask = address['address'].split('/') address['address'] = ip_address address['prefix'] = cidr_mask if len(words) > 5: address['scope'] = words[5] else: if (len(words) >= 4) and (words[2] == 'prefixlen'): address['prefix'] = words[3] if (len(words) >= 6) and (words[4] == 'scopeid'): address['scope'] = words[5] localhost6 = ['::1', '::1/128', 'fe80::1%lo0'] if address['address'] not in localhost6: ips['all_ipv6_addresses'].append(address['address']) current_if['ipv6'].append(address) def parse_tunnel_line(self, words, current_if, ips): current_if['type'] = 'tunnel' def parse_unknown_line(self, words, current_if, ips): # we are going to ignore unknown lines here - this may be # a bad idea - but you can override it in your subclass pass # TODO: these are module scope static function candidates # (most of the class is really...) 
def get_options(self, option_string): start = option_string.find('<') + 1 end = option_string.rfind('>') if (start > 0) and (end > 0) and (end > start + 1): option_csv = option_string[start:end] return option_csv.split(',') else: return [] def merge_default_interface(self, defaults, interfaces, ip_type): if 'interface' not in defaults: return if not defaults['interface'] in interfaces: return ifinfo = interfaces[defaults['interface']] # copy all the interface values across except addresses for item in ifinfo: if item != 'ipv4' and item != 'ipv6': defaults[item] = ifinfo[item] ipinfo = [] if 'address' in defaults: ipinfo = [x for x in ifinfo[ip_type] if x['address'] == defaults['address']] if len(ipinfo) == 0: ipinfo = ifinfo[ip_type] if len(ipinfo) > 0: for item in ipinfo[0]: defaults[item] = ipinfo[0][item]
gpl-3.0
kohnle-lernmodule/exeLearningPlus1_04
twisted/cred/perspective.py
17
5793
# -*- test-case-name: twisted.test.test_cred -*- # Copyright (c) 2001-2004 Twisted Matrix Laboratories. # See LICENSE for details. """ This module is for domain-specific representations of users. Stability: semi-stable Future Plans: Errors may be changed to unify reporting in twisted.cred. """ from twisted.python import log, reflect, components from twisted.cred import identity from zope import interface import types class IPerspective(components.Interface): def setIdentityName(self, name): """""" def setIdentity(self, identity): """""" def makeIdentity(self, password): """""" def getPerspectiveName(self): """""" def getService(self): """""" def setService(self, service): """""" def getIdentityRequest(self): """""" def attached(self, reference, identity): """""" def detached(self, reference, identity): """""" def setCached(self): """""" def isCached(self): """""" class Perspective: """I am an Identity's view onto a service. I am the interface through which most 'external' code should interact with a service; I represent the actions a user may perform upon a service, and the state associated with that user for that service. """ interface.implements(IPerspective) _service_cached = 0 # Has my service cached me from a loaded store, or do I live in memory usually? def __init__(self, perspectiveName, identityName="Nobody"): """Create me. I require a name for myself and a reference to the service I participate in. (My identity name will be 'Nobody' by default, which will normally not resolve.) 
""" if not isinstance(perspectiveName, types.StringType): raise TypeError("Expected string, got %s."% perspectiveName) if not isinstance(identityName, types.StringType): raise TypeError("Expected string, got %s."% identityName) self.perspectiveName = perspectiveName self.identityName = identityName def setIdentityName(self, name): if not isinstance(name, types.StringType): raise TypeError self.identityName = name def setIdentity(self, ident): """Determine which identity I connect to. """ if not isinstance(ident, identity.Identity): raise TypeError self.setIdentityName(ident.name) def makeIdentity(self, password): """Make an identity from this perspective with a password. This is a utility method, which can be used in circumstances where the distinction between Perspective and Identity is weak, such as single-Service servers. """ if not isinstance(password, types.StringType): raise TypeError ident = self.service.authorizer.createIdentity(self.perspectiveName) # ident = identity.Identity(self.perspectiveName, self.service.application) self.setIdentity(ident) ident.setPassword(password) ident.addKeyForPerspective(self) ident.save() return ident def getPerspectiveName(self): """Return the unique name of this perspective. This will return a value such that self.service.getPerspectiveNamed(value) is self. (XXX: That's assuming I have been addPerspective'd to my service.) """ return self.perspectiveName def getService(self): """Return a service. """ return self.service def setService(self, service): """Change what service I am a part of. """ self.service = service def setCached(self): self._service_cached = 1 def isCached(self): return self._service_cached def getIdentityRequest(self): """Request my identity. """ return (self.service.authorizer. getIdentityRequest(self.identityName)) _attachedCount = 0 def attached(self, reference, identity): """Called when a remote reference is 'attached' to me. 
After being authorized, a remote actor can attach to me through its identity. This call will be made when that happens, and the return value of this method will be used as the _actual_ perspective to which I am attached. Note that the symmetric call, detached, will be made on whatever this method returns, _not_ on me. Therefore, by default I return 'self'. """ log.msg('attached [%s]' % reflect.qual(self.__class__)) self._attachedCount = self._attachedCount + 1 if self._attachedCount == 1: self.service.cachePerspective(self) else: log.msg(" (multiple references attached: %s)" % self._attachedCount) return self def detached(self, reference, identity): """Called when a broker is 'detached' from me. See 'attached'. When a remote actor disconnects (or times out, for example, with HTTP), this is called in order to indicate that the reference associated with that peer is no longer attached to this perspective. """ log.msg('detached [%s]' % reflect.qual(self.__class__)) self._attachedCount = self._attachedCount - 1 if self._attachedCount <= 0: self.service.uncachePerspective(self) if self._attachedCount < 0: log.msg(" (Weird stuff: attached count = %s)" % self._attachedCount) else: log.msg(" (multiple references attached: %s)" % self._attachedCount) return self components.backwardsCompatImplements(Perspective)
gpl-2.0
hoangt/gem5v
src/arch/x86/isa/insts/x87/stack_management/stack_control.py
91
2162
# Copyright (c) 2007 The Hewlett-Packard Development Company # All rights reserved. # # The license below extends only to copyright in the software and shall # not be construed as granting a license to any other intellectual # property including but not limited to intellectual property relating # to a hardware implementation of the functionality of the software # licensed hereunder. You may use the software subject to the license # terms below provided that you ensure that this notice is replicated # unmodified and in its entirety in all distributions of the software, # modified or unmodified, in source code or in binary form. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer; # redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution; # neither the name of the copyright holders nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # # Authors: Gabe Black microcode = ''' # FDECSTP # FINCSTP '''
bsd-3-clause
mensler/ansible
lib/ansible/modules/network/f5/bigip_virtual_server.py
66
29027
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2015, Etienne Carriere <etienne.carriere@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. ANSIBLE_METADATA = { 'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.0' } DOCUMENTATION = ''' --- module: bigip_virtual_server short_description: "Manages F5 BIG-IP LTM virtual servers" description: - "Manages F5 BIG-IP LTM virtual servers via iControl SOAP API" version_added: "2.1" author: - Etienne Carriere (@Etienne-Carriere) - Tim Rupp (@caphrim007) notes: - "Requires BIG-IP software version >= 11" - "F5 developed module 'bigsuds' required (see http://devcentral.f5.com)" - "Best run as a local_action in your playbook" requirements: - bigsuds options: state: description: - Virtual Server state - Absent, delete the VS if present - C(present) (and its synonym enabled), create if needed the VS and set state to enabled - C(disabled), create if needed the VS and set state to disabled required: false default: present choices: - present - absent - enabled - disabled aliases: [] partition: description: - Partition required: false default: 'Common' name: description: - Virtual server name required: true aliases: - vs destination: description: - Destination IP of the virtual server (only host is currently supported). Required when state=present and vs does not exist. 
required: true aliases: - address - ip port: description: - Port of the virtual server. Required when state=present and vs does not exist. If you specify a value for this field, it must be a number between 0 and 65535. required: false default: None all_profiles: description: - List of all Profiles (HTTP,ClientSSL,ServerSSL,etc) that must be used by the virtual server required: false default: None all_policies: description: - List of all policies enabled for the virtual server. required: false default: None version_added: "2.3" all_rules: version_added: "2.2" description: - List of rules to be applied in priority order required: false default: None enabled_vlans: version_added: "2.2" description: - List of vlans to be enabled. When a VLAN named C(ALL) is used, all VLANs will be allowed. required: false default: None pool: description: - Default pool for the virtual server required: false default: None snat: description: - Source network address policy required: false choices: - None - Automap - Name of a SNAT pool (eg "/Common/snat_pool_name") to enable SNAT with the specific pool default: None default_persistence_profile: description: - Default Profile which manages the session persistence required: false default: None fallback_persistence_profile: description: - Specifies the persistence profile you want the system to use if it cannot use the specified default persistence profile. 
required: false default: None version_added: "2.3" route_advertisement_state: description: - Enable route advertisement for destination required: false default: disabled version_added: "2.3" description: description: - Virtual server description required: false default: None extends_documentation_fragment: f5 ''' EXAMPLES = ''' - name: Add virtual server bigip_virtual_server: server: lb.mydomain.net user: admin password: secret state: present partition: MyPartition name: myvirtualserver destination: "{{ ansible_default_ipv4['address'] }}" port: 443 pool: "{{ mypool }}" snat: Automap description: Test Virtual Server all_profiles: - http - clientssl enabled_vlans: - /Common/vlan2 delegate_to: localhost - name: Modify Port of the Virtual Server bigip_virtual_server: server: lb.mydomain.net user: admin password: secret state: present partition: MyPartition name: myvirtualserver port: 8080 delegate_to: localhost - name: Delete virtual server bigip_virtual_server: server: lb.mydomain.net user: admin password: secret state: absent partition: MyPartition name: myvirtualserver delegate_to: localhost ''' RETURN = ''' --- deleted: description: Name of a virtual server that was deleted returned: changed type: string sample: "my-virtual-server" ''' # map of state values STATES = { 'enabled': 'STATE_ENABLED', 'disabled': 'STATE_DISABLED' } STATUSES = { 'enabled': 'SESSION_STATUS_ENABLED', 'disabled': 'SESSION_STATUS_DISABLED', 'offline': 'SESSION_STATUS_FORCED_DISABLED' } def vs_exists(api, vs): # hack to determine if pool exists result = False try: api.LocalLB.VirtualServer.get_object_status(virtual_servers=[vs]) result = True except bigsuds.OperationFailed as e: if "was not found" in str(e): result = False else: # genuine exception raise return result def vs_create(api, name, destination, port, pool, profiles): if profiles: _profiles = [] for profile in profiles: _profiles.append( dict( profile_context='PROFILE_CONTEXT_TYPE_ALL', profile_name=profile ) ) else: _profiles = 
[{'profile_context': 'PROFILE_CONTEXT_TYPE_ALL', 'profile_name': 'tcp'}] # a bit of a hack to handle concurrent runs of this module. # even though we've checked the vs doesn't exist, # it may exist by the time we run create_vs(). # this catches the exception and does something smart # about it! try: api.LocalLB.VirtualServer.create( definitions=[{'name': [name], 'address': [destination], 'port': port, 'protocol': 'PROTOCOL_TCP'}], wildmasks=['255.255.255.255'], resources=[{'type': 'RESOURCE_TYPE_POOL', 'default_pool_name': pool}], profiles=[_profiles]) created = True return created except bigsuds.OperationFailed as e: raise Exception('Error on creating Virtual Server : %s' % e) def vs_remove(api, name): api.LocalLB.VirtualServer.delete_virtual_server( virtual_servers=[name] ) def get_rules(api, name): return api.LocalLB.VirtualServer.get_rule( virtual_servers=[name] )[0] def set_rules(api, name, rules_list): updated = False if rules_list is None: return False rules_list = list(enumerate(rules_list)) try: current_rules = [(x['priority'], x['rule_name']) for x in get_rules(api, name)] to_add_rules = [] for i, x in rules_list: if (i, x) not in current_rules: to_add_rules.append({'priority': i, 'rule_name': x}) to_del_rules = [] for i, x in current_rules: if (i, x) not in rules_list: to_del_rules.append({'priority': i, 'rule_name': x}) if len(to_del_rules) > 0: api.LocalLB.VirtualServer.remove_rule( virtual_servers=[name], rules=[to_del_rules] ) updated = True if len(to_add_rules) > 0: api.LocalLB.VirtualServer.add_rule( virtual_servers=[name], rules=[to_add_rules] ) updated = True return updated except bigsuds.OperationFailed as e: raise Exception('Error on setting rules : %s' % e) def get_profiles(api, name): return api.LocalLB.VirtualServer.get_profile( virtual_servers=[name] )[0] def set_profiles(api, name, profiles_list): updated = False try: if profiles_list is None: return False profiles_list = list(profiles_list) current_profiles = list(map(lambda x: 
x['profile_name'], get_profiles(api, name))) to_add_profiles = [] for x in profiles_list: if x not in current_profiles: to_add_profiles.append({'profile_context': 'PROFILE_CONTEXT_TYPE_ALL', 'profile_name': x}) to_del_profiles = [] for x in current_profiles: if (x not in profiles_list) and (x != "/Common/tcp"): to_del_profiles.append({'profile_context': 'PROFILE_CONTEXT_TYPE_ALL', 'profile_name': x}) if len(to_del_profiles) > 0: api.LocalLB.VirtualServer.remove_profile( virtual_servers=[name], profiles=[to_del_profiles] ) updated = True if len(to_add_profiles) > 0: api.LocalLB.VirtualServer.add_profile( virtual_servers=[name], profiles=[to_add_profiles] ) updated = True current_profiles = list(map(lambda x: x['profile_name'], get_profiles(api, name))) if len(current_profiles) == 0: raise F5ModuleError( "Virtual servers must has at least one profile" ) return updated except bigsuds.OperationFailed as e: raise Exception('Error on setting profiles : %s' % e) def get_policies(api, name): return api.LocalLB.VirtualServer.get_content_policy( virtual_servers=[name] )[0] def set_policies(api, name, policies_list): updated = False try: if policies_list is None: return False policies_list = list(policies_list) current_policies = get_policies(api, name) to_add_policies = [] for x in policies_list: if x not in current_policies: to_add_policies.append(x) to_del_policies = [] for x in current_policies: if x not in policies_list: to_del_policies.append(x) if len(to_del_policies) > 0: api.LocalLB.VirtualServer.remove_content_policy( virtual_servers=[name], policies=[to_del_policies] ) updated = True if len(to_add_policies) > 0: api.LocalLB.VirtualServer.add_content_policy( virtual_servers=[name], policies=[to_add_policies] ) updated = True return updated except bigsuds.OperationFailed as e: raise Exception('Error on setting policies : %s' % e) def get_vlan(api, name): return api.LocalLB.VirtualServer.get_vlan( virtual_servers=[name] )[0] def set_enabled_vlans(api, name, 
vlans_enabled_list): updated = False to_add_vlans = [] try: if vlans_enabled_list is None: return updated vlans_enabled_list = list(vlans_enabled_list) current_vlans = get_vlan(api, name) # Set allowed list back to default ("all") # # This case allows you to undo what you may have previously done. # The default case is "All VLANs and Tunnels". This case will handle # that situation. if 'ALL' in vlans_enabled_list: # The user is coming from a situation where they previously # were specifying a list of allowed VLANs if len(current_vlans['vlans']) > 0 or \ current_vlans['state'] is "STATE_ENABLED": api.LocalLB.VirtualServer.set_vlan( virtual_servers=[name], vlans=[{'state': 'STATE_DISABLED', 'vlans': []}] ) updated = True else: if current_vlans['state'] is "STATE_DISABLED": to_add_vlans = vlans_enabled_list else: for vlan in vlans_enabled_list: if vlan not in current_vlans['vlans']: updated = True to_add_vlans = vlans_enabled_list break if updated: api.LocalLB.VirtualServer.set_vlan( virtual_servers=[name], vlans=[{ 'state': 'STATE_ENABLED', 'vlans': [to_add_vlans] }] ) return updated except bigsuds.OperationFailed as e: raise Exception('Error on setting enabled vlans : %s' % e) def set_snat(api, name, snat): updated = False try: current_state = get_snat_type(api, name) current_snat_pool = get_snat_pool(api, name) if snat is None: return updated elif snat == 'None' and current_state != 'SRC_TRANS_NONE': api.LocalLB.VirtualServer.set_source_address_translation_none( virtual_servers=[name] ) updated = True elif snat == 'Automap' and current_state != 'SRC_TRANS_AUTOMAP': api.LocalLB.VirtualServer.set_source_address_translation_automap( virtual_servers=[name] ) updated = True elif snat_settings_need_updating(snat, current_state, current_snat_pool): api.LocalLB.VirtualServer.set_source_address_translation_snat_pool( virtual_servers=[name], pools=[snat] ) return updated except bigsuds.OperationFailed as e: raise Exception('Error on setting snat : %s' % e) def 
get_snat_type(api, name): return api.LocalLB.VirtualServer.get_source_address_translation_type( virtual_servers=[name] )[0] def get_snat_pool(api, name): return api.LocalLB.VirtualServer.get_source_address_translation_snat_pool( virtual_servers=[name] )[0] def snat_settings_need_updating(snat, current_state, current_snat_pool): if snat == 'None' or snat == 'Automap': return False elif snat and current_state != 'SRC_TRANS_SNATPOOL': return True elif snat and current_state == 'SRC_TRANS_SNATPOOL' and current_snat_pool != snat: return True else: return False def get_pool(api, name): return api.LocalLB.VirtualServer.get_default_pool_name( virtual_servers=[name] )[0] def set_pool(api, name, pool): updated = False try: current_pool = get_pool(api, name) if pool is not None and (pool != current_pool): api.LocalLB.VirtualServer.set_default_pool_name( virtual_servers=[name], default_pools=[pool] ) updated = True return updated except bigsuds.OperationFailed as e: raise Exception('Error on setting pool : %s' % e) def get_destination(api, name): return api.LocalLB.VirtualServer.get_destination_v2( virtual_servers=[name] )[0] def set_destination(api, name, destination): updated = False try: current_destination = get_destination(api, name) if destination is not None and destination != current_destination['address']: api.LocalLB.VirtualServer.set_destination_v2( virtual_servers=[name], destinations=[{'address': destination, 'port': current_destination['port']}] ) updated = True return updated except bigsuds.OperationFailed as e: raise Exception('Error on setting destination : %s' % e) def set_port(api, name, port): updated = False try: current_destination = get_destination(api, name) if port is not None and port != current_destination['port']: api.LocalLB.VirtualServer.set_destination_v2( virtual_servers=[name], destinations=[{'address': current_destination['address'], 'port': port}] ) updated = True return updated except bigsuds.OperationFailed as e: raise Exception('Error on 
setting port : %s' % e) def get_state(api, name): return api.LocalLB.VirtualServer.get_enabled_state( virtual_servers=[name] )[0] def set_state(api, name, state): updated = False try: current_state = get_state(api, name) # We consider that being present is equivalent to enabled if state == 'present': state = 'enabled' if STATES[state] != current_state: api.LocalLB.VirtualServer.set_enabled_state( virtual_servers=[name], states=[STATES[state]] ) updated = True return updated except bigsuds.OperationFailed as e: raise Exception('Error on setting state : %s' % e) def get_description(api, name): return api.LocalLB.VirtualServer.get_description( virtual_servers=[name] )[0] def set_description(api, name, description): updated = False try: current_description = get_description(api, name) if description is not None and current_description != description: api.LocalLB.VirtualServer.set_description( virtual_servers=[name], descriptions=[description] ) updated = True return updated except bigsuds.OperationFailed as e: raise Exception('Error on setting description : %s ' % e) def get_persistence_profiles(api, name): return api.LocalLB.VirtualServer.get_persistence_profile( virtual_servers=[name] )[0] def set_default_persistence_profiles(api, name, persistence_profile): updated = False if persistence_profile is None: return updated try: current_persistence_profiles = get_persistence_profiles(api, name) default = None for profile in current_persistence_profiles: if profile['default_profile']: default = profile['profile_name'] break if default is not None and default != persistence_profile: api.LocalLB.VirtualServer.remove_persistence_profile( virtual_servers=[name], profiles=[[{'profile_name': default, 'default_profile': True}]] ) if default != persistence_profile: api.LocalLB.VirtualServer.add_persistence_profile( virtual_servers=[name], profiles=[[{'profile_name': persistence_profile, 'default_profile': True}]] ) updated = True return updated except bigsuds.OperationFailed as 
e: raise Exception('Error on setting default persistence profile : %s' % e) def get_fallback_persistence_profile(api, name): return api.LocalLB.VirtualServer.get_fallback_persistence_profile( virtual_servers=[name] )[0] def set_fallback_persistence_profile(api, partition, name, persistence_profile): updated = False if persistence_profile is None: return updated try: # This is needed because the SOAP API expects this to be an "empty" # value to set the fallback profile to "None". The fq_name function # does not take "None" into account though, so I do that here. if persistence_profile != "": persistence_profile = fq_name(partition, persistence_profile) current_fallback_profile = get_fallback_persistence_profile(api, name) if current_fallback_profile != persistence_profile: api.LocalLB.VirtualServer.set_fallback_persistence_profile( virtual_servers=[name], profile_names=[persistence_profile] ) updated = True return updated except bigsuds.OperationFailed as e: raise Exception('Error on setting fallback persistence profile : %s' % e) def get_route_advertisement_status(api, address): result = None results = api.LocalLB.VirtualAddressV2.get_route_advertisement_state(virtual_addresses=[address]) if results: result = results.pop(0) result = result.split("STATE_")[-1].lower() return result def set_route_advertisement_state(api, destination, partition, route_advertisement_state): updated = False if route_advertisement_state is None: return False try: state = "STATE_%s" % route_advertisement_state.strip().upper() address = fq_name(partition, destination,) current_route_advertisement_state = get_route_advertisement_status(api,address) if current_route_advertisement_state != route_advertisement_state: api.LocalLB.VirtualAddressV2.set_route_advertisement_state(virtual_addresses=[address], states=[state]) updated = True return updated except bigsuds.OperationFailed as e: raise Exception('Error on setting profiles : %s' % e) def main(): argument_spec = f5_argument_spec() 
argument_spec.update(dict( state=dict(type='str', default='present', choices=['present', 'absent', 'disabled', 'enabled']), name=dict(type='str', required=True, aliases=['vs']), destination=dict(type='str', aliases=['address', 'ip']), port=dict(type='str', default=None), all_policies=dict(type='list'), all_profiles=dict(type='list', default=None), all_rules=dict(type='list'), enabled_vlans=dict(type='list'), pool=dict(type='str'), description=dict(type='str'), snat=dict(type='str'), route_advertisement_state=dict( type='str', default=None, choices=['enabled', 'disabled'] ), default_persistence_profile=dict(type='str'), fallback_persistence_profile=dict(type='str') )) module = AnsibleModule( argument_spec=argument_spec, supports_check_mode=True ) if not bigsuds_found: module.fail_json(msg="the python bigsuds module is required") if module.params['validate_certs']: import ssl if not hasattr(ssl, 'SSLContext'): module.fail_json( msg='bigsuds does not support verifying certificates with python < 2.7.9. 
Either update python or set validate_certs=False on the task' ) server = module.params['server'] server_port = module.params['server_port'] user = module.params['user'] password = module.params['password'] state = module.params['state'] partition = module.params['partition'] validate_certs = module.params['validate_certs'] name = fq_name(partition, module.params['name']) destination = module.params['destination'] port = module.params['port'] if port == '' or port is None: port = None else: port = int(port) all_profiles = fq_list_names(partition, module.params['all_profiles']) all_policies = fq_list_names(partition, module.params['all_policies']) all_rules = fq_list_names(partition, module.params['all_rules']) enabled_vlans = module.params['enabled_vlans'] if enabled_vlans is None or 'ALL' in enabled_vlans: all_enabled_vlans = enabled_vlans else: all_enabled_vlans = fq_list_names(partition, enabled_vlans) pool = fq_name(partition, module.params['pool']) description = module.params['description'] snat = module.params['snat'] route_advertisement_state = module.params['route_advertisement_state'] default_persistence_profile = fq_name(partition, module.params['default_persistence_profile']) fallback_persistence_profile = module.params['fallback_persistence_profile'] if 0 > port > 65535: module.fail_json(msg="valid ports must be in range 0 - 65535") try: api = bigip_api(server, user, password, validate_certs, port=server_port) result = {'changed': False} # default if state == 'absent': if not module.check_mode: if vs_exists(api, name): # hack to handle concurrent runs of module # pool might be gone before we actually remove try: vs_remove(api, name) result = {'changed': True, 'deleted': name} except bigsuds.OperationFailed as e: if "was not found" in str(e): result['changed'] = False else: raise else: # check-mode return value result = {'changed': True} else: update = False if not vs_exists(api, name): if (not destination) or (port is None): module.fail_json(msg="both 
destination and port must be supplied to create a VS") if not module.check_mode: # a bit of a hack to handle concurrent runs of this module. # even though we've checked the virtual_server doesn't exist, # it may exist by the time we run virtual_server(). # this catches the exception and does something smart # about it! try: vs_create(api, name, destination, port, pool, all_profiles) set_policies(api, name, all_policies) set_enabled_vlans(api, name, all_enabled_vlans) set_rules(api, name, all_rules) set_snat(api, name, snat) set_description(api, name, description) set_default_persistence_profiles(api, name, default_persistence_profile) set_fallback_persistence_profile(api, partition, name, fallback_persistence_profile) set_state(api, name, state) set_route_advertisement_state(api, destination, partition, route_advertisement_state) result = {'changed': True} except bigsuds.OperationFailed as e: raise Exception('Error on creating Virtual Server : %s' % e) else: # check-mode return value result = {'changed': True} else: update = True if update: # VS exists if not module.check_mode: # Have a transaction for all the changes try: api.System.Session.start_transaction() result['changed'] |= set_destination(api, name, fq_name(partition, destination)) result['changed'] |= set_port(api, name, port) result['changed'] |= set_pool(api, name, pool) result['changed'] |= set_description(api, name, description) result['changed'] |= set_snat(api, name, snat) result['changed'] |= set_profiles(api, name, all_profiles) result['changed'] |= set_policies(api, name, all_policies) result['changed'] |= set_enabled_vlans(api, name, all_enabled_vlans) result['changed'] |= set_rules(api, name, all_rules) result['changed'] |= set_default_persistence_profiles(api, name, default_persistence_profile) result['changed'] |= set_fallback_persistence_profile(api, partition, name, fallback_persistence_profile) result['changed'] |= set_state(api, name, state) result['changed'] |= 
set_route_advertisement_state(api, destination, partition, route_advertisement_state) api.System.Session.submit_transaction() except Exception as e: raise Exception("Error on updating Virtual Server : %s" % str(e)) else: # check-mode return value result = {'changed': True} except Exception as e: module.fail_json(msg="received exception: %s" % e) module.exit_json(**result) # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.f5_utils import * if __name__ == '__main__': main()
gpl-3.0
orbisgis/orbiswps
tools/gettext-all.py
2
1223
from __future__ import print_function import os import shutil from subprocess import call import mmap def pom_with_gettext(filename): with open(filename) as f: s = mmap.mmap(f.fileno(), 0, access=mmap.ACCESS_READ) return s.find('<artifactId>gettext-maven-plugin</artifactId>') > 0 def callext(tab): print(" ".join(tab)) call(tab) merge_po = False ## # Copy and merge language file into each projects og_folder = os.path.abspath("../") for root, dirs, files in os.walk(og_folder): for file in files: if file == "pom.xml" and pom_with_gettext(os.path.join(root, file)): os.chdir(root) # Read source file to create key file and merge into PO callext(["mvn" ,"gettext:gettext-wps"]) if merge_po: callext(["mvn" ,"gettext:merge", "-DmsgmergeCmd\"msgmerge --backup=off\""]) #remove obsolete and fuzzy entry in po for proot, pdirs, pfiles in os.walk(root): for pfile in pfiles: if pfile.endswith(".po"): os.chdir(proot) callext(["msgattrib" ,pfile,"--no-obsolete","--no-fuzzy","-o",pfile])
gpl-3.0
EvanzzzZ/mxnet
example/nce-loss/wordvec.py
15
5514
# pylint:skip-file from __future__ import print_function import logging import sys, random, time, math sys.path.insert(0, "../../python") import mxnet as mx import numpy as np from collections import namedtuple from nce import * from operator import itemgetter from optparse import OptionParser def get_net(vocab_size, num_input, num_label): data = mx.sym.Variable('data') label = mx.sym.Variable('label') label_weight = mx.sym.Variable('label_weight') embed_weight = mx.sym.Variable('embed_weight') data_embed = mx.sym.Embedding(data = data, input_dim = vocab_size, weight = embed_weight, output_dim = 100, name = 'data_embed') datavec = mx.sym.SliceChannel(data = data_embed, num_outputs = num_input, squeeze_axis = 1, name = 'data_slice') pred = datavec[0] for i in range(1, num_input): pred = pred + datavec[i] return nce_loss(data = pred, label = label, label_weight = label_weight, embed_weight = embed_weight, vocab_size = vocab_size, num_hidden = 100, num_label = num_label) def load_data(name): buf = open(name).read() tks = buf.split(' ') vocab = {} freq = [0] data = [] for tk in tks: if len(tk) == 0: continue if tk not in vocab: vocab[tk] = len(vocab) + 1 freq.append(0) wid = vocab[tk] data.append(wid) freq[wid] += 1 negative = [] for i, v in enumerate(freq): if i == 0 or v < 5: continue v = int(math.pow(v * 1.0, 0.75)) negative += [i for _ in range(v)] return data, negative, vocab, freq class SimpleBatch(object): def __init__(self, data_names, data, label_names, label): self.data = data self.label = label self.data_names = data_names self.label_names = label_names @property def provide_data(self): return [(n, x.shape) for n, x in zip(self.data_names, self.data)] @property def provide_label(self): return [(n, x.shape) for n, x in zip(self.label_names, self.label)] class DataIter(mx.io.DataIter): def __init__(self, name, batch_size, num_label): super(DataIter, self).__init__() self.batch_size = batch_size self.data, self.negative, self.vocab, self.freq = load_data(name) 
self.vocab_size = 1 + len(self.vocab) print(self.vocab_size) self.num_label = num_label self.provide_data = [('data', (batch_size, num_label - 1))] self.provide_label = [('label', (self.batch_size, num_label)), ('label_weight', (self.batch_size, num_label))] def sample_ne(self): return self.negative[random.randint(0, len(self.negative) - 1)] def __iter__(self): print('begin') batch_data = [] batch_label = [] batch_label_weight = [] start = random.randint(0, self.num_label - 1) for i in range(start, len(self.data) - self.num_label - start, self.num_label): context = self.data[i: i + self.num_label / 2] \ + self.data[i + 1 + self.num_label / 2: i + self.num_label] target_word = self.data[i + self.num_label / 2] if self.freq[target_word] < 5: continue target = [target_word] \ + [self.sample_ne() for _ in range(self.num_label - 1)] target_weight = [1.0] + [0.0 for _ in range(self.num_label - 1)] batch_data.append(context) batch_label.append(target) batch_label_weight.append(target_weight) if len(batch_data) == self.batch_size: data_all = [mx.nd.array(batch_data)] label_all = [mx.nd.array(batch_label), mx.nd.array(batch_label_weight)] data_names = ['data'] label_names = ['label', 'label_weight'] batch_data = [] batch_label = [] batch_label_weight = [] yield SimpleBatch(data_names, data_all, label_names, label_all) def reset(self): pass if __name__ == '__main__': head = '%(asctime)-15s %(message)s' logging.basicConfig(level=logging.DEBUG, format=head) parser = OptionParser() parser.add_option("-g", "--gpu", action = "store_true", dest = "gpu", default = False, help = "use gpu") batch_size = 256 num_label = 5 data_train = DataIter("./data/text8", batch_size, num_label) network = get_net(data_train.vocab_size, num_label - 1, num_label) options, args = parser.parse_args() devs = mx.cpu() if options.gpu == True: devs = mx.gpu() model = mx.model.FeedForward(ctx = devs, symbol = network, num_epoch = 20, learning_rate = 0.3, momentum = 0.9, wd = 0.0000, 
initializer=mx.init.Xavier(factor_type="in", magnitude=2.34)) metric = NceAuc() model.fit(X = data_train, eval_metric = metric, batch_end_callback = mx.callback.Speedometer(batch_size, 50),)
apache-2.0
kidaa/aurora
src/test/python/apache/thermos/monitoring/test_disk.py
8
1761
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#

import os
from tempfile import mkstemp

from twitter.common.dirutil import safe_mkdtemp
from twitter.common.quantity import Amount, Data

from apache.thermos.monitoring.disk import DiskCollector

# Fixture sizes used by the collector assertions below.
TEST_AMOUNT_1 = Amount(100, Data.MB)
TEST_AMOUNT_2 = Amount(10, Data.MB)
TEST_AMOUNT_SUM = TEST_AMOUNT_1 + TEST_AMOUNT_2


def make_file(size, dir):
    """Create a file of `size` (an Amount of Data) inside `dir`; return its path."""
    fd, filename = mkstemp(dir=dir)
    # Adopt the descriptor returned by mkstemp so it is closed when the
    # with-block exits -- the original discarded it, leaking one fd per file.
    with os.fdopen(fd, 'w') as f:
        f.write('0' * int(size.as_(Data.BYTES)))
    return filename


def _run_collector_tests(collector, target, wait):
    """Exercise `collector` against directory `target`.

    `wait` is a callable that blocks until the collector has refreshed its
    sample, so every assertion below observes an up-to-date value.
    """
    assert collector.value == 0

    collector.sample()
    wait()
    assert collector.value == 0

    f1 = make_file(TEST_AMOUNT_1, dir=target)
    wait()
    assert collector.value >= TEST_AMOUNT_1.as_(Data.BYTES)

    make_file(TEST_AMOUNT_2, dir=target)
    wait()
    assert collector.value >= TEST_AMOUNT_SUM.as_(Data.BYTES)

    os.unlink(f1)
    wait()
    assert TEST_AMOUNT_SUM.as_(Data.BYTES) > collector.value >= TEST_AMOUNT_2.as_(Data.BYTES)


def test_du_diskcollector():
    """DiskCollector observes file creation and removal via its sampling thread."""
    target = safe_mkdtemp()
    collector = DiskCollector(target)

    def wait():
        collector.sample()
        if collector._thread is not None:
            collector._thread.event.wait()

    _run_collector_tests(collector, target, wait)
apache-2.0
SebasSBM/django
tests/template_tests/test_nodelist.py
173
3234
from unittest import TestCase

from django.template import Context, Engine
from django.template.base import TextNode, VariableNode
from django.utils import six


class NodelistTest(TestCase):

    @classmethod
    def setUpClass(cls):
        cls.engine = Engine()
        super(NodelistTest, cls).setUpClass()

    def _assert_single_variable(self, source):
        """The compiled template must contain exactly one VariableNode."""
        nodes = self.engine.from_string(source).nodelist.get_nodes_by_type(VariableNode)
        self.assertEqual(len(nodes), 1)

    def test_for(self):
        self._assert_single_variable('{% for i in 1 %}{{ a }}{% endfor %}')

    def test_if(self):
        self._assert_single_variable('{% if x %}{{ a }}{% endif %}')

    def test_ifequal(self):
        self._assert_single_variable('{% ifequal x y %}{{ a }}{% endifequal %}')

    def test_ifchanged(self):
        self._assert_single_variable('{% ifchanged x %}{{ a }}{% endifchanged %}')


class TextNodeTest(TestCase):

    def test_textnode_repr(self):
        engine = Engine()
        cases = [
            ("Hello, world!", "<TextNode: u'Hello, world!'>"),
            ("One\ntwo.", "<TextNode: u'One\\ntwo.'>"),
        ]
        for source, expected in cases:
            text_nodes = engine.from_string(source).nodelist.get_nodes_by_type(TextNode)
            if six.PY3:
                # The expectations above are written for Python 2 reprs;
                # drop the u-prefix on Python 3.
                expected = expected.replace("u'", "'")
            self.assertEqual(repr(text_nodes[0]), expected)


class ErrorIndexTest(TestCase):
    """
    Checks whether index of error is calculated correctly in template debugger
    in for loops. Refs ticket #5831
    """
    def test_correct_exception_index(self):
        # Pairs of (template source, expected (start, end) of the failing
        # token as reported by the template debugger).
        tests = [
            ('{% load bad_tag %}{% for i in range %}{% badsimpletag %}{% endfor %}',
             (38, 56)),
            ('{% load bad_tag %}{% for i in range %}{% for j in range %}'
             '{% badsimpletag %}{% endfor %}{% endfor %}',
             (58, 76)),
            ('{% load bad_tag %}{% for i in range %}{% badsimpletag %}'
             '{% for j in range %}Hello{% endfor %}{% endfor %}',
             (38, 56)),
            ('{% load bad_tag %}{% for i in range %}{% for j in five %}'
             '{% badsimpletag %}{% endfor %}{% endfor %}',
             (38, 57)),
            ('{% load bad_tag %}{% for j in five %}{% badsimpletag %}{% endfor %}',
             (18, 37)),
        ]
        context = Context({
            'range': range(5),
            'five': 5,
        })
        engine = Engine(debug=True,
                        libraries={'bad_tag': 'template_tests.templatetags.bad_tag'})
        for source, expected_error_source_index in tests:
            template = engine.from_string(source)
            try:
                template.render(context)
            except (RuntimeError, TypeError) as e:
                debug = e.template_debug
                self.assertEqual((debug['start'], debug['end']),
                                 expected_error_source_index)
bsd-3-clause
eventql/eventql
deps/3rdparty/spidermonkey/mozjs/testing/mozbase/mozprofile/tests/server_locations.py
3
5364
#!/usr/bin/env python

# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http://mozilla.org/MPL/2.0/.

import mozfile
import unittest
from mozprofile.permissions import ServerLocations, \
    MissingPrimaryLocationError, MultiplePrimaryLocationsError, \
    DuplicateLocationError, BadPortLocationError, LocationsSyntaxError


class ServerLocationsTest(unittest.TestCase):
    """test server locations"""

    locations = """# This is the primary location from which tests run.
#
http://mochi.test:8888 primary,privileged

# a few test locations
http://127.0.0.1:80 privileged
http://127.0.0.1:8888 privileged
https://test:80 privileged
http://example.org:80 privileged
http://test1.example.org privileged
"""

    locations_no_primary = """http://secondary.test:80 privileged
http://tertiary.test:8888 privileged
"""

    locations_bad_port = """http://mochi.test:8888 primary,privileged
http://127.0.0.1:80 privileged
http://127.0.0.1:8888 privileged
http://test:badport privileged
http://example.org:80 privileged
"""

    def compare_location(self, location, scheme, host, port, options):
        """Assert all four attributes of a parsed location at once."""
        self.assertEqual(location.scheme, scheme)
        self.assertEqual(location.host, host)
        self.assertEqual(location.port, port)
        self.assertEqual(location.options, options)

    def create_temp_file(self, contents):
        """Write `contents` to a self-cleaning temp file and return its handle."""
        f = mozfile.NamedTemporaryFile()
        f.write(contents)
        f.flush()
        return f

    def test_server_locations(self):
        # write a permissions file
        f = self.create_temp_file(self.locations)

        # read the locations
        locations = ServerLocations(f.name)

        # ensure that they're what we expect
        self.assertEqual(len(locations), 6)
        i = iter(locations)
        # next(i) replaces i.next(): equivalent on Python 2.6+ and also
        # valid on Python 3, where iterator.next() no longer exists.
        self.compare_location(next(i), 'http', 'mochi.test', '8888',
                              ['primary', 'privileged'])
        self.compare_location(next(i), 'http', '127.0.0.1', '80',
                              ['privileged'])
        self.compare_location(next(i), 'http', '127.0.0.1', '8888',
                              ['privileged'])
        self.compare_location(next(i), 'https', 'test', '80',
                              ['privileged'])
        self.compare_location(next(i), 'http', 'example.org', '80',
                              ['privileged'])
        self.compare_location(next(i), 'http', 'test1.example.org', '8888',
                              ['privileged'])

        locations.add_host('mozilla.org')
        self.assertEqual(len(locations), 7)
        self.compare_location(next(i), 'http', 'mozilla.org', '80',
                              ['privileged'])

        # test some errors
        self.assertRaises(MultiplePrimaryLocationsError, locations.add_host,
                          'primary.test', options='primary')

        # We no longer throw these DuplicateLocation Error
        try:
            locations.add_host('127.0.0.1')
        except DuplicateLocationError:
            self.assertTrue(False, "Should no longer throw DuplicateLocationError")

        self.assertRaises(BadPortLocationError, locations.add_host,
                          '127.0.0.1', port='abc')

        # test some errors in locations file
        f = self.create_temp_file(self.locations_no_primary)

        exc = None
        try:
            ServerLocations(f.name)
        except LocationsSyntaxError as e:
            # `except ... as` replaces the Python-3-incompatible
            # `except LocationsSyntaxError, e` form (PEP 3110).
            exc = e
        self.assertNotEqual(exc, None)
        self.assertEqual(exc.err.__class__, MissingPrimaryLocationError)
        self.assertEqual(exc.lineno, 3)

        # test bad port in a locations file to ensure lineno calculated
        # properly.
        f = self.create_temp_file(self.locations_bad_port)

        exc = None
        try:
            ServerLocations(f.name)
        except LocationsSyntaxError as e:
            exc = e
        self.assertNotEqual(exc, None)
        self.assertEqual(exc.err.__class__, BadPortLocationError)
        self.assertEqual(exc.lineno, 4)

    def test_server_locations_callback(self):
        class CallbackTest(object):
            last_locations = None

            def callback(self, locations):
                self.last_locations = locations

        c = CallbackTest()
        f = self.create_temp_file(self.locations)
        locations = ServerLocations(f.name, c.callback)

        # callback should be for all locations in file
        self.assertEqual(len(c.last_locations), 6)

        # validate arbitrary one
        self.compare_location(c.last_locations[2], 'http', '127.0.0.1',
                              '8888', ['privileged'])

        locations.add_host('a.b.c')

        # callback should be just for one location
        self.assertEqual(len(c.last_locations), 1)
        self.compare_location(c.last_locations[0], 'http', 'a.b.c', '80',
                              ['privileged'])

        # read a second file, which should generate a callback with both
        # locations.
        f = self.create_temp_file(self.locations_no_primary)
        locations.read(f.name)
        self.assertEqual(len(c.last_locations), 2)


if __name__ == '__main__':
    unittest.main()
agpl-3.0
lochiiconnectivity/libcloud
docs/examples/compute/trystack.py
63
1064
from libcloud.compute.types import Provider
from libcloud.compute.providers import get_driver

import libcloud.security

# At the time this example was written, https://nova-api.trystack.org:5443
# was using a certificate issued by a Certificate Authority (CA) which is
# not included in the default Ubuntu certificates bundle (ca-certificates).
# Note: Code like this poses a security risk (MITM attack) and that's the
# reason why you should never use it for anything else besides testing. You
# have been warned.
libcloud.security.VERIFY_SSL_CERT = False

# Instantiate the generic OpenStack driver against TryStack's Nova endpoint,
# authenticating via the Keystone v2.0 password mechanism.
OpenStack = get_driver(Provider.OPENSTACK)
driver = OpenStack('your username', 'your password',
                   ex_force_auth_url='https://nova-api.trystack.org:5443',
                   ex_force_auth_version='2.0_password')

# Fetch the account's current servers plus the provider's image and
# flavor catalogs.
nodes = driver.list_nodes()
images = driver.list_images()
sizes = driver.list_sizes()

# Select the 512 MB flavor and the Ubuntu 11.04 (natty) cloud image.
# NOTE(review): the [0] indexing assumes both comprehensions match at least
# one catalog entry -- an IndexError here means the catalog changed.
size = [s for s in sizes if s.ram == 512][0]
image = [i for i in images if i.name == 'natty-server-cloudimg-amd64'][0]

# Boot a new instance from the chosen image and flavor.
node = driver.create_node(name='test node', image=image, size=size)
apache-2.0
ccortezb/troposphere
troposphere/sqs.py
27
1054
# Copyright (c) 2012-2013, Mark Peek <mark@peek.org> # All rights reserved. # # See LICENSE file for full license. from . import AWSObject, AWSProperty from .validators import integer try: from awacs.aws import Policy policytypes = (dict, Policy) except ImportError: policytypes = dict, class RedrivePolicy(AWSProperty): props = { 'deadLetterTargetArn': (basestring, False), 'maxReceiveCount': (integer, False), } class Queue(AWSObject): resource_type = "AWS::SQS::Queue" props = { 'DelaySeconds': (integer, False), 'MaximumMessageSize': (integer, False), 'MessageRetentionPeriod': (integer, False), 'QueueName': (basestring, False), 'ReceiveMessageWaitTimeSeconds': (integer, False), 'RedrivePolicy': (RedrivePolicy, False), 'VisibilityTimeout': (integer, False), } class QueuePolicy(AWSObject): resource_type = "AWS::SQS::QueuePolicy" props = { 'PolicyDocument': (policytypes, False), 'Queues': (list, True), }
bsd-2-clause
betoesquivel/fil2014
filenv/lib/python2.7/site-packages/django/contrib/gis/geos/prototypes/errcheck.py
623
3522
"""
 Error checking functions for GEOS ctypes prototype functions.
"""
import os
from ctypes import c_void_p, string_at, CDLL
from django.contrib.gis.geos.error import GEOSException
from django.contrib.gis.geos.libgeos import GEOS_VERSION
from django.contrib.gis.geos.prototypes.threadsafe import GEOSFunc

# Getting the `free` routine used to free the memory allocated for
# string pointers returned by GEOS.
if GEOS_VERSION >= (3, 1, 1):
    # In versions 3.1.1 and above, `GEOSFree` was added to the C API
    # because `free` isn't always available on all platforms.
    free = GEOSFunc('GEOSFree')
    free.argtypes = [c_void_p]
    free.restype = None
else:
    # Getting the `free` routine from the C library of the platform.
    if os.name == 'nt':
        # On NT, use the MS C library.
        libc = CDLL('msvcrt')
    else:
        # On POSIX platforms C library is obtained by passing None into `CDLL`.
        libc = CDLL(None)
    free = libc.free

### ctypes error checking routines ###

def last_arg_byref(args):
    "Returns the last C argument's value by reference."
    # Assumes the caller passed the last argument with ctypes.byref();
    # ._obj is the underlying ctypes object wrapped by that reference.
    return args[-1]._obj.value

def check_dbl(result, func, cargs):
    "Checks the status code and returns the double value passed in by reference."
    # Checking the status code
    if result != 1:
        return None
    # Double passed in by reference, return its value.
    return last_arg_byref(cargs)

def check_geom(result, func, cargs):
    "Error checking on routines that return Geometries."
    # `result` is a raw geometry pointer; NULL signals a GEOS failure.
    if not result:
        raise GEOSException('Error encountered checking Geometry returned from GEOS C function "%s".' % func.__name__)
    return result

def check_minus_one(result, func, cargs):
    "Error checking on routines that should not return -1."
    if result == -1:
        raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__)
    else:
        return result

def check_predicate(result, func, cargs):
    "Error checking for unary/binary predicate functions."
    # Predicates return a single C char: 1 -> true, 0 -> false, and any
    # other value (e.g. on exception) is treated as an error below.
    val = ord(result)  # getting the ordinal from the character
    if val == 1:
        return True
    elif val == 0:
        return False
    else:
        raise GEOSException('Error encountered on GEOS C predicate function "%s".' % func.__name__)

def check_sized_string(result, func, cargs):
    """
    Error checking for routines that return explicitly sized strings.

    This frees the memory allocated by GEOS at the result pointer.
    """
    if not result:
        raise GEOSException('Invalid string pointer returned by GEOS C function "%s"' % func.__name__)
    # A c_size_t object is passed in by reference for the second
    # argument on these routines, and its needed to determine the
    # correct size.
    s = string_at(result, last_arg_byref(cargs))
    # Freeing the memory allocated within GEOS
    # (copy with string_at must happen BEFORE the buffer is freed).
    free(result)
    return s

def check_string(result, func, cargs):
    """
    Error checking for routines that return strings.

    This frees the memory allocated by GEOS at the result pointer.
    """
    if not result:
        raise GEOSException('Error encountered checking string return value in GEOS C function "%s".' % func.__name__)
    # Getting the string value at the pointer address.
    s = string_at(result)
    # Freeing the memory allocated within GEOS
    free(result)
    return s

def check_zero(result, func, cargs):
    "Error checking on routines that should not return 0."
    if result == 0:
        raise GEOSException('Error encountered in GEOS C function "%s".' % func.__name__)
    else:
        return result
mit
chamaelj/tools-artbio
tools/msp_oases/oases_optimiser.py
3
1789
#!/usr/bin/env python """ VelvetOptimiser Wrapper refactored using the adaptation of Konrad Paszkiewicz University of Exeter, UK. """ import os, sys import subprocess def stop_err(msg): sys.stderr.write("%s\n" % msg) sys.exit() def oases_optimiser(starthash, endhash, input): ''' Replaces call to oases_optimiser.sh. For all k-mers between starthash and endhash run velvet and oases. ''' for i in xrange(starthash, endhash, 2): cmd1 = "velveth outputFolder_{0} {0} {1} && ".format(i, input) cmd2 = "velvetg outputFolder_{0} -read_trkg yes && ".format(i) cmd3 = "oases outputFolder_{0}".format(i) proc = subprocess.call(args=cmd1 + cmd2 + cmd3, shell=True, stdout=sys.stdout, stderr=sys.stdout) if not proc == 0: print("Oases failed at k-mer %s, skipping" % i) continue cmd4 = "velveth MergedAssemblyFolder 27 -long outputFolder_*/transcripts.fa && " cmd5 = "velvetg MergedAssemblyFolder -read_trkg yes -conserveLong yes && " cmd6 = "oases MergedAssemblyFolder -merge yes" proc = subprocess.call(args=cmd4 + cmd5 + cmd6, shell=True, stdout=sys.stdout, stderr=sys.stdout) if not proc == 0: raise Exception("Oases could not merge assembly") def __main__(): starthash = int(sys.argv[1]) endhash = int(sys.argv[2]) input = sys.argv[3] transcripts = sys.argv[4] try: oases_optimiser(starthash, endhash, input) except Exception, e: stop_err('Error running oases_optimiser.py\n' + str(e)) with open(transcripts, 'w') as out: transcript_path = os.path.join("MergedAssemblyFolder", 'transcripts.fa') for line in open(transcript_path): out.write("%s" % (line)) if __name__ == "__main__": __main__()
mit
maiklos-mirrors/jfx78
modules/web/src/main/native/Tools/Scripts/webkitpy/tool/commands/rebaseline.py
119
23221
# Copyright (c) 2010 Google Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
import json import logging import optparse import sys from webkitpy.common.checkout.baselineoptimizer import BaselineOptimizer from webkitpy.common.system.executive import ScriptError from webkitpy.layout_tests.controllers.test_result_writer import TestResultWriter from webkitpy.layout_tests.models import test_failures from webkitpy.layout_tests.models.test_expectations import TestExpectations, BASELINE_SUFFIX_LIST from webkitpy.port import builders from webkitpy.port import factory from webkitpy.tool.multicommandtool import Command _log = logging.getLogger(__name__) # FIXME: Should TestResultWriter know how to compute this string? def _baseline_name(fs, test_name, suffix): return fs.splitext(test_name)[0] + TestResultWriter.FILENAME_SUFFIX_EXPECTED + "." + suffix class AbstractRebaseliningCommand(Command): # not overriding execute() - pylint: disable=W0223 move_overwritten_baselines_option = optparse.make_option("--move-overwritten-baselines", action="store_true", default=False, help="Move overwritten baselines elsewhere in the baseline path. This is for bringing up new ports.") no_optimize_option = optparse.make_option('--no-optimize', dest='optimize', action='store_false', default=True, help=('Do not optimize/de-dup the expectations after rebaselining (default is to de-dup automatically). 
' 'You can use "webkit-patch optimize-baselines" to optimize separately.')) platform_options = factory.platform_options(use_globs=True) results_directory_option = optparse.make_option("--results-directory", help="Local results directory to use") suffixes_option = optparse.make_option("--suffixes", default=','.join(BASELINE_SUFFIX_LIST), action="store", help="Comma-separated-list of file types to rebaseline") def __init__(self, options=None): super(AbstractRebaseliningCommand, self).__init__(options=options) self._baseline_suffix_list = BASELINE_SUFFIX_LIST class RebaselineTest(AbstractRebaseliningCommand): name = "rebaseline-test-internal" help_text = "Rebaseline a single test from a buildbot. Only intended for use by other webkit-patch commands." def __init__(self): super(RebaselineTest, self).__init__(options=[ self.no_optimize_option, self.results_directory_option, self.suffixes_option, optparse.make_option("--builder", help="Builder to pull new baselines from"), optparse.make_option("--move-overwritten-baselines-to", action="append", default=[], help="Platform to move existing baselines to before rebaselining. This is for bringing up new ports."), optparse.make_option("--test", help="Test to rebaseline"), ]) self._scm_changes = {'add': []} def _results_url(self, builder_name): return self._tool.buildbot.builder_with_name(builder_name).latest_layout_test_results_url() def _baseline_directory(self, builder_name): port = self._tool.port_factory.get_from_builder_name(builder_name) override_dir = builders.rebaseline_override_dir(builder_name) if override_dir: return self._tool.filesystem.join(port.layout_tests_dir(), 'platform', override_dir) return port.baseline_version_dir() def _copy_existing_baseline(self, move_overwritten_baselines_to, test_name, suffix): old_baselines = [] new_baselines = [] # Need to gather all the baseline paths before modifying the filesystem since # the modifications can affect the results of port.expected_filename. 
for platform in move_overwritten_baselines_to: port = self._tool.port_factory.get(platform) old_baseline = port.expected_filename(test_name, "." + suffix) if not self._tool.filesystem.exists(old_baseline): _log.debug("No existing baseline for %s." % test_name) continue new_baseline = self._tool.filesystem.join(port.baseline_path(), self._file_name_for_expected_result(test_name, suffix)) if self._tool.filesystem.exists(new_baseline): _log.debug("Existing baseline at %s, not copying over it." % new_baseline) continue old_baselines.append(old_baseline) new_baselines.append(new_baseline) for i in range(len(old_baselines)): old_baseline = old_baselines[i] new_baseline = new_baselines[i] _log.debug("Copying baseline from %s to %s." % (old_baseline, new_baseline)) self._tool.filesystem.maybe_make_directory(self._tool.filesystem.dirname(new_baseline)) self._tool.filesystem.copyfile(old_baseline, new_baseline) if not self._tool.scm().exists(new_baseline): self._add_to_scm(new_baseline) def _save_baseline(self, data, target_baseline): if not data: return filesystem = self._tool.filesystem filesystem.maybe_make_directory(filesystem.dirname(target_baseline)) filesystem.write_binary_file(target_baseline, data) if not self._tool.scm().exists(target_baseline): self._add_to_scm(target_baseline) def _add_to_scm(self, path): self._scm_changes['add'].append(path) def _update_expectations_file(self, builder_name, test_name): port = self._tool.port_factory.get_from_builder_name(builder_name) # Since rebaseline-test-internal can be called multiple times in parallel, # we need to ensure that we're not trying to update the expectations file # concurrently as well. # FIXME: We should rework the code to not need this; maybe just download # the files in parallel and rebaseline local files serially? 
try: path = port.path_to_test_expectations_file() lock = self._tool.make_file_lock(path + '.lock') lock.acquire_lock() expectations = TestExpectations(port, include_generic=False, include_overrides=False) for test_configuration in port.all_test_configurations(): if test_configuration.version == port.test_configuration().version: expectationsString = expectations.remove_configuration_from_test(test_name, test_configuration) self._tool.filesystem.write_text_file(path, expectationsString) finally: lock.release_lock() def _test_root(self, test_name): return self._tool.filesystem.splitext(test_name)[0] def _file_name_for_actual_result(self, test_name, suffix): return "%s-actual.%s" % (self._test_root(test_name), suffix) def _file_name_for_expected_result(self, test_name, suffix): return "%s-expected.%s" % (self._test_root(test_name), suffix) def _rebaseline_test(self, builder_name, test_name, move_overwritten_baselines_to, suffix, results_url): baseline_directory = self._baseline_directory(builder_name) source_baseline = "%s/%s" % (results_url, self._file_name_for_actual_result(test_name, suffix)) target_baseline = self._tool.filesystem.join(baseline_directory, self._file_name_for_expected_result(test_name, suffix)) if move_overwritten_baselines_to: self._copy_existing_baseline(move_overwritten_baselines_to, test_name, suffix) _log.debug("Retrieving %s." 
% source_baseline) self._save_baseline(self._tool.web.get_binary(source_baseline, convert_404_to_None=True), target_baseline) def _rebaseline_test_and_update_expectations(self, options): if options.results_directory: results_url = 'file://' + options.results_directory else: results_url = self._results_url(options.builder) self._baseline_suffix_list = options.suffixes.split(',') for suffix in self._baseline_suffix_list: self._rebaseline_test(options.builder, options.test, options.move_overwritten_baselines_to, suffix, results_url) self._update_expectations_file(options.builder, options.test) def execute(self, options, args, tool): self._rebaseline_test_and_update_expectations(options) print json.dumps(self._scm_changes) class OptimizeBaselines(AbstractRebaseliningCommand): name = "optimize-baselines" help_text = "Reshuffles the baselines for the given tests to use as litte space on disk as possible." argument_names = "TEST_NAMES" def __init__(self): super(OptimizeBaselines, self).__init__(options=[self.suffixes_option] + self.platform_options) def _optimize_baseline(self, optimizer, test_name): for suffix in self._baseline_suffix_list: baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix) if not optimizer.optimize(baseline_name): print "Heuristics failed to optimize %s" % baseline_name def execute(self, options, args, tool): self._baseline_suffix_list = options.suffixes.split(',') port_names = tool.port_factory.all_port_names(options.platform) if not port_names: print "No port names match '%s'" % options.platform return optimizer = BaselineOptimizer(tool, port_names) port = tool.port_factory.get(port_names[0]) for test_name in port.tests(args): _log.info("Optimizing %s" % test_name) self._optimize_baseline(optimizer, test_name) class AnalyzeBaselines(AbstractRebaseliningCommand): name = "analyze-baselines" help_text = "Analyzes the baselines for the given tests and prints results that are identical." 
argument_names = "TEST_NAMES" def __init__(self): super(AnalyzeBaselines, self).__init__(options=[ self.suffixes_option, optparse.make_option('--missing', action='store_true', default=False, help='show missing baselines as well'), ] + self.platform_options) self._optimizer_class = BaselineOptimizer # overridable for testing self._baseline_optimizer = None self._port = None def _write(self, msg): print msg def _analyze_baseline(self, options, test_name): for suffix in self._baseline_suffix_list: baseline_name = _baseline_name(self._tool.filesystem, test_name, suffix) results_by_directory = self._baseline_optimizer.read_results_by_directory(baseline_name) if results_by_directory: self._write("%s:" % baseline_name) self._baseline_optimizer.write_by_directory(results_by_directory, self._write, " ") elif options.missing: self._write("%s: (no baselines found)" % baseline_name) def execute(self, options, args, tool): self._baseline_suffix_list = options.suffixes.split(',') port_names = tool.port_factory.all_port_names(options.platform) if not port_names: print "No port names match '%s'" % options.platform return self._baseline_optimizer = self._optimizer_class(tool, port_names) self._port = tool.port_factory.get(port_names[0]) for test_name in self._port.tests(args): self._analyze_baseline(options, test_name) class AbstractParallelRebaselineCommand(AbstractRebaseliningCommand): # not overriding execute() - pylint: disable=W0223 def _run_webkit_patch(self, args, verbose): try: verbose_args = ['--verbose'] if verbose else [] stderr = self._tool.executive.run_command([self._tool.path()] + verbose_args + args, cwd=self._tool.scm().checkout_root, return_stderr=True) for line in stderr.splitlines(): print >> sys.stderr, line except ScriptError, e: _log.error(e) def _builders_to_fetch_from(self, builders_to_check): # This routine returns the subset of builders that will cover all of the baseline search paths # used in the input list. 
In particular, if the input list contains both Release and Debug # versions of a configuration, we *only* return the Release version (since we don't save # debug versions of baselines). release_builders = set() debug_builders = set() builders_to_fallback_paths = {} for builder in builders_to_check: port = self._tool.port_factory.get_from_builder_name(builder) if port.test_configuration().build_type == 'Release': release_builders.add(builder) else: debug_builders.add(builder) for builder in list(release_builders) + list(debug_builders): port = self._tool.port_factory.get_from_builder_name(builder) fallback_path = port.baseline_search_path() if fallback_path not in builders_to_fallback_paths.values(): builders_to_fallback_paths[builder] = fallback_path return builders_to_fallback_paths.keys() def _rebaseline_commands(self, test_list, options): path_to_webkit_patch = self._tool.path() cwd = self._tool.scm().checkout_root commands = [] for test in test_list: for builder in self._builders_to_fetch_from(test_list[test]): suffixes = ','.join(test_list[test][builder]) cmd_line = [path_to_webkit_patch, 'rebaseline-test-internal', '--suffixes', suffixes, '--builder', builder, '--test', test] if options.move_overwritten_baselines: move_overwritten_baselines_to = builders.move_overwritten_baselines_to(builder) for platform in move_overwritten_baselines_to: cmd_line.extend(['--move-overwritten-baselines-to', platform]) if options.results_directory: cmd_line.extend(['--results-directory', options.results_directory]) if options.verbose: cmd_line.append('--verbose') commands.append(tuple([cmd_line, cwd])) return commands def _files_to_add(self, command_results): files_to_add = set() for output in [result[1].split('\n') for result in command_results]: file_added = False for line in output: try: if line: files_to_add.update(json.loads(line)['add']) file_added = True except ValueError: _log.debug('"%s" is not a JSON object, ignoring' % line) if not file_added: _log.debug('Could not 
add file based off output "%s"' % output) return list(files_to_add) def _optimize_baselines(self, test_list, verbose=False): # We don't run this in parallel because modifying the SCM in parallel is unreliable. for test in test_list: all_suffixes = set() for builder in self._builders_to_fetch_from(test_list[test]): all_suffixes.update(test_list[test][builder]) # FIXME: We should propagate the platform options as well. self._run_webkit_patch(['optimize-baselines', '--suffixes', ','.join(all_suffixes), test], verbose) def _rebaseline(self, options, test_list): for test, builders_to_check in sorted(test_list.items()): _log.info("Rebaselining %s" % test) for builder, suffixes in sorted(builders_to_check.items()): _log.debug(" %s: %s" % (builder, ",".join(suffixes))) commands = self._rebaseline_commands(test_list, options) command_results = self._tool.executive.run_in_parallel(commands) log_output = '\n'.join(result[2] for result in command_results).replace('\n\n', '\n') for line in log_output.split('\n'): if line: print >> sys.stderr, line # FIXME: Figure out how to log properly. files_to_add = self._files_to_add(command_results) if files_to_add: self._tool.scm().add_list(list(files_to_add)) if options.optimize: self._optimize_baselines(test_list, options.verbose) class RebaselineJson(AbstractParallelRebaselineCommand): name = "rebaseline-json" help_text = "Rebaseline based off JSON passed to stdin. Intended to only be called from other scripts." def __init__(self,): super(RebaselineJson, self).__init__(options=[ self.move_overwritten_baselines_option, self.no_optimize_option, self.results_directory_option, ]) def execute(self, options, args, tool): self._rebaseline(options, json.loads(sys.stdin.read())) class RebaselineExpectations(AbstractParallelRebaselineCommand): name = "rebaseline-expectations" help_text = "Rebaselines the tests indicated in TestExpectations." 
def __init__(self): super(RebaselineExpectations, self).__init__(options=[ self.move_overwritten_baselines_option, self.no_optimize_option, ] + self.platform_options) self._test_list = None def _update_expectations_files(self, port_name): port = self._tool.port_factory.get(port_name) expectations = TestExpectations(port) for path in port.expectations_dict(): if self._tool.filesystem.exists(path): self._tool.filesystem.write_text_file(path, expectations.remove_rebaselined_tests(expectations.get_rebaselining_failures(), path)) def _tests_to_rebaseline(self, port): tests_to_rebaseline = {} expectations = TestExpectations(port, include_overrides=True) for test in expectations.get_rebaselining_failures(): tests_to_rebaseline[test] = TestExpectations.suffixes_for_expectations(expectations.get_expectations(test)) return tests_to_rebaseline def _add_tests_to_rebaseline_for_port(self, port_name): builder_name = builders.builder_name_for_port_name(port_name) if not builder_name: return tests = self._tests_to_rebaseline(self._tool.port_factory.get(port_name)).items() if tests: _log.info("Retrieving results for %s from %s." % (port_name, builder_name)) for test_name, suffixes in tests: _log.info(" %s (%s)" % (test_name, ','.join(suffixes))) if test_name not in self._test_list: self._test_list[test_name] = {} self._test_list[test_name][builder_name] = suffixes def execute(self, options, args, tool): options.results_directory = None self._test_list = {} port_names = tool.port_factory.all_port_names(options.platform) for port_name in port_names: self._add_tests_to_rebaseline_for_port(port_name) if not self._test_list: _log.warning("Did not find any tests marked Rebaseline.") return self._rebaseline(options, self._test_list) for port_name in port_names: self._update_expectations_files(port_name) class Rebaseline(AbstractParallelRebaselineCommand): name = "rebaseline" help_text = "Rebaseline tests with results from the build bots. 
Shows the list of failing tests on the builders if no test names are provided." argument_names = "[TEST_NAMES]" def __init__(self): super(Rebaseline, self).__init__(options=[ self.move_overwritten_baselines_option, self.no_optimize_option, # FIXME: should we support the platform options in addition to (or instead of) --builders? self.suffixes_option, optparse.make_option("--builders", default=None, action="append", help="Comma-separated-list of builders to pull new baselines from (can also be provided multiple times)"), ]) def _builders_to_pull_from(self): webkit_buildbot_builder_names = [] for name in builders.all_builder_names(): webkit_buildbot_builder_names.append(name) titles = ["build.webkit.org bots"] lists = [webkit_buildbot_builder_names] chosen_names = self._tool.user.prompt_with_multiple_lists("Which builder to pull results from:", titles, lists, can_choose_multiple=True) return [self._builder_with_name(name) for name in chosen_names] def _builder_with_name(self, name): return self._tool.buildbot.builder_with_name(name) def _tests_to_update(self, builder): failing_tests = builder.latest_layout_test_results().tests_matching_failure_types([test_failures.FailureTextMismatch]) return self._tool.user.prompt_with_list("Which test(s) to rebaseline for %s:" % builder.name(), failing_tests, can_choose_multiple=True) def execute(self, options, args, tool): options.results_directory = None if options.builders: builders_to_check = [] for builder_names in options.builders: builders_to_check += [self._builder_with_name(name) for name in builder_names.split(",")] else: builders_to_check = self._builders_to_pull_from() test_list = {} suffixes_to_update = options.suffixes.split(",") for builder in builders_to_check: tests = args or self._tests_to_update(builder) for test in tests: if test not in test_list: test_list[test] = {} test_list[test][builder.name()] = suffixes_to_update if options.verbose: _log.debug("rebaseline-json: " + str(test_list)) 
self._rebaseline(options, test_list)
gpl-2.0
ewdurbin/ansible-modules-extras
database/misc/riak.py
57
7967
#!/usr/bin/python # -*- coding: utf-8 -*- # (c) 2013, James Martin <jmartin@basho.com>, Drew Kerrigan <dkerrigan@basho.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # DOCUMENTATION = ''' --- module: riak short_description: This module handles some common Riak operations description: - This module can be used to join nodes to a cluster, check the status of the cluster. version_added: "1.2" options: command: description: - The command you would like to perform against the cluster. required: false default: null aliases: [] choices: ['ping', 'kv_test', 'join', 'plan', 'commit'] config_dir: description: - The path to the riak configuration directory required: false default: /etc/riak aliases: [] http_conn: description: - The ip address and port that is listening for Riak HTTP queries required: false default: 127.0.0.1:8098 aliases: [] target_node: description: - The target node for certain operations (join, ping) required: false default: riak@127.0.0.1 aliases: [] wait_for_handoffs: description: - Number of seconds to wait for handoffs to complete. required: false default: null aliases: [] type: 'int' wait_for_ring: description: - Number of seconds to wait for all nodes to agree on the ring. required: false default: null aliases: [] type: 'int' wait_for_service: description: - Waits for a riak service to come online before continuing. 
required: false default: None aliases: [] choices: ['kv'] validate_certs: description: - If C(no), SSL certificates will not be validated. This should only be used on personally controlled sites using self-signed certificates. required: false default: 'yes' choices: ['yes', 'no'] version_added: 1.5.1 ''' EXAMPLES = ''' # Join's a Riak node to another node - riak: command=join target_node=riak@10.1.1.1 # Wait for handoffs to finish. Use with async and poll. - riak: wait_for_handoffs=yes # Wait for riak_kv service to startup - riak: wait_for_service=kv ''' import urllib2 import time import socket import sys try: import json except ImportError: import simplejson as json def ring_check(module, riak_admin_bin): cmd = '%s ringready' % riak_admin_bin rc, out, err = module.run_command(cmd) if rc == 0 and 'TRUE All nodes agree on the ring' in out: return True else: return False def main(): module = AnsibleModule( argument_spec=dict( command=dict(required=False, default=None, choices=[ 'ping', 'kv_test', 'join', 'plan', 'commit']), config_dir=dict(default='/etc/riak'), http_conn=dict(required=False, default='127.0.0.1:8098'), target_node=dict(default='riak@127.0.0.1', required=False), wait_for_handoffs=dict(default=False, type='int'), wait_for_ring=dict(default=False, type='int'), wait_for_service=dict( required=False, default=None, choices=['kv']), validate_certs = dict(default='yes', type='bool')) ) command = module.params.get('command') config_dir = module.params.get('config_dir') http_conn = module.params.get('http_conn') target_node = module.params.get('target_node') wait_for_handoffs = module.params.get('wait_for_handoffs') wait_for_ring = module.params.get('wait_for_ring') wait_for_service = module.params.get('wait_for_service') validate_certs = module.params.get('validate_certs') #make sure riak commands are on the path riak_bin = module.get_bin_path('riak') riak_admin_bin = module.get_bin_path('riak-admin') timeout = time.time() + 120 while True: if time.time() > 
timeout: module.fail_json(msg='Timeout, could not fetch Riak stats.') (response, info) = fetch_url(module, 'http://%s/stats' % (http_conn), force=True, timeout=5) if info['status'] == 200: stats_raw = response.read() break time.sleep(5) # here we attempt to load those stats, try: stats = json.loads(stats_raw) except: module.fail_json(msg='Could not parse Riak stats.') node_name = stats['nodename'] nodes = stats['ring_members'] ring_size = stats['ring_creation_size'] rc, out, err = module.run_command([riak_bin, 'version'] ) version = out.strip() result = dict(node_name=node_name, nodes=nodes, ring_size=ring_size, version=version) if command == 'ping': cmd = '%s ping %s' % ( riak_bin, target_node ) rc, out, err = module.run_command(cmd) if rc == 0: result['ping'] = out else: module.fail_json(msg=out) elif command == 'kv_test': cmd = '%s test' % riak_admin_bin rc, out, err = module.run_command(cmd) if rc == 0: result['kv_test'] = out else: module.fail_json(msg=out) elif command == 'join': if nodes.count(node_name) == 1 and len(nodes) > 1: result['join'] = 'Node is already in cluster or staged to be in cluster.' 
else: cmd = '%s cluster join %s' % (riak_admin_bin, target_node) rc, out, err = module.run_command(cmd) if rc == 0: result['join'] = out result['changed'] = True else: module.fail_json(msg=out) elif command == 'plan': cmd = '%s cluster plan' % riak_admin_bin rc, out, err = module.run_command(cmd) if rc == 0: result['plan'] = out if 'Staged Changes' in out: result['changed'] = True else: module.fail_json(msg=out) elif command == 'commit': cmd = '%s cluster commit' % riak_admin_bin rc, out, err = module.run_command(cmd) if rc == 0: result['commit'] = out result['changed'] = True else: module.fail_json(msg=out) # this could take a while, recommend to run in async mode if wait_for_handoffs: timeout = time.time() + wait_for_handoffs while True: cmd = '%s transfers' % riak_admin_bin rc, out, err = module.run_command(cmd) if 'No transfers active' in out: result['handoffs'] = 'No transfers active.' break time.sleep(10) if time.time() > timeout: module.fail_json(msg='Timeout waiting for handoffs.') if wait_for_service: cmd = [riak_admin_bin, 'wait_for_service', 'riak_%s' % wait_for_service, node_name ] rc, out, err = module.run_command(cmd) result['service'] = out if wait_for_ring: timeout = time.time() + wait_for_ring while True: if ring_check(module, riak_admin_bin): break time.sleep(10) if time.time() > timeout: module.fail_json(msg='Timeout waiting for nodes to agree on ring.') result['ring_ready'] = ring_check(module, riak_admin_bin) module.exit_json(**result) # import module snippets from ansible.module_utils.basic import * from ansible.module_utils.urls import * main()
gpl-3.0
Krozark/Kraggne
Kraggne/contrib/contentblocks/utils.py
3
1530
# -*- coding: utf-8 -*- from Kraggne.contrib.contentblocks.conf.settings import CONTENT_CHOICE_MODELS, CONTENT_FORM_MODELS from django.contrib.contenttypes.models import ContentType from django import forms def get_content_choice_models(): if CONTENT_CHOICE_MODELS: q = ContentType.objects.filter(pk=-1)#imposible, but it for be concatenate for u in CONTENT_CHOICE_MODELS: if not "app_label" in u: continue elif not "model" in u: q = q | ContentType.objects.filter(app_label = u["app_label"]) else: if isinstance(u["model"],(dict,list,tuple)): q = q | ContentType.objects.filter(app_label = u["app_label"],model__in = u["model"]) else: q = q | ContentType.objects.filter(app_label = u["app_label"],model = u["model"]) else: q = ContentType.objects.all() return q def model_to_modelform(model): try: form = CONTENT_FORM_MODELS[model._meta.app_label][model._meta.module_name] point = form.rfind('.') if point != -1: app = form[:point] klass = form[point+1:] f= __import__(app,globals(),locals(),[klass,]) modelform_class=getattr(f,klass) else: modelform_class=__import__(form) except: meta = type('Meta', (), { "model":model, }) modelform_class = type('modelform', (forms.ModelForm,), {"Meta": meta}) return modelform_class
bsd-2-clause
EricNeedham/assignment-1
venv/lib/python2.7/site-packages/sqlalchemy/engine/util.py
33
2338
# engine/util.py # Copyright (C) 2005-2014 the SQLAlchemy authors and contributors # <see AUTHORS file> # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php from .. import util def connection_memoize(key): """Decorator, memoize a function in a connection.info stash. Only applicable to functions which take no arguments other than a connection. The memo will be stored in ``connection.info[key]``. """ @util.decorator def decorated(fn, self, connection): connection = connection.connect() try: return connection.info[key] except KeyError: connection.info[key] = val = fn(self, connection) return val return decorated def py_fallback(): def _distill_params(multiparams, params): """Given arguments from the calling form *multiparams, **params, return a list of bind parameter structures, usually a list of dictionaries. In the case of 'raw' execution which accepts positional parameters, it may be a list of tuples or lists. """ if not multiparams: if params: return [params] else: return [] elif len(multiparams) == 1: zero = multiparams[0] if isinstance(zero, (list, tuple)): if not zero or hasattr(zero[0], '__iter__') and \ not hasattr(zero[0], 'strip'): # execute(stmt, [{}, {}, {}, ...]) # execute(stmt, [(), (), (), ...]) return zero else: # execute(stmt, ("value", "value")) return [zero] elif hasattr(zero, 'keys'): # execute(stmt, {"key":"value"}) return [zero] else: # execute(stmt, "value") return [[zero]] else: if hasattr(multiparams[0], '__iter__') and \ not hasattr(multiparams[0], 'strip'): return multiparams else: return [multiparams] return locals() try: from sqlalchemy.cutils import _distill_params except ImportError: globals().update(py_fallback())
mit
jcatw/deep_q_rl
deep_q_rl/ale_data_set.py
2
9391
"""This class stores all of the samples for training. It is able to construct randomly selected batches of phi's from the stored history. """ import numpy as np import time import theano floatX = theano.config.floatX class DataSet(object): """A replay memory consisting of circular buffers for observed images, actions, and rewards. """ def __init__(self, width, height, rng, max_steps=1000, phi_length=4): """Construct a DataSet. Arguments: width, height - image size max_steps - the number of time steps to store phi_length - number of images to concatenate into a state rng - initialized numpy random number generator, used to choose random minibatches """ # TODO: Specify capacity in number of state transitions, not # number of saved time steps. # Store arguments. self.width = width self.height = height self.max_steps = max_steps self.phi_length = phi_length self.rng = rng # Allocate the circular buffers and indices. self.imgs = np.zeros((max_steps, height, width), dtype='uint8') self.actions = np.zeros(max_steps, dtype='int32') self.rewards = np.zeros(max_steps, dtype=floatX) self.terminal = np.zeros(max_steps, dtype='bool') self.bottom = 0 self.top = 0 self.size = 0 def add_sample(self, img, action, reward, terminal): """Add a time step record. Arguments: img -- observed image action -- action chosen by the agent reward -- reward received after taking the action terminal -- boolean indicating whether the episode ended after this time step """ self.imgs[self.top] = img self.actions[self.top] = action self.rewards[self.top] = reward self.terminal[self.top] = terminal if self.size == self.max_steps: self.bottom = (self.bottom + 1) % self.max_steps else: self.size += 1 self.top = (self.top + 1) % self.max_steps def __len__(self): """Return an approximate count of stored state transitions.""" # TODO: Properly account for indices which can't be used, as in # random_batch's check. 
return max(0, self.size - self.phi_length) def last_phi(self): """Return the most recent phi (sequence of image frames).""" indexes = np.arange(self.top - self.phi_length, self.top) return self.imgs.take(indexes, axis=0, mode='wrap') def phi(self, img): """Return a phi (sequence of image frames), using the last phi_length - 1, plus img. """ indexes = np.arange(self.top - self.phi_length + 1, self.top) phi = np.empty((self.phi_length, self.height, self.width), dtype=floatX) phi[0:self.phi_length - 1] = self.imgs.take(indexes, axis=0, mode='wrap') phi[-1] = img return phi def random_batch(self, batch_size): """Return corresponding states, actions, rewards, terminal status, and next_states for batch_size randomly chosen state transitions. """ # Allocate the response. states = np.zeros((batch_size, self.phi_length, self.height, self.width), dtype='uint8') actions = np.zeros((batch_size, 1), dtype='int32') rewards = np.zeros((batch_size, 1), dtype=floatX) terminal = np.zeros((batch_size, 1), dtype='bool') next_states = np.zeros((batch_size, self.phi_length, self.height, self.width), dtype='uint8') count = 0 while count < batch_size: # Randomly choose a time step from the replay memory. index = self.rng.randint(self.bottom, self.bottom + self.size - self.phi_length) initial_indices = np.arange(index, index + self.phi_length) transition_indices = initial_indices + 1 end_index = index + self.phi_length - 1 # Check that the initial state corresponds entirely to a # single episode, meaning none but the last frame may be # terminal. If the last frame of the initial state is # terminal, then the last frame of the transitioned state # will actually be the first frame of a new episode, which # the Q learner recognizes and handles correctly during # training by zeroing the discounted future reward estimate. if np.any(self.terminal.take(initial_indices[0:-1], mode='wrap')): continue # Add the state transition to the response. 
states[count] = self.imgs.take(initial_indices, axis=0, mode='wrap') actions[count] = self.actions.take(end_index, mode='wrap') rewards[count] = self.rewards.take(end_index, mode='wrap') terminal[count] = self.terminal.take(end_index, mode='wrap') next_states[count] = self.imgs.take(transition_indices, axis=0, mode='wrap') count += 1 return states, actions, rewards, next_states, terminal # TESTING CODE BELOW THIS POINT... def simple_tests(): np.random.seed(222) dataset = DataSet(width=2, height=3, rng=np.random.RandomState(42), max_steps=6, phi_length=4) for i in range(10): img = np.random.randint(0, 256, size=(3, 2)) action = np.random.randint(16) reward = np.random.random() terminal = False if np.random.random() < .05: terminal = True print 'img', img dataset.add_sample(img, action, reward, terminal) print "I", dataset.imgs print "A", dataset.actions print "R", dataset.rewards print "T", dataset.terminal print "SIZE", dataset.size print print "LAST PHI", dataset.last_phi() print print 'BATCH', dataset.random_batch(2) def speed_tests(): dataset = DataSet(width=80, height=80, rng=np.random.RandomState(42), max_steps=20000, phi_length=4) img = np.random.randint(0, 256, size=(80, 80)) action = np.random.randint(16) reward = np.random.random() start = time.time() for i in range(100000): terminal = False if np.random.random() < .05: terminal = True dataset.add_sample(img, action, reward, terminal) print "samples per second: ", 100000 / (time.time() - start) start = time.time() for i in range(200): a = dataset.random_batch(32) print "batches per second: ", 200 / (time.time() - start) print dataset.last_phi() def trivial_tests(): dataset = DataSet(width=2, height=1, rng=np.random.RandomState(42), max_steps=3, phi_length=2) img1 = np.array([[1, 1]], dtype='uint8') img2 = np.array([[2, 2]], dtype='uint8') img3 = np.array([[3, 3]], dtype='uint8') dataset.add_sample(img1, 1, 1, False) dataset.add_sample(img2, 2, 2, False) dataset.add_sample(img3, 2, 2, True) print "last", 
dataset.last_phi() print "random", dataset.random_batch(1) def max_size_tests(): dataset1 = DataSet(width=3, height=4, rng=np.random.RandomState(42), max_steps=10, phi_length=4) dataset2 = DataSet(width=3, height=4, rng=np.random.RandomState(42), max_steps=1000, phi_length=4) for i in range(100): img = np.random.randint(0, 256, size=(4, 3)) action = np.random.randint(16) reward = np.random.random() terminal = False if np.random.random() < .05: terminal = True dataset1.add_sample(img, action, reward, terminal) dataset2.add_sample(img, action, reward, terminal) np.testing.assert_array_almost_equal(dataset1.last_phi(), dataset2.last_phi()) print "passed" def test_memory_usage_ok(): import memory_profiler dataset = DataSet(width=80, height=80, rng=np.random.RandomState(42), max_steps=100000, phi_length=4) last = time.time() for i in xrange(1000000000): if (i % 100000) == 0: print i dataset.add_sample(np.random.random((80, 80)), 1, 1, False) if i > 200000: states, actions, rewards, next_states, terminals = \ dataset.random_batch(32) if (i % 10007) == 0: print time.time() - last mem_usage = memory_profiler.memory_usage(-1) print len(dataset), mem_usage last = time.time() def main(): speed_tests() test_memory_usage_ok() max_size_tests() simple_tests() if __name__ == "__main__": main()
bsd-3-clause
sarakha63/persomov
libs/html5lib/serializer/htmlserializer.py
235
12897
"""HTML serializer: turns a html5lib treewalker token stream into (encoded)
markup fragments.

Fix over the previous revision: ``SerializeError`` was declared with ``def``
instead of ``class``, so ``raise SerializeError`` in strict mode raised
``TypeError`` (functions are not exceptions) instead of the intended error.
"""
from __future__ import absolute_import, division, unicode_literals
from six import text_type
import gettext
_ = gettext.gettext

try:
    from functools import reduce
except ImportError:
    # Python 2: reduce is a builtin.
    pass

from ..constants import voidElements, booleanAttributes, spaceCharacters
from ..constants import rcdataElements, entities, xmlEntities
from .. import utils
from xml.sax.saxutils import escape

spaceCharacters = "".join(spaceCharacters)

try:
    from codecs import register_error, xmlcharrefreplace_errors
except ImportError:
    # No custom error handler support: fall back to strict encoding.
    unicode_encode_errors = "strict"
else:
    unicode_encode_errors = "htmlentityreplace"

    # Map codepoint -> entity name, used by the error handler below to
    # replace unencodable characters with named character references.
    encode_entity_map = {}
    is_ucs4 = len("\U0010FFFF") == 1
    for k, v in list(entities.items()):
        # skip multi-character entities
        if ((is_ucs4 and len(v) > 1) or
                (not is_ucs4 and len(v) > 2)):
            continue
        if v != "&":
            if len(v) == 2:
                v = utils.surrogatePairToCodepoint(v)
            else:
                v = ord(v)
            if not v in encode_entity_map or k.islower():
                # prefer &lt; over &LT; and similarly for &amp;, &gt;, etc.
                encode_entity_map[v] = k

    def htmlentityreplace_errors(exc):
        # codecs error handler: substitute a named entity when one exists,
        # otherwise a numeric character reference.  Surrogate pairs on
        # narrow builds are consumed as a single codepoint.
        if isinstance(exc, (UnicodeEncodeError, UnicodeTranslateError)):
            res = []
            codepoints = []
            skip = False
            for i, c in enumerate(exc.object[exc.start:exc.end]):
                if skip:
                    skip = False
                    continue
                index = i + exc.start
                if utils.isSurrogatePair(exc.object[index:min([exc.end, index + 2])]):
                    codepoint = utils.surrogatePairToCodepoint(exc.object[index:index + 2])
                    skip = True
                else:
                    codepoint = ord(c)
                codepoints.append(codepoint)
            for cp in codepoints:
                e = encode_entity_map.get(cp)
                if e:
                    res.append("&")
                    res.append(e)
                    if not e.endswith(";"):
                        res.append(";")
                else:
                    res.append("&#x%s;" % (hex(cp)[2:]))
            return ("".join(res), exc.end)
        else:
            return xmlcharrefreplace_errors(exc)

    register_error(unicode_encode_errors, htmlentityreplace_errors)

    del register_error


class HTMLSerializer(object):

    # attribute quoting options
    quote_attr_values = False
    quote_char = '"'
    use_best_quote_char = True

    # tag syntax options
    omit_optional_tags = True
    minimize_boolean_attributes = True
    use_trailing_solidus = False
    space_before_trailing_solidus = True

    # escaping options
    escape_lt_in_attrs = False
    escape_rcdata = False
    resolve_entities = True

    # miscellaneous options
    alphabetical_attributes = False
    inject_meta_charset = True
    strip_whitespace = False
    sanitize = False

    options = ("quote_attr_values", "quote_char", "use_best_quote_char",
               "omit_optional_tags", "minimize_boolean_attributes",
               "use_trailing_solidus", "space_before_trailing_solidus",
               "escape_lt_in_attrs", "escape_rcdata", "resolve_entities",
               "alphabetical_attributes", "inject_meta_charset",
               "strip_whitespace", "sanitize")

    def __init__(self, **kwargs):
        """Initialize HTMLSerializer.

        Keyword options (default given first unless specified) include:

        inject_meta_charset=True|False
          Whether it insert a meta element to define the character set of the
          document.
        quote_attr_values=True|False
          Whether to quote attribute values that don't require quoting
          per HTML5 parsing rules.
        quote_char=u'"'|u"'"
          Use given quote character for attribute quoting. Default is to
          use double quote unless attribute value contains a double quote,
          in which case single quotes are used instead.
        escape_lt_in_attrs=False|True
          Whether to escape < in attribute values.
        escape_rcdata=False|True
          Whether to escape characters that need to be escaped within normal
          elements within rcdata elements such as style.
        resolve_entities=True|False
          Whether to resolve named character entities that appear in the
          source tree. The XML predefined entities &lt; &gt; &amp; &quot; &apos;
          are unaffected by this setting.
        strip_whitespace=False|True
          Whether to remove semantically meaningless whitespace. (This
          compresses all whitespace to a single space except within pre.)
        minimize_boolean_attributes=True|False
          Shortens boolean attributes to give just the attribute value,
          for example <input disabled="disabled"> becomes <input disabled>.
        use_trailing_solidus=False|True
          Includes a close-tag slash at the end of the start tag of void
          elements (empty elements whose end tag is forbidden). E.g. <hr/>.
        space_before_trailing_solidus=True|False
          Places a space immediately before the closing slash in a tag
          using a trailing solidus. E.g. <hr />. Requires use_trailing_solidus.
        sanitize=False|True
          Strip all unsafe or unknown constructs from output.
          See `html5lib user documentation`_
        omit_optional_tags=True|False
          Omit start/end tags that are optional.
        alphabetical_attributes=False|True
          Reorder attributes to be in alphabetical order.

        .. _html5lib user documentation: http://code.google.com/p/html5lib/wiki/UserDocumentation
        """
        if 'quote_char' in kwargs:
            self.use_best_quote_char = False
        for attr in self.options:
            setattr(self, attr, kwargs.get(attr, getattr(self, attr)))
        self.errors = []
        self.strict = False

    def encode(self, string):
        """Encode text content, using entity replacement for unencodables."""
        assert(isinstance(string, text_type))
        if self.encoding:
            return string.encode(self.encoding, unicode_encode_errors)
        else:
            return string

    def encodeStrict(self, string):
        """Encode markup (tag names etc.); unencodable characters raise."""
        assert(isinstance(string, text_type))
        if self.encoding:
            return string.encode(self.encoding, "strict")
        else:
            return string

    def serialize(self, treewalker, encoding=None):
        """Generate serialized output chunks for the given treewalker.

        Yields str when encoding is None, otherwise bytes.  Errors are
        collected on self.errors (and raised when self.strict is set).
        """
        self.encoding = encoding
        in_cdata = False
        self.errors = []
        if encoding and self.inject_meta_charset:
            from ..filters.inject_meta_charset import Filter
            treewalker = Filter(treewalker, encoding)
        # WhitespaceFilter should be used before OptionalTagFilter
        # for maximum efficiently of this latter filter
        if self.strip_whitespace:
            from ..filters.whitespace import Filter
            treewalker = Filter(treewalker)
        if self.sanitize:
            from ..filters.sanitizer import Filter
            treewalker = Filter(treewalker)
        if self.omit_optional_tags:
            from ..filters.optionaltags import Filter
            treewalker = Filter(treewalker)
        # Alphabetical attributes must be last, as other filters
        # could add attributes and alter the order
        if self.alphabetical_attributes:
            from ..filters.alphabeticalattributes import Filter
            treewalker = Filter(treewalker)

        for token in treewalker:
            type = token["type"]
            if type == "Doctype":
                doctype = "<!DOCTYPE %s" % token["name"]

                if token["publicId"]:
                    doctype += ' PUBLIC "%s"' % token["publicId"]
                elif token["systemId"]:
                    doctype += " SYSTEM"
                if token["systemId"]:
                    if token["systemId"].find('"') >= 0:
                        if token["systemId"].find("'") >= 0:
                            self.serializeError(_("System identifer contains both single and double quote characters"))
                        quote_char = "'"
                    else:
                        quote_char = '"'
                    doctype += " %s%s%s" % (quote_char, token["systemId"], quote_char)

                doctype += ">"
                yield self.encodeStrict(doctype)

            elif type in ("Characters", "SpaceCharacters"):
                if type == "SpaceCharacters" or in_cdata:
                    if in_cdata and token["data"].find("</") >= 0:
                        self.serializeError(_("Unexpected </ in CDATA"))
                    yield self.encode(token["data"])
                else:
                    yield self.encode(escape(token["data"]))

            elif type in ("StartTag", "EmptyTag"):
                name = token["name"]
                yield self.encodeStrict("<%s" % name)
                if name in rcdataElements and not self.escape_rcdata:
                    # Content of rcdata elements (script, style, ...) is
                    # emitted raw until the matching end tag.
                    in_cdata = True
                elif in_cdata:
                    self.serializeError(_("Unexpected child element of a CDATA element"))
                for (attr_namespace, attr_name), attr_value in token["data"].items():
                    # TODO: Add namespace support here
                    k = attr_name
                    v = attr_value
                    yield self.encodeStrict(' ')

                    yield self.encodeStrict(k)
                    if not self.minimize_boolean_attributes or \
                        (k not in booleanAttributes.get(name, tuple()) and
                         k not in booleanAttributes.get("", tuple())):
                        yield self.encodeStrict("=")
                        if self.quote_attr_values or not v:
                            quote_attr = True
                        else:
                            # Quote only when the value contains a character
                            # that would break an unquoted attribute.
                            quote_attr = reduce(lambda x, y: x or (y in v),
                                                spaceCharacters + ">\"'=", False)
                        v = v.replace("&", "&amp;")
                        if self.escape_lt_in_attrs:
                            v = v.replace("<", "&lt;")
                        if quote_attr:
                            quote_char = self.quote_char
                            if self.use_best_quote_char:
                                if "'" in v and '"' not in v:
                                    quote_char = '"'
                                elif '"' in v and "'" not in v:
                                    quote_char = "'"
                            if quote_char == "'":
                                v = v.replace("'", "&#39;")
                            else:
                                v = v.replace('"', "&quot;")
                            yield self.encodeStrict(quote_char)
                            yield self.encode(v)
                            yield self.encodeStrict(quote_char)
                        else:
                            yield self.encode(v)
                if name in voidElements and self.use_trailing_solidus:
                    if self.space_before_trailing_solidus:
                        yield self.encodeStrict(" /")
                    else:
                        yield self.encodeStrict("/")
                yield self.encode(">")

            elif type == "EndTag":
                name = token["name"]
                if name in rcdataElements:
                    in_cdata = False
                elif in_cdata:
                    self.serializeError(_("Unexpected child element of a CDATA element"))
                yield self.encodeStrict("</%s>" % name)

            elif type == "Comment":
                data = token["data"]
                if data.find("--") >= 0:
                    self.serializeError(_("Comment contains --"))
                yield self.encodeStrict("<!--%s-->" % token["data"])

            elif type == "Entity":
                name = token["name"]
                key = name + ";"
                if not key in entities:
                    self.serializeError(_("Entity %s not recognized" % name))
                if self.resolve_entities and key not in xmlEntities:
                    data = entities[key]
                else:
                    data = "&%s;" % name
                yield self.encodeStrict(data)

            else:
                self.serializeError(token["data"])

    def render(self, treewalker, encoding=None):
        """Serialize the whole treewalker and join the chunks into one
        bytes (when encoding is given) or str object."""
        if encoding:
            return b"".join(list(self.serialize(treewalker, encoding)))
        else:
            return "".join(list(self.serialize(treewalker)))

    def serializeError(self, data="XXX ERROR MESSAGE NEEDED"):
        # XXX The idea is to make data mandatory.
        self.errors.append(data)
        if self.strict:
            raise SerializeError


# Was erroneously declared with ``def``; ``raise SerializeError`` requires
# an exception class.
class SerializeError(Exception):
    """Error in serialized tree"""
    pass
gpl-3.0
emoon/ProDBG
src/external/edbee-lib/vendor/onig/onigmo.py
6
25586
# -*- coding: utf-8 -*- """Using Onigmo (Oniguruma-mod) regular expression library. This is a low level wrapper for Onigmo regular expression DLL/shared object. (This module does not support static link library.) This provides almost same API as the original C API, so the API is not object oriented. Onigmo DLL (onigmo.dll, libonigmo.so, etc.) must be placed in the default search path. The default search path depends on the system. """ import ctypes import os import sys #__all__ = ["onig_new", "onig_free", # "onig_search", "onig_match", # "onig_region_new", "onig_region_free", # "onig_version", "onig_copyright"] # # Onigmo API version # (Must be synchronized with LTVERSION in configure.ac.) # _onig_api_version = 6 # # Type Definitions # OnigCodePoint = ctypes.c_uint class OnigRegexType(ctypes.Structure): _fields_ = [ ] regex_t = OnigRegexType OnigRegex = ctypes.POINTER(OnigRegexType) try: # Python 2.7 _c_ssize_t = ctypes.c_ssize_t except AttributeError: # Python 2.6 if ctypes.sizeof(ctypes.c_int) == ctypes.sizeof(ctypes.c_void_p): _c_ssize_t = ctypes.c_int elif ctypes.sizeof(ctypes.c_long) == ctypes.sizeof(ctypes.c_void_p): _c_ssize_t = ctypes.c_long elif ctypes.sizeof(ctypes.c_longlong) == ctypes.sizeof(ctypes.c_void_p): _c_ssize_t = ctypes.c_longlong class OnigRegion(ctypes.Structure): _fields_ = [ ("allocated", ctypes.c_int), ("num_regs", ctypes.c_int), ("beg", ctypes.POINTER(_c_ssize_t)), ("end", ctypes.POINTER(_c_ssize_t)), ("history_root",ctypes.c_void_p), ] re_registers = OnigRegion OnigOptionType = ctypes.c_int class OnigEncodingType(ctypes.Structure): _fields_ = [ ("mbc_enc_len", ctypes.c_void_p), ("name", ctypes.c_char_p), ("max_enc_len", ctypes.c_int), ("min_enc_len", ctypes.c_int), ("is_mbc_newline", ctypes.c_void_p), ("mbc_to_code", ctypes.c_void_p), ("code_to_mbclen", ctypes.c_void_p), ("code_to_mbc", ctypes.c_void_p), ("mbc_case_fold", ctypes.c_void_p), ("apply_all_case_fold", ctypes.c_void_p), ("get_case_fold_codes_by_str", ctypes.c_void_p), 
("property_name_to_ctype", ctypes.c_void_p), ("is_code_ctype", ctypes.c_void_p), ("get_ctype_code_range", ctypes.c_void_p), ("left_adjust_char_head", ctypes.c_void_p), ("is_allowed_reverse_match",ctypes.c_void_p), ("case_map", ctypes.c_void_p), ("ruby_encoding_index", ctypes.c_int), ("flags", ctypes.c_int), ] OnigEncoding = ctypes.POINTER(OnigEncodingType) class OnigMetaCharTableType(ctypes.Structure): _fields_ = [ ("esc", OnigCodePoint), ("anychar", OnigCodePoint), ("anytime", OnigCodePoint), ("zero_or_one_time",OnigCodePoint), ("one_or_one_time", OnigCodePoint), ("anychar_anytime", OnigCodePoint), ] class OnigSyntaxType(ctypes.Structure): _fields_ = [ ("op", ctypes.c_uint), ("op2", ctypes.c_uint), ("behavior", ctypes.c_uint), ("options", OnigOptionType), ("meta_char_table", OnigMetaCharTableType), ] class OnigErrorInfo(ctypes.Structure): _fields_ = [ ("enc", OnigEncoding), ("par", ctypes.c_char_p), ("par_end", ctypes.c_char_p), ] # load the DLL or the shared library if os.name in ("nt", "ce"): # Win32 _libname = "onigmo.dll" try: libonig = ctypes.cdll.LoadLibrary(_libname) except OSError: # Sometimes MinGW version has a prefix "lib". _libname = "libonigmo.dll" try: libonig = ctypes.cdll.LoadLibrary(_libname) except OSError: # Sometimes MinGW version has the API version. 
_libname = "libonigmo-%d.dll" % _onig_api_version libonig = ctypes.cdll.LoadLibrary(_libname) elif sys.platform == "cygwin": # Cygwin _libname = "cygonigmo-%d.dll" % _onig_api_version libonig = ctypes.cdll.LoadLibrary(_libname) elif sys.platform == "msys": # MSYS/MSYS2 _libname = "msys-onigmo-%d.dll" % _onig_api_version libonig = ctypes.cdll.LoadLibrary(_libname) elif sys.platform == "darwin": # Mac _libname = "libonigmo.dylib" libonig = ctypes.cdll.LoadLibrary(_libname) else: # Unix _libname = "libonigmo.so" libonig = ctypes.cdll.LoadLibrary(_libname) # # Encodings # def _load_encoding(enc): return ctypes.pointer(OnigEncodingType.in_dll(libonig, enc)) ONIG_ENCODING_ASCII = _load_encoding("OnigEncodingASCII") ONIG_ENCODING_ISO_8859_1 = _load_encoding("OnigEncodingISO_8859_1") ONIG_ENCODING_ISO_8859_2 = _load_encoding("OnigEncodingISO_8859_2") ONIG_ENCODING_ISO_8859_3 = _load_encoding("OnigEncodingISO_8859_3") ONIG_ENCODING_ISO_8859_4 = _load_encoding("OnigEncodingISO_8859_4") ONIG_ENCODING_ISO_8859_5 = _load_encoding("OnigEncodingISO_8859_5") ONIG_ENCODING_ISO_8859_6 = _load_encoding("OnigEncodingISO_8859_6") ONIG_ENCODING_ISO_8859_7 = _load_encoding("OnigEncodingISO_8859_7") ONIG_ENCODING_ISO_8859_8 = _load_encoding("OnigEncodingISO_8859_8") ONIG_ENCODING_ISO_8859_9 = _load_encoding("OnigEncodingISO_8859_9") ONIG_ENCODING_ISO_8859_10 = _load_encoding("OnigEncodingISO_8859_10") ONIG_ENCODING_ISO_8859_11 = _load_encoding("OnigEncodingISO_8859_11") ONIG_ENCODING_ISO_8859_13 = _load_encoding("OnigEncodingISO_8859_13") ONIG_ENCODING_ISO_8859_14 = _load_encoding("OnigEncodingISO_8859_14") ONIG_ENCODING_ISO_8859_15 = _load_encoding("OnigEncodingISO_8859_15") ONIG_ENCODING_ISO_8859_16 = _load_encoding("OnigEncodingISO_8859_16") ONIG_ENCODING_UTF_8 = _load_encoding("OnigEncodingUTF_8") ONIG_ENCODING_UTF_16LE = _load_encoding("OnigEncodingUTF_16LE") ONIG_ENCODING_UTF_16BE = _load_encoding("OnigEncodingUTF_16BE") ONIG_ENCODING_UTF_32LE = 
_load_encoding("OnigEncodingUTF_32LE") ONIG_ENCODING_UTF_32BE = _load_encoding("OnigEncodingUTF_32BE") ONIG_ENCODING_UTF8 = ONIG_ENCODING_UTF_8 ONIG_ENCODING_UTF16_LE = ONIG_ENCODING_UTF_16LE ONIG_ENCODING_UTF16_BE = ONIG_ENCODING_UTF_16BE ONIG_ENCODING_UTF32_LE = ONIG_ENCODING_UTF_32LE ONIG_ENCODING_UTF32_BE = ONIG_ENCODING_UTF_32BE ONIG_ENCODING_EUC_JP = _load_encoding("OnigEncodingEUC_JP") ONIG_ENCODING_EUC_TW = _load_encoding("OnigEncodingEUC_TW") ONIG_ENCODING_EUC_KR = _load_encoding("OnigEncodingEUC_KR") ONIG_ENCODING_EUC_CN = _load_encoding("OnigEncodingEUC_CN") ONIG_ENCODING_SHIFT_JIS = _load_encoding("OnigEncodingShift_JIS") ONIG_ENCODING_WINDOWS_31J = _load_encoding("OnigEncodingWindows_31J") ONIG_ENCODING_SJIS = ONIG_ENCODING_SHIFT_JIS ONIG_ENCODING_CP932 = ONIG_ENCODING_WINDOWS_31J #ONIG_ENCODING_KOI8 = _load_encoding("OnigEncodingKOI8") ONIG_ENCODING_KOI8_R = _load_encoding("OnigEncodingKOI8_R") ONIG_ENCODING_KOI8_U = _load_encoding("OnigEncodingKOI8_U") ONIG_ENCODING_WINDOWS_1250 = _load_encoding("OnigEncodingWindows_1250") ONIG_ENCODING_WINDOWS_1251 = _load_encoding("OnigEncodingWindows_1251") ONIG_ENCODING_WINDOWS_1252 = _load_encoding("OnigEncodingWindows_1252") ONIG_ENCODING_WINDOWS_1253 = _load_encoding("OnigEncodingWindows_1253") ONIG_ENCODING_WINDOWS_1254 = _load_encoding("OnigEncodingWindows_1254") ONIG_ENCODING_WINDOWS_1257 = _load_encoding("OnigEncodingWindows_1257") ONIG_ENCODING_CP1250 = ONIG_ENCODING_WINDOWS_1250 ONIG_ENCODING_CP1251 = ONIG_ENCODING_WINDOWS_1251 ONIG_ENCODING_CP1252 = ONIG_ENCODING_WINDOWS_1252 ONIG_ENCODING_CP1253 = ONIG_ENCODING_WINDOWS_1253 ONIG_ENCODING_CP1254 = ONIG_ENCODING_WINDOWS_1254 ONIG_ENCODING_CP1257 = ONIG_ENCODING_WINDOWS_1257 ONIG_ENCODING_BIG5 = _load_encoding("OnigEncodingBIG5") ONIG_ENCODING_GB18030 = _load_encoding("OnigEncodingGB18030") #ONIG_ENCODING_UNDEF = None # # Syntaxes # def _load_syntax(syn): return ctypes.pointer(OnigSyntaxType.in_dll(libonig, syn)) ONIG_SYNTAX_ASIS = 
_load_syntax("OnigSyntaxASIS") ONIG_SYNTAX_POSIX_BASIC = _load_syntax("OnigSyntaxPosixBasic") ONIG_SYNTAX_POSIX_EXTENDED = _load_syntax("OnigSyntaxPosixExtended") ONIG_SYNTAX_EMACS = _load_syntax("OnigSyntaxEmacs") ONIG_SYNTAX_GREP = _load_syntax("OnigSyntaxGrep") ONIG_SYNTAX_GNU_REGEX = _load_syntax("OnigSyntaxGnuRegex") ONIG_SYNTAX_JAVA = _load_syntax("OnigSyntaxJava") ONIG_SYNTAX_PERL = _load_syntax("OnigSyntaxPerl") ONIG_SYNTAX_PERL58 = _load_syntax("OnigSyntaxPerl58") ONIG_SYNTAX_PERL58_NG = _load_syntax("OnigSyntaxPerl58_NG") ONIG_SYNTAX_RUBY = _load_syntax("OnigSyntaxRuby") ONIG_SYNTAX_PYTHON = _load_syntax("OnigSyntaxPython") ONIG_SYNTAX_DEFAULT = ctypes.POINTER(OnigSyntaxType).in_dll( libonig, "OnigDefaultSyntax") # # Constants # ONIG_MAX_ERROR_MESSAGE_LEN = 90 # options ONIG_OPTION_NONE = 0 ONIG_OPTION_IGNORECASE = 1 ONIG_OPTION_EXTEND = (ONIG_OPTION_IGNORECASE << 1) ONIG_OPTION_MULTILINE = (ONIG_OPTION_EXTEND << 1) ONIG_OPTION_DOTALL = ONIG_OPTION_MULTILINE ONIG_OPTION_SINGLELINE = (ONIG_OPTION_MULTILINE << 1) ONIG_OPTION_FIND_LONGEST = (ONIG_OPTION_SINGLELINE << 1) ONIG_OPTION_FIND_NOT_EMPTY = (ONIG_OPTION_FIND_LONGEST << 1) ONIG_OPTION_NEGATE_SINGLELINE = (ONIG_OPTION_FIND_NOT_EMPTY << 1) ONIG_OPTION_DONT_CAPTURE_GROUP = (ONIG_OPTION_NEGATE_SINGLELINE << 1) ONIG_OPTION_CAPTURE_GROUP = (ONIG_OPTION_DONT_CAPTURE_GROUP << 1) # options (search time) ONIG_OPTION_NOTBOL = (ONIG_OPTION_CAPTURE_GROUP << 1) ONIG_OPTION_NOTEOL = (ONIG_OPTION_NOTBOL << 1) ONIG_OPTION_NOTBOS = (ONIG_OPTION_NOTEOL << 1) ONIG_OPTION_NOTEOS = (ONIG_OPTION_NOTBOS << 1) # options (ctype range) ONIG_OPTION_ASCII_RANGE = (ONIG_OPTION_NOTEOS << 1) ONIG_OPTION_POSIX_BRACKET_ALL_RANGE = (ONIG_OPTION_ASCII_RANGE << 1) ONIG_OPTION_WORD_BOUND_ALL_RANGE = (ONIG_OPTION_POSIX_BRACKET_ALL_RANGE << 1) # options (newline) ONIG_OPTION_NEWLINE_CRLF = (ONIG_OPTION_WORD_BOUND_ALL_RANGE << 1) ONIG_OPTION_DEFAULT = ONIG_OPTION_NONE # syntax (operators) ONIG_SYN_OP_VARIABLE_META_CHARACTERS = (1<<0) 
ONIG_SYN_OP_DOT_ANYCHAR = (1<<1) ONIG_SYN_OP_ASTERISK_ZERO_INF = (1<<2) ONIG_SYN_OP_ESC_ASTERISK_ZERO_INF = (1<<3) ONIG_SYN_OP_PLUS_ONE_INF = (1<<4) ONIG_SYN_OP_ESC_PLUS_ONE_INF = (1<<5) ONIG_SYN_OP_QMARK_ZERO_ONE = (1<<6) ONIG_SYN_OP_ESC_QMARK_ZERO_ONE = (1<<7) ONIG_SYN_OP_BRACE_INTERVAL = (1<<8) ONIG_SYN_OP_ESC_BRACE_INTERVAL = (1<<9) ONIG_SYN_OP_VBAR_ALT = (1<<10) ONIG_SYN_OP_ESC_VBAR_ALT = (1<<11) ONIG_SYN_OP_LPAREN_SUBEXP = (1<<12) ONIG_SYN_OP_ESC_LPAREN_SUBEXP = (1<<13) ONIG_SYN_OP_ESC_AZ_BUF_ANCHOR = (1<<14) ONIG_SYN_OP_ESC_CAPITAL_G_BEGIN_ANCHOR = (1<<15) ONIG_SYN_OP_DECIMAL_BACKREF = (1<<16) ONIG_SYN_OP_BRACKET_CC = (1<<17) ONIG_SYN_OP_ESC_W_WORD = (1<<18) ONIG_SYN_OP_ESC_LTGT_WORD_BEGIN_END = (1<<19) ONIG_SYN_OP_ESC_B_WORD_BOUND = (1<<20) ONIG_SYN_OP_ESC_S_WHITE_SPACE = (1<<21) ONIG_SYN_OP_ESC_D_DIGIT = (1<<22) ONIG_SYN_OP_LINE_ANCHOR = (1<<23) ONIG_SYN_OP_POSIX_BRACKET = (1<<24) ONIG_SYN_OP_QMARK_NON_GREEDY = (1<<25) ONIG_SYN_OP_ESC_CONTROL_CHARS = (1<<26) ONIG_SYN_OP_ESC_C_CONTROL = (1<<27) ONIG_SYN_OP_ESC_OCTAL3 = (1<<28) ONIG_SYN_OP_ESC_X_HEX2 = (1<<29) ONIG_SYN_OP_ESC_X_BRACE_HEX8 = (1<<30) ONIG_SYN_OP_ESC_O_BRACE_OCTAL = (1<<31) ONIG_SYN_OP2_ESC_CAPITAL_Q_QUOTE = (1<<0) ONIG_SYN_OP2_QMARK_GROUP_EFFECT = (1<<1) ONIG_SYN_OP2_OPTION_PERL = (1<<2) ONIG_SYN_OP2_OPTION_RUBY = (1<<3) ONIG_SYN_OP2_PLUS_POSSESSIVE_REPEAT = (1<<4) ONIG_SYN_OP2_PLUS_POSSESSIVE_INTERVAL = (1<<5) ONIG_SYN_OP2_CCLASS_SET_OP = (1<<6) ONIG_SYN_OP2_QMARK_LT_NAMED_GROUP = (1<<7) ONIG_SYN_OP2_ESC_K_NAMED_BACKREF = (1<<8) ONIG_SYN_OP2_ESC_G_SUBEXP_CALL = (1<<9) ONIG_SYN_OP2_ATMARK_CAPTURE_HISTORY = (1<<10) ONIG_SYN_OP2_ESC_CAPITAL_C_BAR_CONTROL = (1<<11) ONIG_SYN_OP2_ESC_CAPITAL_M_BAR_META = (1<<12) ONIG_SYN_OP2_ESC_V_VTAB = (1<<13) ONIG_SYN_OP2_ESC_U_HEX4 = (1<<14) ONIG_SYN_OP2_ESC_GNU_BUF_ANCHOR = (1<<15) ONIG_SYN_OP2_ESC_P_BRACE_CHAR_PROPERTY = (1<<16) ONIG_SYN_OP2_ESC_P_BRACE_CIRCUMFLEX_NOT = (1<<17) #ONIG_SYN_OP2_CHAR_PROPERTY_PREFIX_IS = (1<<18) ONIG_SYN_OP2_ESC_H_XDIGIT = 
(1<<19) ONIG_SYN_OP2_INEFFECTIVE_ESCAPE = (1<<20) ONIG_SYN_OP2_ESC_CAPITAL_R_LINEBREAK = (1<<21) ONIG_SYN_OP2_ESC_CAPITAL_X_EXTENDED_GRAPHEME_CLUSTER = (1<<22) ONIG_SYN_OP2_ESC_V_VERTICAL_WHITESPACE = (1<<23) ONIG_SYN_OP2_ESC_H_HORIZONTAL_WHITESPACE = (1<<24) ONIG_SYN_OP2_ESC_CAPITAL_K_KEEP = (1<<25) ONIG_SYN_OP2_ESC_G_BRACE_BACKREF = (1<<26) ONIG_SYN_OP2_QMARK_SUBEXP_CALL = (1<<27) ONIG_SYN_OP2_QMARK_VBAR_BRANCH_RESET = (1<<28) ONIG_SYN_OP2_QMARK_LPAREN_CONDITION = (1<<29) ONIG_SYN_OP2_QMARK_CAPITAL_P_NAMED_GROUP = (1<<30) ONIG_SYN_OP2_OPTION_JAVA = (1<<31) # syntax (behavior) ONIG_SYN_CONTEXT_INDEP_ANCHORS = (1<<31) ONIG_SYN_CONTEXT_INDEP_REPEAT_OPS = (1<<0) ONIG_SYN_CONTEXT_INVALID_REPEAT_OPS = (1<<1) ONIG_SYN_ALLOW_UNMATCHED_CLOSE_SUBEXP = (1<<2) ONIG_SYN_ALLOW_INVALID_INTERVAL = (1<<3) ONIG_SYN_ALLOW_INTERVAL_LOW_ABBREV = (1<<4) ONIG_SYN_STRICT_CHECK_BACKREF = (1<<5) ONIG_SYN_DIFFERENT_LEN_ALT_LOOK_BEHIND = (1<<6) ONIG_SYN_CAPTURE_ONLY_NAMED_GROUP = (1<<7) ONIG_SYN_ALLOW_MULTIPLEX_DEFINITION_NAME = (1<<8) ONIG_SYN_FIXED_INTERVAL_IS_GREEDY_ONLY = (1<<9) ONIG_SYN_ALLOW_MULTIPLEX_DEFINITION_NAME_CALL = (1<<10) ONIG_SYN_USE_LEFT_MOST_NAMED_GROUP = (1<<11) # (behavior) in char class [...] 
ONIG_SYN_NOT_NEWLINE_IN_NEGATIVE_CC = (1<<20) ONIG_SYN_BACKSLASH_ESCAPE_IN_CC = (1<<21) ONIG_SYN_ALLOW_EMPTY_RANGE_IN_CC = (1<<22) ONIG_SYN_ALLOW_DOUBLE_RANGE_OP_IN_CC = (1<<23) # syntax (behavior) warning ONIG_SYN_WARN_CC_OP_NOT_ESCAPED = (1<<24) ONIG_SYN_WARN_REDUNDANT_NESTED_REPEAT = (1<<25) ONIG_SYN_WARN_CC_DUP = (1<<26) # meta character specifiers (onig_set_meta_char()) ONIG_META_CHAR_ESCAPE = 0 ONIG_META_CHAR_ANYCHAR = 1 ONIG_META_CHAR_ANYTIME = 2 ONIG_META_CHAR_ZERO_OR_ONE_TIME = 3 ONIG_META_CHAR_ONE_OR_MORE_TIME = 4 ONIG_META_CHAR_ANYCHAR_ANYTIME = 5 ONIG_INEFFECTIVE_META_CHAR = 0 # error codes def ONIG_IS_PATTERN_ERROR(ecode): return ((ecode) <= -100 and (ecode) > -1000) # normal return ONIG_NORMAL = 0 ONIG_MISMATCH = -1 ONIG_NO_SUPPORT_CONFIG = -2 # internal error ONIGERR_MEMORY = -5 ONIGERR_TYPE_BUG = -6 ONIGERR_PARSER_BUG = -11 ONIGERR_STACK_BUG = -12 ONIGERR_UNDEFINED_BYTECODE = -13 ONIGERR_UNEXPECTED_BYTECODE = -14 ONIGERR_MATCH_STACK_LIMIT_OVER = -15 ONIGERR_PARSE_DEPTH_LIMIT_OVER = -16 ONIGERR_DEFAULT_ENCODING_IS_NOT_SET = -21 ONIGERR_SPECIFIED_ENCODING_CANT_CONVERT_TO_WIDE_CHAR = -22 # general error ONIGERR_INVALID_ARGUMENT = -30 # syntax error ONIGERR_END_PATTERN_AT_LEFT_BRACE = -100 ONIGERR_END_PATTERN_AT_LEFT_BRACKET = -101 ONIGERR_EMPTY_CHAR_CLASS = -102 ONIGERR_PREMATURE_END_OF_CHAR_CLASS = -103 ONIGERR_END_PATTERN_AT_ESCAPE = -104 ONIGERR_END_PATTERN_AT_META = -105 ONIGERR_END_PATTERN_AT_CONTROL = -106 ONIGERR_META_CODE_SYNTAX = -108 ONIGERR_CONTROL_CODE_SYNTAX = -109 ONIGERR_CHAR_CLASS_VALUE_AT_END_OF_RANGE = -110 ONIGERR_CHAR_CLASS_VALUE_AT_START_OF_RANGE = -111 ONIGERR_UNMATCHED_RANGE_SPECIFIER_IN_CHAR_CLASS = -112 ONIGERR_TARGET_OF_REPEAT_OPERATOR_NOT_SPECIFIED = -113 ONIGERR_TARGET_OF_REPEAT_OPERATOR_INVALID = -114 ONIGERR_NESTED_REPEAT_OPERATOR = -115 ONIGERR_UNMATCHED_CLOSE_PARENTHESIS = -116 ONIGERR_END_PATTERN_WITH_UNMATCHED_PARENTHESIS = -117 ONIGERR_END_PATTERN_IN_GROUP = -118 ONIGERR_UNDEFINED_GROUP_OPTION = -119 
ONIGERR_INVALID_POSIX_BRACKET_TYPE = -121 ONIGERR_INVALID_LOOK_BEHIND_PATTERN = -122 ONIGERR_INVALID_REPEAT_RANGE_PATTERN = -123 ONIGERR_INVALID_CONDITION_PATTERN = -124 # values error (syntax error) ONIGERR_TOO_BIG_NUMBER = -200 ONIGERR_TOO_BIG_NUMBER_FOR_REPEAT_RANGE = -201 ONIGERR_UPPER_SMALLER_THAN_LOWER_IN_REPEAT_RANGE = -202 ONIGERR_EMPTY_RANGE_IN_CHAR_CLASS = -203 ONIGERR_MISMATCH_CODE_LENGTH_IN_CLASS_RANGE = -204 ONIGERR_TOO_MANY_MULTI_BYTE_RANGES = -205 ONIGERR_TOO_SHORT_MULTI_BYTE_STRING = -206 ONIGERR_TOO_BIG_BACKREF_NUMBER = -207 ONIGERR_INVALID_BACKREF = -208 ONIGERR_NUMBERED_BACKREF_OR_CALL_NOT_ALLOWED = -209 ONIGERR_TOO_MANY_CAPTURE_GROUPS = -210 ONIGERR_TOO_SHORT_DIGITS = -211 ONIGERR_TOO_LONG_WIDE_CHAR_VALUE = -212 ONIGERR_EMPTY_GROUP_NAME = -214 ONIGERR_INVALID_GROUP_NAME = -215 ONIGERR_INVALID_CHAR_IN_GROUP_NAME = -216 ONIGERR_UNDEFINED_NAME_REFERENCE = -217 ONIGERR_UNDEFINED_GROUP_REFERENCE = -218 ONIGERR_MULTIPLEX_DEFINED_NAME = -219 ONIGERR_MULTIPLEX_DEFINITION_NAME_CALL = -220 ONIGERR_NEVER_ENDING_RECURSION = -221 ONIGERR_GROUP_NUMBER_OVER_FOR_CAPTURE_HISTORY = -222 ONIGERR_INVALID_CHAR_PROPERTY_NAME = -223 ONIGERR_INVALID_CODE_POINT_VALUE = -400 ONIGERR_INVALID_WIDE_CHAR_VALUE = -400 ONIGERR_TOO_BIG_WIDE_CHAR_VALUE = -401 ONIGERR_NOT_SUPPORTED_ENCODING_COMBINATION = -402 ONIGERR_INVALID_COMBINATION_OF_OPTIONS = -403 # errors related to thread #ONIGERR_OVER_THREAD_PASS_LIMIT_COUNT = -1001 OnigWarnFunc = ctypes.CFUNCTYPE(None, ctypes.c_char_p) # # Onigmo APIs # # onig_init onig_init = libonig.onig_init # onig_error_code_to_str libonig.onig_error_code_to_str.argtypes = [ctypes.c_char_p, _c_ssize_t, ctypes.POINTER(OnigErrorInfo)] def onig_error_code_to_str(err_buf, err_code, err_info=None): return libonig.onig_error_code_to_str(err_buf, err_code, err_info) # onig_set_warn_func libonig.onig_set_warn_func.argtypes = [OnigWarnFunc] onig_set_warn_func = libonig.onig_set_warn_func # onig_set_verb_warn_func libonig.onig_set_verb_warn_func.argtypes = 
[OnigWarnFunc] onig_set_verb_warn_func = libonig.onig_set_verb_warn_func # onig_new libonig.onig_new.argtypes = [ctypes.POINTER(OnigRegex), ctypes.c_void_p, ctypes.c_void_p, OnigOptionType, OnigEncoding, ctypes.POINTER(OnigSyntaxType), ctypes.POINTER(OnigErrorInfo)] onig_new = libonig.onig_new # onig_reg_init # onig_new_without_alloc # onig_new_deluxe # onig_free libonig.onig_free.argtypes = [OnigRegex] onig_free = libonig.onig_free # onig_free_body # onig_search libonig.onig_search.argtypes = [OnigRegex, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.POINTER(OnigRegion), OnigOptionType] libonig.onig_search.restype = _c_ssize_t onig_search = libonig.onig_search # onig_search_gpos libonig.onig_search_gpos.argtypes = [OnigRegex, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.POINTER(OnigRegion), OnigOptionType] libonig.onig_search_gpos.restype = _c_ssize_t onig_search_gpos = libonig.onig_search_gpos # onig_match libonig.onig_match.argtypes = [OnigRegex, ctypes.c_void_p, ctypes.c_void_p, ctypes.c_void_p, ctypes.POINTER(OnigRegion), OnigOptionType] libonig.onig_match.restype = _c_ssize_t onig_match = libonig.onig_match # onig_region_new libonig.onig_region_new.argtypes = [] libonig.onig_region_new.restype = ctypes.POINTER(OnigRegion) onig_region_new = libonig.onig_region_new # onig_region_init # onig_region_free libonig.onig_region_free.argtypes = [ctypes.POINTER(OnigRegion), ctypes.c_int] onig_region_free = libonig.onig_region_free # onig_region_copy # onig_region_clear # onig_region_resize # onig_region_set # onig_name_to_group_numbers # onig_name_to_backref_number # onig_foreach_name # onig_number_of_names # onig_number_of_captures # onig_number_of_capture_histories # onig_get_capture_tree # onig_capture_tree_traverse # onig_noname_group_capture_is_active # onig_get_encoding # onig_get_options # onig_get_case_fold_flag # onig_get_syntax # onig_set_default_syntax 
# ctypes signatures for Onigmo's default-syntax and syntax-introspection API.
# Pattern for each binding: declare argtypes (and restype where the C function
# returns a value), then re-export the foreign function under a module-level
# name so callers use `onig_xxx(...)` directly.
libonig.onig_set_default_syntax.argtypes = [ctypes.POINTER(OnigSyntaxType)]
libonig.onig_set_default_syntax.restype = ctypes.c_int
onig_set_default_syntax = libonig.onig_set_default_syntax

# onig_copy_syntax
libonig.onig_copy_syntax.argtypes = [ctypes.POINTER(OnigSyntaxType),
                                     ctypes.POINTER(OnigSyntaxType)]
onig_copy_syntax = libonig.onig_copy_syntax

# onig_get_syntax_op
# NOTE(review): the op/op2/behavior/options accessors below use c_int as the
# return type; in the C API these are unsigned bit masks, so a mask with the
# high bit set would come back negative — confirm against onigmo.h.
libonig.onig_get_syntax_op.argtypes = [ctypes.POINTER(OnigSyntaxType)]
libonig.onig_get_syntax_op.restype = ctypes.c_int
onig_get_syntax_op = libonig.onig_get_syntax_op

# onig_get_syntax_op2
libonig.onig_get_syntax_op2.argtypes = [ctypes.POINTER(OnigSyntaxType)]
libonig.onig_get_syntax_op2.restype = ctypes.c_int
onig_get_syntax_op2 = libonig.onig_get_syntax_op2

# onig_get_syntax_behavior
libonig.onig_get_syntax_behavior.argtypes = [ctypes.POINTER(OnigSyntaxType)]
libonig.onig_get_syntax_behavior.restype = ctypes.c_int
onig_get_syntax_behavior = libonig.onig_get_syntax_behavior

# onig_get_syntax_options
libonig.onig_get_syntax_options.argtypes = [ctypes.POINTER(OnigSyntaxType)]
libonig.onig_get_syntax_options.restype = ctypes.c_int
onig_get_syntax_options = libonig.onig_get_syntax_options

# onig_set_syntax_op
libonig.onig_set_syntax_op.argtypes = [ctypes.POINTER(OnigSyntaxType),
                                       ctypes.c_int]
onig_set_syntax_op = libonig.onig_set_syntax_op

# onig_set_syntax_op2
libonig.onig_set_syntax_op2.argtypes = [ctypes.POINTER(OnigSyntaxType),
                                        ctypes.c_int]
onig_set_syntax_op2 = libonig.onig_set_syntax_op2

# onig_set_syntax_behavior
libonig.onig_set_syntax_behavior.argtypes = [ctypes.POINTER(OnigSyntaxType),
                                             ctypes.c_int]
onig_set_syntax_behavior = libonig.onig_set_syntax_behavior

# onig_set_syntax_options
libonig.onig_set_syntax_options.argtypes = [ctypes.POINTER(OnigSyntaxType),
                                            ctypes.c_int]
onig_set_syntax_options = libonig.onig_set_syntax_options

# The bare names below are C API entry points that have no Python binding yet
# (except the last, which labels the binding that follows).
# onig_set_meta_char
# onig_copy_encoding
# onig_get_default_case_fold_flag
# onig_set_default_case_fold_flag
# onig_get_match_stack_limit_size
# Match-time resource limits, library shutdown, and version/copyright helpers.
# NOTE(review): the limit getters/setters use c_int; the C API declares these
# limits as unsigned int — values above INT_MAX would wrap. Confirm against
# onigmo.h if large limits are ever needed.
libonig.onig_get_match_stack_limit_size.argtypes = []
libonig.onig_get_match_stack_limit_size.restype = ctypes.c_int
onig_get_match_stack_limit_size = libonig.onig_get_match_stack_limit_size

# onig_set_match_stack_limit_size
libonig.onig_set_match_stack_limit_size.argtypes = [ctypes.c_int]
libonig.onig_set_match_stack_limit_size.restype = ctypes.c_int
onig_set_match_stack_limit_size = libonig.onig_set_match_stack_limit_size

# onig_get_parse_depth_limit
libonig.onig_get_parse_depth_limit.argtypes = []
libonig.onig_get_parse_depth_limit.restype = ctypes.c_int
onig_get_parse_depth_limit = libonig.onig_get_parse_depth_limit

# onig_set_parse_depth_limit
libonig.onig_set_parse_depth_limit.argtypes = [ctypes.c_int]
libonig.onig_set_parse_depth_limit.restype = ctypes.c_int
onig_set_parse_depth_limit = libonig.onig_set_parse_depth_limit

# onig_end
libonig.onig_end.argtypes = []
onig_end = libonig.onig_end

# onig_version
libonig.onig_version.argtypes = []
libonig.onig_version.restype = ctypes.c_char_p


def onig_version():
    """Return the Onigmo library version as a Python str.

    Wraps the C entry point (which returns a NUL-terminated char*) and
    decodes the bytes so callers get text on both Python 2 and 3.
    """
    return libonig.onig_version().decode()


# onig_copyright
libonig.onig_copyright.argtypes = []
libonig.onig_copyright.restype = ctypes.c_char_p


def onig_copyright():
    """Return the Onigmo copyright banner as a Python str."""
    return libonig.onig_copyright().decode()
mit
soarpenguin/ansible
lib/ansible/plugins/terminal/vyos.py
191
1700
# # (c) 2016 Red Hat Inc. # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import re from ansible.plugins.terminal import TerminalBase from ansible.errors import AnsibleConnectionFailure class TerminalModule(TerminalBase): terminal_stdout_re = [ re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"), re.compile(br"\@[\w\-\.]+:\S+?[>#\$] ?$") ] terminal_stderr_re = [ re.compile(br"\n\s*Invalid command:"), re.compile(br"\nCommit failed"), re.compile(br"\n\s+Set failed"), ] terminal_length = os.getenv('ANSIBLE_VYOS_TERMINAL_LENGTH', 10000) def on_open_shell(self): try: for cmd in (b'set terminal length 0', b'set terminal width 512'): self._exec_cli_command(cmd) self._exec_cli_command(b'set terminal length %d' % self.terminal_length) except AnsibleConnectionFailure: raise AnsibleConnectionFailure('unable to set terminal parameters')
gpl-3.0
blaffoy/jenkins-job-builder
jenkins_jobs/modules/parameters.py
12
24380
# Copyright 2012 Hewlett-Packard Development Company, L.P. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. """ The Parameters module allows you to specify build parameters for a job. **Component**: parameters :Macro: parameter :Entry Point: jenkins_jobs.parameters Example:: job: name: test_job parameters: - string: name: FOO default: bar description: "A parameter named FOO, defaults to 'bar'." """ import xml.etree.ElementTree as XML import jenkins_jobs.modules.base from jenkins_jobs.errors import JenkinsJobsException def base_param(parser, xml_parent, data, do_default, ptype): pdef = XML.SubElement(xml_parent, ptype) XML.SubElement(pdef, 'name').text = data['name'] XML.SubElement(pdef, 'description').text = data.get('description', '') if do_default: default = data.get('default', None) if default: XML.SubElement(pdef, 'defaultValue').text = default else: XML.SubElement(pdef, 'defaultValue') return pdef def string_param(parser, xml_parent, data): """yaml: string A string parameter. :arg str name: the name of the parameter :arg str default: the default value of the parameter (optional) :arg str description: a description of the parameter (optional) Example:: parameters: - string: name: FOO default: bar description: "A parameter named FOO, defaults to 'bar'." """ base_param(parser, xml_parent, data, True, 'hudson.model.StringParameterDefinition') def password_param(parser, xml_parent, data): """yaml: password A password parameter. 
:arg str name: the name of the parameter :arg str default: the default value of the parameter (optional) :arg str description: a description of the parameter (optional) Example:: parameters: - password: name: FOO default: 1HSC0Ts6E161FysGf+e1xasgsHkgleLh09JUTYnipPvw= description: "A parameter named FOO." """ base_param(parser, xml_parent, data, True, 'hudson.model.PasswordParameterDefinition') def bool_param(parser, xml_parent, data): """yaml: bool A boolean parameter. :arg str name: the name of the parameter :arg str default: the default value of the parameter (optional) :arg str description: a description of the parameter (optional) Example:: parameters: - bool: name: FOO default: false description: "A parameter named FOO, defaults to 'false'." """ data['default'] = str(data.get('default', False)).lower() base_param(parser, xml_parent, data, True, 'hudson.model.BooleanParameterDefinition') def file_param(parser, xml_parent, data): """yaml: file A file parameter. :arg str name: the target location for the file upload :arg str description: a description of the parameter (optional) Example:: parameters: - file: name: test.txt description: "Upload test.txt." """ base_param(parser, xml_parent, data, False, 'hudson.model.FileParameterDefinition') def text_param(parser, xml_parent, data): """yaml: text A text parameter. :arg str name: the name of the parameter :arg str default: the default value of the parameter (optional) :arg str description: a description of the parameter (optional) Example:: parameters: - text: name: FOO default: bar description: "A parameter named FOO, defaults to 'bar'." """ base_param(parser, xml_parent, data, True, 'hudson.model.TextParameterDefinition') def label_param(parser, xml_parent, data): """yaml: label A node label parameter. 
:arg str name: the name of the parameter :arg str default: the default value of the parameter (optional) :arg str description: a description of the parameter (optional) Example:: parameters: - label: name: node default: precise description: "The node on which to run the job" """ base_param(parser, xml_parent, data, True, 'org.jvnet.jenkins.plugins.nodelabelparameter.' 'LabelParameterDefinition') def node_param(parser, xml_parent, data): """yaml: node Defines a list of nodes where this job could potentially be executed on. Restrict where this project can be run, If your using a node or label parameter to run your job on a particular node, you should not use the option "Restrict where this project can be run" in the job configuration - it will not have any effect to the selection of your node anymore! :arg str name: the name of the parameter :arg str description: a description of the parameter (optional) :arg list default-nodes: The nodes used when job gets triggered by anything else other than manually :arg list allowed-slaves: The nodes available for selection when job gets triggered manually. Empty means 'All'. :arg bool ignore-offline-nodes: Ignore nodes not online or not having executors (default false) :arg bool allowed-multiselect: Allow multi node selection for concurrent builds - this option only makes sense (and must be selected!) in case the job is configured with: "Execute concurrent builds if necessary". With this configuration the build will be executed on all the selected nodes in parallel. (default false) Example: .. literalinclude:: /../../tests/parameters/fixtures/node-param001.yaml :language: yaml """ pdef = base_param(parser, xml_parent, data, False, 'org.jvnet.jenkins.plugins.nodelabelparameter.' 
'NodeParameterDefinition') default = XML.SubElement(pdef, 'defaultSlaves') if 'default-slaves' in data: for slave in data['default-slaves']: XML.SubElement(default, 'string').text = slave allowed = XML.SubElement(pdef, 'allowedSlaves') if 'allowed-slaves' in data: for slave in data['allowed-slaves']: XML.SubElement(allowed, 'string').text = slave XML.SubElement(pdef, 'ignoreOfflineNodes').text = str( data.get('ignore-offline-nodes', False)).lower() if data.get('allowed-multiselect', False): XML.SubElement(pdef, 'triggerIfResult').text = \ 'allowMultiSelectionForConcurrentBuilds' else: XML.SubElement(pdef, 'triggerIfResult').text = \ 'multiSelectionDisallowed' XML.SubElement(pdef, 'allowMultiNodeSelection').text = str( data.get('allowed-multiselect', False)).lower() XML.SubElement(pdef, 'triggerConcurrentBuilds').text = str( data.get('allowed-multiselect', False)).lower() def choice_param(parser, xml_parent, data): """yaml: choice A single selection parameter. :arg str name: the name of the parameter :arg list choices: the available choices :arg str description: a description of the parameter (optional) Example:: parameters: - choice: name: project choices: - nova - glance description: "On which project to run?" """ pdef = base_param(parser, xml_parent, data, False, 'hudson.model.ChoiceParameterDefinition') choices = XML.SubElement(pdef, 'choices', {'class': 'java.util.Arrays$ArrayList'}) a = XML.SubElement(choices, 'a', {'class': 'string-array'}) for choice in data['choices']: XML.SubElement(a, 'string').text = choice def run_param(parser, xml_parent, data): """yaml: run A run parameter. :arg str name: the name of the parameter :arg str project-name: the name of job from which the user can pick runs :arg str description: a description of the parameter (optional) Example: .. 
literalinclude:: /../../tests/parameters/fixtures/run-param001.yaml :language: yaml """ pdef = base_param(parser, xml_parent, data, False, 'hudson.model.RunParameterDefinition') XML.SubElement(pdef, 'projectName').text = data['project-name'] def extended_choice_param(parser, xml_parent, data): """yaml: extended-choice Creates an extended choice parameter where values can be read from a file Requires the Jenkins :jenkins-wiki:`Extended Choice Parameter Plugin <Extended+Choice+Parameter+plugin>`. :arg str name: name of the parameter :arg str description: description of the parameter (optional, default '') :arg str property-file: location of property file to read from (optional, default '') :arg str property-key: key for the property-file (optional, default '') :arg bool quote-value: whether to put quotes around the property when passing to Jenkins (optional, default false) :arg str visible-items: number of items to show in the list (optional, default 5) :arg str type: type of select, can be single-select, multi-select, radio, checkbox or textbox (optional, default single-select) :arg str value: comma separated list of values for the single select or multi-select box (optional, default '') :arg str default-value: used to set the initial selection of the single-select or multi-select box (optional, default '') :arg str default-property-file: location of property file when default value needs to come from a property file (optional, default '') :arg str default-property-key: key for the default property file (optional, default '') :arg str multi-select-delimiter: value between selections when the parameter is a multi-select (optiona, default ',') Example: .. literalinclude:: \ /../../tests/parameters/fixtures/extended-choice-param001.yaml :language: yaml """ pdef = base_param(parser, xml_parent, data, False, 'com.cwctravel.hudson.plugins.' 'extended__choice__parameter.' 
'ExtendedChoiceParameterDefinition') XML.SubElement(pdef, 'value').text = data.get('value', '') XML.SubElement(pdef, 'visibleItemCount').text = str(data.get( 'visible-items', data.get('visible-item-count', 5))) XML.SubElement(pdef, 'multiSelectDelimiter').text = data.get( 'multi-select-delimiter', ',') XML.SubElement(pdef, 'quoteValue').text = str(data.get('quote-value', False)).lower() XML.SubElement(pdef, 'defaultValue').text = data.get( 'default-value', '') choice = data.get('type', 'single-select') choicedict = {'single-select': 'PT_SINGLE_SELECT', 'multi-select': 'PT_MULTI_SELECT', 'radio': 'PT_RADIO', 'checkbox': 'PT_CHECKBOX', 'textbox': 'PT_TEXTBOX', 'PT_SINGLE_SELECT': 'PT_SINGLE_SELECT', 'PT_MULTI_SELECT': 'PT_MULTI_SELECT', 'PT_RADIO': 'PT_RADIO', 'PT_CHECKBOX': 'PT_CHECKBOX', 'PT_TEXTBOX': 'PT_TEXTBOX'} if choice in choicedict: XML.SubElement(pdef, 'type').text = choicedict[choice] else: raise JenkinsJobsException("Type entered is not valid, must be one " "of: single-select, multi-select, radio, " "textbox or checkbox") XML.SubElement(pdef, 'propertyFile').text = data.get('property-file', '') XML.SubElement(pdef, 'propertyKey').text = data.get('property-key', '') XML.SubElement(pdef, 'defaultPropertyFile').text = data.get( 'default-property-file', '') XML.SubElement(pdef, 'defaultPropertyKey').text = data.get( 'default-property-key', '') def validating_string_param(parser, xml_parent, data): """yaml: validating-string A validating string parameter Requires the Jenkins :jenkins-wiki:`Validating String Plugin <Validating+String+Parameter+Plugin>`. :arg str name: the name of the parameter :arg str default: the default value of the parameter (optional) :arg str description: a description of the parameter (optional) :arg str regex: a regular expression to validate the string :arg str msg: a message to display upon failed validation Example:: parameters: - validating-string: name: FOO default: bar description: "A parameter named FOO, defaults to 'bar'." 
regex: [A-Za-z]* msg: Your entered value failed validation """ pdef = base_param(parser, xml_parent, data, True, 'hudson.plugins.validating__string__parameter.' 'ValidatingStringParameterDefinition') XML.SubElement(pdef, 'regex').text = data['regex'] XML.SubElement(pdef, 'failedValidationMessage').text = data['msg'] def svn_tags_param(parser, xml_parent, data): """yaml: svn-tags A svn tag parameter Requires the Jenkins :jenkins-wiki:`Parameterized Trigger Plugin <Parameterized+Trigger+Plugin>`. :arg str name: the name of the parameter :arg str default: the default value of the parameter (optional) :arg str description: a description of the parameter (optional) :arg str url: the url to list tags from :arg str filter: the regular expression to filter tags Example:: parameters: - svn-tags: name: BRANCH_NAME default: release description: A parameter named BRANCH_NAME default is release url: http://svn.example.com/repo filter: [A-za-z0-9]* """ pdef = base_param(parser, xml_parent, data, True, 'hudson.scm.listtagsparameter.' 'ListSubversionTagsParameterDefinition') XML.SubElement(pdef, 'tagsDir').text = data['url'] XML.SubElement(pdef, 'tagsFilter').text = data.get('filter', None) XML.SubElement(pdef, 'reverseByDate').text = "true" XML.SubElement(pdef, 'reverseByName').text = "false" XML.SubElement(pdef, 'maxTags').text = "100" XML.SubElement(pdef, 'uuid').text = "1-1-1-1-1" def dynamic_choice_param(parser, xml_parent, data): """yaml: dynamic-choice Dynamic Choice Parameter Requires the Jenkins :jenkins-wiki:`Jenkins Dynamic Parameter Plug-in <Dynamic+Parameter+Plug-in>`. :arg str name: the name of the parameter :arg str description: a description of the parameter (optional) :arg str script: Groovy expression which generates the potential choices. 
:arg bool remote: the script will be executed on the slave where the build is started (default false) :arg str classpath: class path for script (optional) :arg bool read-only: user can't modify parameter once populated (default false) Example:: parameters: - dynamic-choice: name: OPTIONS description: "Available options" script: "['optionA', 'optionB']" remote: false read-only: false """ dynamic_param_common(parser, xml_parent, data, 'ChoiceParameterDefinition') def dynamic_string_param(parser, xml_parent, data): """yaml: dynamic-string Dynamic Parameter Requires the Jenkins :jenkins-wiki:`Jenkins Dynamic Parameter Plug-in <Dynamic+Parameter+Plug-in>`. :arg str name: the name of the parameter :arg str description: a description of the parameter (optional) :arg str script: Groovy expression which generates the potential choices :arg bool remote: the script will be executed on the slave where the build is started (default false) :arg str classpath: class path for script (optional) :arg bool read-only: user can't modify parameter once populated (default false) Example:: parameters: - dynamic-string: name: FOO description: "A parameter named FOO, defaults to 'bar'." script: "bar" remote: false read-only: false """ dynamic_param_common(parser, xml_parent, data, 'StringParameterDefinition') def dynamic_choice_scriptler_param(parser, xml_parent, data): """yaml: dynamic-choice-scriptler Dynamic Choice Parameter (Scriptler) Requires the Jenkins :jenkins-wiki:`Jenkins Dynamic Parameter Plug-in <Dynamic+Parameter+Plug-in>`. 
:arg str name: the name of the parameter :arg str description: a description of the parameter (optional) :arg str script-id: Groovy script which generates the default value :arg list parameters: parameters to corresponding script :Parameter: * **name** (`str`) Parameter name * **value** (`str`) Parameter value :arg bool remote: the script will be executed on the slave where the build is started (default false) :arg bool read-only: user can't modify parameter once populated (default false) Example:: parameters: - dynamic-choice-scriptler: name: OPTIONS description: "Available options" script-id: "scriptid.groovy" parameters: - name: param1 value: value1 - name: param2 value: value2 remote: false read-only: false """ dynamic_scriptler_param_common(parser, xml_parent, data, 'ScriptlerChoiceParameterDefinition') def dynamic_string_scriptler_param(parser, xml_parent, data): """yaml: dynamic-string-scriptler Dynamic Parameter (Scriptler) Requires the Jenkins :jenkins-wiki:`Jenkins Dynamic Parameter Plug-in <Dynamic+Parameter+Plug-in>`. :arg str name: the name of the parameter :arg str description: a description of the parameter (optional) :arg str script-id: Groovy script which generates the default value :arg list parameters: parameters to corresponding script :Parameter: * **name** (`str`) Parameter name * **value** (`str`) Parameter value :arg bool remote: the script will be executed on the slave where the build is started (default false) :arg bool read-only: user can't modify parameter once populated (default false) Example:: parameters: - dynamic-string-scriptler: name: FOO description: "A parameter named FOO, defaults to 'bar'." 
script-id: "scriptid.groovy" parameters: - name: param1 value: value1 - name: param2 value: value2 remote: false read-only: false """ dynamic_scriptler_param_common(parser, xml_parent, data, 'ScriptlerStringParameterDefinition') def dynamic_param_common(parser, xml_parent, data, ptype): pdef = base_param(parser, xml_parent, data, False, 'com.seitenbau.jenkins.plugins.dynamicparameter.' + ptype) XML.SubElement(pdef, '__remote').text = str( data.get('remote', False)).lower() XML.SubElement(pdef, '__script').text = data.get('script', None) localBaseDir = XML.SubElement(pdef, '__localBaseDirectory', {'serialization': 'custom'}) filePath = XML.SubElement(localBaseDir, 'hudson.FilePath') default = XML.SubElement(filePath, 'default') XML.SubElement(filePath, 'boolean').text = "true" XML.SubElement(default, 'remote').text = \ "/var/lib/jenkins/dynamic_parameter/classpath" XML.SubElement(pdef, '__remoteBaseDirectory').text = \ "dynamic_parameter_classpath" XML.SubElement(pdef, '__classPath').text = data.get('classpath', None) XML.SubElement(pdef, 'readonlyInputField').text = str( data.get('read-only', False)).lower() def dynamic_scriptler_param_common(parser, xml_parent, data, ptype): pdef = base_param(parser, xml_parent, data, False, 'com.seitenbau.jenkins.plugins.dynamicparameter.' 'scriptler.' + ptype) XML.SubElement(pdef, '__remote').text = str( data.get('remote', False)).lower() XML.SubElement(pdef, '__scriptlerScriptId').text = data.get( 'script-id', None) parametersXML = XML.SubElement(pdef, '__parameters') parameters = data.get('parameters', []) if parameters: for parameter in parameters: parameterXML = XML.SubElement(parametersXML, 'com.seitenbau.jenkins.plugins.' 'dynamicparameter.scriptler.' 
'ScriptlerParameterDefinition_' '-ScriptParameter') XML.SubElement(parameterXML, 'name').text = parameter['name'] XML.SubElement(parameterXML, 'value').text = parameter['value'] XML.SubElement(pdef, 'readonlyInputField').text = str(data.get( 'read-only', False)).lower() def matrix_combinations_param(parser, xml_parent, data): """yaml: matrix-combinations Matrix combinations parameter Requires the Jenkins :jenkins-wiki:`Matrix Combinations Plugin <Matrix+Combinations+Plugin>`. :arg str name: the name of the parameter :arg str description: a description of the parameter (optional) :arg str filter: Groovy expression to use filter the combination by default (optional) Example: .. literalinclude:: \ /../../tests/parameters/fixtures/matrix-combinations-param001.yaml :language: yaml """ element_name = 'hudson.plugins.matrix__configuration__parameter.' \ 'MatrixCombinationsParameterDefinition' pdef = XML.SubElement(xml_parent, element_name) if 'name' not in data: raise JenkinsJobsException('matrix-combinations must have a name ' 'parameter.') XML.SubElement(pdef, 'name').text = data['name'] XML.SubElement(pdef, 'description').text = data.get('description', '') combination_filter = data.get('filter') if combination_filter: XML.SubElement(pdef, 'defaultCombinationFilter').text = \ combination_filter return pdef class Parameters(jenkins_jobs.modules.base.Base): sequence = 21 component_type = 'parameter' component_list_type = 'parameters' def gen_xml(self, parser, xml_parent, data): properties = xml_parent.find('properties') if properties is None: properties = XML.SubElement(xml_parent, 'properties') parameters = data.get('parameters', []) hmodel = 'hudson.model.' if parameters: # The conditionals here are to work around the extended_choice # parameter also being definable in the properties module. This # usage has been deprecated but not removed. 
Because it may have # added these elements before us, we need to check if they already # exist, and only add them if they're missing. pdefp = properties.find(hmodel + 'ParametersDefinitionProperty') if pdefp is None: pdefp = XML.SubElement(properties, hmodel + 'ParametersDefinitionProperty') pdefs = pdefp.find('parameterDefinitions') if pdefs is None: pdefs = XML.SubElement(pdefp, 'parameterDefinitions') for param in parameters: self.registry.dispatch('parameter', parser, pdefs, param)
apache-2.0
pikhovkin/instructor
instructor/model.py
1
2702
from collections import OrderedDict

from .errors import InvalidData, InvalidDataSize, InvalidModelDeclaration
from .fields import BaseFieldInstructor, DefaultByteOrder

__all__ = (
    'InstructorModel',
)


class Opts(object):
    # Plain attribute bag; MetaInstructor hangs the collected field
    # descriptors off an instance of this as `<Model>._meta`.
    pass


class MetaInstructor(type):
    # Metaclass that collects BaseFieldInstructor attributes declared on a
    # model class, orders them by declaration order (_order_counter), and
    # moves them off the class body into cls._meta.
    # NOTE: Python 2 only (iteritems / OrderedDict.values()[0] indexing).
    def __new__(cls, name, bases, attrs):
        declared_fields = [(key, value) for key, value in attrs.iteritems()
                           if isinstance(value, BaseFieldInstructor)]
        # Sort by the per-field creation counter so struct layout matches
        # the order fields were written in the class body.
        _fields = OrderedDict(sorted(declared_fields,
                                     key=lambda x: x[1]._order_counter))
        # The first declared field must carry the byte-order marker; every
        # later field is packed/unpacked relative to it.
        if _fields and not isinstance(_fields.values()[0], DefaultByteOrder):
            raise InvalidModelDeclaration('First field of a class must be subclass of DefaultByteOrder')
        for field_name, field in _fields.iteritems():
            field.name = field_name
            # Remove the descriptor from the class namespace so instance
            # attributes of the same name hold plain values.
            attrs.pop(field_name)
        new_cls = type.__new__(cls, name, bases, attrs)
        new_cls._meta = Opts()
        new_cls._meta.fields = _fields
        for field_name, field in _fields.iteritems():
            setattr(new_cls._meta, field_name, field)
        return new_cls


class InstructorModel(object):
    # Base class for declarative binary-struct models: construct either from
    # raw packed data (positional arg) or from per-field keyword values.
    __metaclass__ = MetaInstructor

    def __init__(self, *args, **kwargs):
        if args:
            # Unpack mode: args[0] is the raw byte string to decode.
            data = args[0]
            offset = 0
            # Field 0 is the DefaultByteOrder marker (enforced by the
            # metaclass); it is skipped in the loops below.
            byte_order = self._meta.fields.values()[0]
            try:
                for i, field in enumerate(self._meta.fields.itervalues()):
                    if i == 0:
                        continue
                    value, size = field._unpack(self, byte_order, data,
                                                offset=offset)
                    offset += size
                    setattr(self, field.name, value)
            except Exception as e:
                # Translate struct-module size errors into the package's
                # InvalidDataSize; re-raise everything else untouched.
                # NOTE(review): matching on exception message text is
                # fragile across Python/struct versions — confirm.
                if e.args[0] == 'total struct size too long':
                    raise InvalidDataSize(e.args[0])
                elif e.args[0].startswith('unpack_from requires a buffer of at least'):
                    raise InvalidDataSize(e.args[0])
                raise e
        elif kwargs:
            # Keyword mode: missing fields fall back to their declared
            # defaults via field.get_default().
            for i, field in enumerate(self._meta.fields.itervalues()):
                if i == 0:
                    continue
                value = kwargs.get(field.name, field.get_default())
                setattr(self, field.name, value)
        else:
            # Neither raw data nor keyword values supplied.
            raise InvalidData

    @classmethod
    def unpack(cls, data):
        """Alternate constructor: decode *data* into a model instance."""
        return cls(data)

    def pack(self):
        """Serialize this instance back into a packed byte string."""
        fmt = ''
        data = ''
        byte_order = self._meta.fields.values()[0]
        for i, field in enumerate(self._meta.fields.itervalues()):
            if i == 0:
                continue
            _fmt, _data = field._pack(self, byte_order)
            # NOTE(review): fmt is accumulated but never used — presumably
            # each field._pack() already returns fully packed bytes; verify
            # whether fmt can be dropped.
            fmt += _fmt
            data += _data
        return data
mit
asadziach/tensorflow
tensorflow/python/ops/histogram_ops_test.py
71
2968
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.histogram_ops."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np

from tensorflow.python.framework import dtypes
from tensorflow.python.ops import histogram_ops
from tensorflow.python.platform import test


class HistogramFixedWidthTest(test.TestCase):
  """Unit tests for histogram_fixed_width.

  Each test uses value_range [0.0, 5.0) with nbins=5, giving bins:
  (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf) — out-of-range values are
  clamped into the first/last bin.
  """

  def setUp(self):
    # Seeded RNG so any randomized fixtures are reproducible.
    self.rng = np.random.RandomState(0)

  def test_empty_input_gives_all_zero_counts(self):
    """An empty values list yields a zero count in every bin."""
    # Bins will be:
    #   (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
    value_range = [0.0, 5.0]
    values = []
    expected_bin_counts = [0, 0, 0, 0, 0]
    with self.test_session():
      hist = histogram_ops.histogram_fixed_width(values, value_range, nbins=5)

      # Default output dtype is int32.
      self.assertEqual(dtypes.int32, hist.dtype)
      self.assertAllClose(expected_bin_counts, hist.eval())

  def test_1d_values_int64_output(self):
    """dtype=int64 is honored; -1.0 and 15 land in the edge bins."""
    # Bins will be:
    #   (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
    value_range = [0.0, 5.0]
    values = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]
    expected_bin_counts = [2, 1, 1, 0, 2]
    with self.test_session():
      hist = histogram_ops.histogram_fixed_width(
          values, value_range, nbins=5, dtype=dtypes.int64)

      # Requested output dtype must be respected.
      self.assertEqual(dtypes.int64, hist.dtype)
      self.assertAllClose(expected_bin_counts, hist.eval())

  def test_1d_float64_values(self):
    """float64 inputs are accepted and still produce int32 counts."""
    # Bins will be:
    #   (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
    value_range = np.float64([0.0, 5.0])
    values = np.float64([-1.0, 0.0, 1.5, 2.0, 5.0, 15])
    expected_bin_counts = [2, 1, 1, 0, 2]
    with self.test_session():
      hist = histogram_ops.histogram_fixed_width(values, value_range, nbins=5)

      self.assertEqual(dtypes.int32, hist.dtype)
      self.assertAllClose(expected_bin_counts, hist.eval())

  def test_2d_values(self):
    """Multi-dimensional inputs are flattened before binning."""
    # Bins will be:
    #   (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
    value_range = [0.0, 5.0]
    values = [[-1.0, 0.0, 1.5], [2.0, 5.0, 15]]
    expected_bin_counts = [2, 1, 1, 0, 2]
    with self.test_session():
      hist = histogram_ops.histogram_fixed_width(values, value_range, nbins=5)

      self.assertEqual(dtypes.int32, hist.dtype)
      self.assertAllClose(expected_bin_counts, hist.eval())


if __name__ == '__main__':
  test.main()
apache-2.0
rzarzynski/tempest
tempest/scenario/test_volume_boot_pattern.py
1
8285
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from tempest_lib.common.utils import data_utils
from tempest_lib import decorators

from tempest import config
from tempest.openstack.common import log
from tempest.scenario import manager
from tempest import test

CONF = config.CONF

LOG = log.getLogger(__name__)


class TestVolumeBootPattern(manager.ScenarioTest):

    """
    This test case attempts to reproduce the following steps:

     * Create in Cinder some bootable volume importing a Glance image
     * Boot an instance from the bootable volume
     * Write content to the volume
     * Delete an instance and Boot a new instance from the volume
     * Check written content in the instance
     * Create a volume snapshot while the instance is running
     * Boot an additional instance from the new snapshot based volume
     * Check written content in the instance booted from snapshot
    """

    @classmethod
    def skip_checks(cls):
        # The scenario requires volume snapshots; skip the whole class if
        # the deployment has them disabled.
        super(TestVolumeBootPattern, cls).skip_checks()
        if not CONF.volume_feature_enabled.snapshot:
            raise cls.skipException("Cinder volume snapshots are disabled")

    def _create_volume_from_image(self):
        """Create a bootable Cinder volume from the configured Glance image."""
        img_uuid = CONF.compute.image_ref
        vol_name = data_utils.rand_name('volume-origin')
        return self.create_volume(name=vol_name, imageRef=img_uuid)

    def _boot_instance_from_volume(self, vol_id, keypair):
        """Boot a server from volume *vol_id* using the legacy BDM syntax."""
        # NOTE(gfidente): the syntax for block_device_mapping is
        # dev_name=id:type:size:delete_on_terminate
        # where type needs to be "snap" if the server is booted
        # from a snapshot, size instead can be safely left empty
        bd_map = [{
            'device_name': 'vda',
            'volume_id': vol_id,
            'delete_on_termination': '0'}]
        self.security_group = self._create_security_group()
        security_groups = [{'name': self.security_group['name']}]
        create_kwargs = {
            'block_device_mapping': bd_map,
            'key_name': keypair['name'],
            'security_groups': security_groups
        }
        # image='' because the instance boots from the volume, not an image.
        return self.create_server(image='', create_kwargs=create_kwargs)

    def _create_snapshot_from_volume(self, vol_id):
        """Snapshot *vol_id* (forced, so it works while attached)."""
        snap_name = data_utils.rand_name('snapshot')
        snap = self.snapshots_client.create_snapshot(
            volume_id=vol_id,
            force=True,
            display_name=snap_name)
        # Register a cleanup that also waits for the snapshot to be gone.
        self.addCleanup_with_wait(
            waiter_callable=self.snapshots_client.wait_for_resource_deletion,
            thing_id=snap['id'], thing_id_param='id',
            cleanup_callable=self.delete_wrapper,
            cleanup_args=[self.snapshots_client.delete_snapshot, snap['id']])
        self.snapshots_client.wait_for_snapshot_status(snap['id'], 'available')
        self.assertEqual(snap_name, snap['display_name'])
        return snap

    def _create_volume_from_snapshot(self, snap_id):
        """Create a new volume backed by snapshot *snap_id*."""
        vol_name = data_utils.rand_name('volume')
        return self.create_volume(name=vol_name, snapshot_id=snap_id)

    def _stop_instances(self, instances):
        # NOTE(gfidente): two loops so we do not wait for the status twice
        for i in instances:
            self.servers_client.stop(i['id'])
        for i in instances:
            self.servers_client.wait_for_server_status(i['id'], 'SHUTOFF')

    def _detach_volumes(self, volumes):
        # NOTE(gfidente): two loops so we do not wait for the status twice
        for v in volumes:
            self.volumes_client.detach_volume(v['id'])
        for v in volumes:
            self.volumes_client.wait_for_volume_status(v['id'], 'available')

    def _ssh_to_server(self, server, keypair):
        """Return an SSH client to *server*, via floating IP when configured."""
        if CONF.compute.use_floatingip_for_ssh:
            floating_ip = self.floating_ips_client.create_floating_ip()
            self.addCleanup(self.delete_wrapper,
                            self.floating_ips_client.delete_floating_ip,
                            floating_ip['id'])
            self.floating_ips_client.associate_floating_ip_to_server(
                floating_ip['ip'], server['id'])
            ip = floating_ip['ip']
        else:
            # Fall back to the fixed address on the configured ssh network.
            network_name_for_ssh = CONF.compute.network_for_ssh
            ip = server.networks[network_name_for_ssh][0]

        return self.get_remote_client(ip, private_key=keypair['private_key'],
                                      log_console_of_servers=[server])

    def _get_content(self, ssh_client):
        """Read back the marker file written by _write_text."""
        return ssh_client.exec_command('cat /tmp/text')

    def _write_text(self, ssh_client):
        """Write a random marker to the volume and return it for later checks."""
        text = data_utils.rand_name('text-')
        # sync so the data hits the volume before the instance is deleted.
        ssh_client.exec_command('echo "%s" > /tmp/text; sync' % (text))

        return self._get_content(ssh_client)

    def _delete_server(self, server):
        self.servers_client.delete_server(server['id'])
        self.servers_client.wait_for_server_termination(server['id'])

    def _check_content_of_written_file(self, ssh_client, expected):
        actual = self._get_content(ssh_client)
        self.assertEqual(expected, actual)

    @decorators.skip_because(bug='1373513')
    @test.idempotent_id('557cd2c2-4eb8-4dce-98be-f86765ff311b')
    @test.services('compute', 'volume', 'image')
    def test_volume_boot_pattern(self):
        """End-to-end scenario: data written to a boot volume survives
        instance deletion and is carried into snapshot-based volumes.
        """
        keypair = self.create_keypair()
        self.security_group = self._create_security_group()

        # create an instance from volume
        volume_origin = self._create_volume_from_image()
        instance_1st = self._boot_instance_from_volume(volume_origin['id'],
                                                       keypair)

        # write content to volume on instance
        ssh_client_for_instance_1st = self._ssh_to_server(instance_1st,
                                                          keypair)
        text = self._write_text(ssh_client_for_instance_1st)

        # delete instance
        self._delete_server(instance_1st)

        # create a 2nd instance from volume
        instance_2nd = self._boot_instance_from_volume(volume_origin['id'],
                                                       keypair)

        # check the content of written file
        ssh_client_for_instance_2nd = self._ssh_to_server(instance_2nd,
                                                          keypair)
        self._check_content_of_written_file(ssh_client_for_instance_2nd, text)

        # snapshot a volume
        snapshot = self._create_snapshot_from_volume(volume_origin['id'])

        # create a 3rd instance from snapshot
        volume = self._create_volume_from_snapshot(snapshot['id'])
        instance_from_snapshot = self._boot_instance_from_volume(volume['id'],
                                                                 keypair)

        # check the content of written file
        ssh_client = self._ssh_to_server(instance_from_snapshot, keypair)
        self._check_content_of_written_file(ssh_client, text)

        # NOTE(gfidente): ensure resources are in clean state for
        # deletion operations to succeed
        self._stop_instances([instance_2nd, instance_from_snapshot])
        self._detach_volumes([volume_origin, volume])


class TestVolumeBootPatternV2(TestVolumeBootPattern):
    # Same scenario as the parent class, but boots servers with the v2
    # block-device-mapping syntax instead of the legacy one.

    def _boot_instance_from_volume(self, vol_id, keypair):
        """Boot a server from volume *vol_id* using block_device_mapping_v2."""
        bdms = [{'uuid': vol_id, 'source_type': 'volume',
                 'destination_type': 'volume', 'boot_index': 0,
                 'delete_on_termination': False}]
        self.security_group = self._create_security_group()
        security_groups = [{'name': self.security_group['name']}]
        create_kwargs = {
            'block_device_mapping_v2': bdms,
            'key_name': keypair['name'],
            'security_groups': security_groups
        }
        return self.create_server(image='', create_kwargs=create_kwargs)
apache-2.0
phillxnet/rockstor-core
src/rockstor/cli/disks_console.py
2
3121
""" Copyright (c) 2012-2020 RockStor, Inc. <http://rockstor.com> This file is part of RockStor. RockStor is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. RockStor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ from base_console import BaseConsole from rest_util import api_error, api_call, print_disks_info, print_disk_info class DisksConsole(BaseConsole): def __init__(self, prompt): BaseConsole.__init__(self) self.prompt = prompt + " Disks> " self.baseurl = "%sdisks" % BaseConsole.url @api_error def do_list(self, args): url = self.baseurl if args: # print info for a single disk url = "%s/%s" % (url, args) disk_info = api_call(url) print_disk_info(disk_info, True) else: # print info for all disks disks_info = api_call(url) print_disks_info(disks_info) def help_list(self): snps = "Print details of one or all disks in the appliance" args = ("<disk_name>",) params = { "<disk_name>": ("(optional)Print details of the given disk only"), } examples = { "Print details of all disks in the system": "", "Print details of the disk named sdd": "sdd", } self.print_help(snps, "list", args, params, examples) @api_error def do_scan(self, args): url = "%s/scan" % self.baseurl api_call(url, data=None, calltype="post") self.do_list(None) def help_scan(self): snps = "Scan the system for new disks" examples = { snps: "", } self.print_help(snps, "scan", examples=examples) @api_error def do_delete(self, args): url = "%s/%s" % (self.baseurl, args) api_call(url, calltype="delete") 
print_disks_info(api_call(self.baseurl)) def help_delete(self): snps = "Delete an offlined disk" args = ("disk_name",) params = { "disk_name": ( "Name of the disk to be deleted. It must already be offlined" ), } self.print_help(snps, "delete", args=args, params=params) @api_error def do_wipe(self, args): url = "%s/%s/wipe" % (self.baseurl, args) api_call(url, calltype="post") print_disks_info(api_call(self.baseurl)) def help_wipe(self): snps = "Wipe the partition table of a disk" params = { "disk_name": "Name of the disk to be wiped of its data", } self.print_help(snps, "wipe", args=("disk_name",), params=params)
gpl-3.0
dajhorn/ps2binutils
gdb/testsuite/gdb.perf/backtrace.py
46
1825
# Copyright (C) 2013-2015 Free Software Foundation, Inc.

# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

from perftest import perftest


class BackTrace(perftest.TestCaseWithBasicMeasurements):
    """Measure the speed of GDB's backtrace command at varying dcache sizes."""

    def __init__(self, depth):
        super(BackTrace, self).__init__("backtrace")
        self.depth = depth

    def warm_up(self):
        """Prime GDB's caches by running backtrace a couple of times."""
        for _ in (0, 1):
            gdb.execute("bt", False, True)

    def _do_test(self):
        """Run the depth-limited backtrace 14 times."""
        command = "bt %d" % self.depth
        for _ in range(14):
            gdb.execute(command, False, True)

    def execute_test(self):
        """Time the backtraces at dcache line sizes 2, 4, ..., 2048.

        The total dcache size is held constant while the line size doubles
        each iteration; changing either setting clears the cache.
        """
        line_size = 2
        for _ in range(11):
            gdb.execute("set dcache line-size %d" % line_size)
            gdb.execute("set dcache size %d" % (4096 * 64 / line_size))
            self.measure.measure(self._do_test, line_size)
            line_size *= 2
gpl-2.0
wbcyclist/django-xadmin
xadmin/plugins/chart.py
17
5683
import datetime
import decimal
import calendar

from django.template import loader
from django.http import HttpResponseNotFound
from django.core.serializers.json import DjangoJSONEncoder
from django.http import HttpResponse
from django.utils.encoding import smart_unicode
from django.db import models
from django.utils.http import urlencode
from django.utils.translation import ugettext_lazy as _, ugettext

from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, ListAdminView
from xadmin.views.dashboard import ModelBaseWidget, widget_manager
from xadmin.util import lookup_field, label_for_field, force_unicode, json


@widget_manager.register
class ChartWidget(ModelBaseWidget):
    # Dashboard widget that renders one (or all) of a model admin's
    # ``data_charts`` definitions via the flot template.
    widget_type = 'chart'
    description = _('Show models simple chart.')
    template = 'xadmin/widgets/chart.html'
    widget_icon = 'fa fa-bar-chart-o'

    def convert(self, data):
        """Pull widget-specific options out of the stored widget data."""
        self.list_params = data.pop('params', {})
        self.chart = data.pop('chart', None)

    def setup(self):
        """Resolve which charts to display.

        If ``self.chart`` names a chart defined on the model admin, show
        only that one; otherwise show every chart in ``data_charts``.
        """
        super(ChartWidget, self).setup()

        self.charts = {}
        self.one_chart = False
        model_admin = self.admin_site._registry[self.model]
        chart = self.chart

        if hasattr(model_admin, 'data_charts'):
            if chart and chart in model_admin.data_charts:
                self.charts = {chart: model_admin.data_charts[chart]}
                self.one_chart = True
                if self.title is None:
                    self.title = model_admin.data_charts[chart].get('title')
            else:
                self.charts = model_admin.data_charts
                if self.title is None:
                    self.title = ugettext(
                        "%s Charts") % self.model._meta.verbose_name_plural

    def filte_choices_model(self, model, modeladmin):
        # Only offer models whose admin actually defines charts.
        return bool(getattr(modeladmin, 'data_charts', None)) and \
            super(ChartWidget, self).filte_choices_model(model, modeladmin)

    def get_chart_url(self, name, v):
        """Build the data URL for chart *name*, preserving the list filters."""
        return self.model_admin_url('chart', name) + "?" + urlencode(self.list_params)

    def context(self, context):
        """Expose the chart name/title/url triples to the template."""
        context.update({
            'charts': [{"name": name, "title": v['title'], 'url': self.get_chart_url(name, v)} for name, v in self.charts.items()],
        })

    # Media
    def media(self):
        return self.vendor('flot.js', 'xadmin.plugin.charts.js')


class JSONEncoder(DjangoJSONEncoder):
    """JSON encoder for chart payloads.

    Dates become epoch milliseconds (what flot expects on a time axis),
    decimals become strings, and anything else unserializable falls back
    to its unicode representation.
    """

    def default(self, o):
        if isinstance(o, (datetime.date, datetime.datetime)):
            # Epoch milliseconds for flot's time-mode x axis.
            return calendar.timegm(o.timetuple()) * 1000
        elif isinstance(o, decimal.Decimal):
            return str(o)
        else:
            try:
                return super(JSONEncoder, self).default(o)
            except Exception:
                return smart_unicode(o)


class ChartsPlugin(BaseAdminPlugin):
    """List-view plugin that injects the charts panel above the results."""

    data_charts = {}

    def init_request(self, *args, **kwargs):
        # Activate only when the model admin defines at least one chart.
        return bool(self.data_charts)

    def get_chart_url(self, name, v):
        """Data URL for chart *name*, carrying the current query string."""
        return self.admin_view.model_admin_url('chart', name) + self.admin_view.get_query_string()

    # Media
    def get_media(self, media):
        return media + self.vendor('flot.js', 'xadmin.plugin.charts.js')

    # Block Views
    def block_results_top(self, context, nodes):
        """Render the charts block at the top of the change-list results."""
        context.update({
            'charts': [{"name": name, "title": v['title'], 'url': self.get_chart_url(name, v)} for name, v in self.data_charts.items()],
        })
        nodes.append(loader.render_to_string('xadmin/blocks/model_list.results_top.charts.html', context_instance=context))


class ChartsView(ListAdminView):
    """AJAX view returning one chart's series data as JSON for flot."""

    data_charts = {}

    def get_ordering(self):
        # A chart may declare its own ordering; otherwise fall back to the
        # list view's ordering.
        if 'order' in self.chart:
            return self.chart['order']
        else:
            return super(ChartsView, self).get_ordering()

    def get(self, request, name):
        """Serialize the chart named *name* as flot-compatible JSON.

        Returns 404 when the name is not a configured chart.
        """
        if name not in self.data_charts:
            return HttpResponseNotFound()

        self.chart = self.data_charts[name]

        self.x_field = self.chart['x-field']
        y_fields = self.chart['y-field']
        # Normalize a single y-field to a one-element tuple.
        self.y_fields = (y_fields,) if type(y_fields) not in (list, tuple) else y_fields

        # One series per y-field, labelled with the field's verbose name.
        datas = [{"data":[], "label": force_unicode(label_for_field(
            i, self.model, model_admin=self))} for i in self.y_fields]

        self.make_result_list()

        # Collect one (x, y) point per object per series.
        for obj in self.result_list:
            xf, attrs, value = lookup_field(self.x_field, obj, self)
            for i, yfname in enumerate(self.y_fields):
                yf, yattrs, yv = lookup_field(yfname, obj, self)
                datas[i]["data"].append((value, yv))

        option = {'series': {'lines': {'show': True}, 'points': {'show': False}},
                  'grid': {'hoverable': True, 'clickable': True}}

        try:
            xfield = self.opts.get_field(self.x_field)
            if type(xfield) in (models.DateTimeField, models.DateField, models.TimeField):
                # Tell flot to treat the x axis as time, with a tick format
                # matching the field's precision.
                option['xaxis'] = {'mode': "time", 'tickLength': 5}
                if type(xfield) is models.DateField:
                    option['xaxis']['timeformat'] = "%y/%m/%d"
                elif type(xfield) is models.TimeField:
                    option['xaxis']['timeformat'] = "%H:%M:%S"
                else:
                    option['xaxis']['timeformat'] = "%y/%m/%d %H:%M:%S"
        except Exception:
            # Non-model x-fields (callables, annotations) have no field
            # metadata; plot them on a plain numeric axis.
            pass

        # Chart-level overrides win over the defaults above.
        option.update(self.chart.get('option', {}))

        content = {'data': datas, 'option': option}
        result = json.dumps(content, cls=JSONEncoder, ensure_ascii=False)

        return HttpResponse(result)


site.register_plugin(ChartsPlugin, ListAdminView)
site.register_modelview(r'^chart/(.+)/$', ChartsView, name='%s_%s_chart')
bsd-3-clause
soft-matter/mr
mr/tests/test_feature_saving.py
1
1721
import unittest
import nose
from numpy.testing import assert_almost_equal, assert_allclose
from numpy.testing.decorators import slow
from pandas.util.testing import (assert_series_equal, assert_frame_equal)
import os
from tempfile import NamedTemporaryFile

import pandas as pd
from pandas import DataFrame, Series

import mr
import sqlite3

path, _ = os.path.split(os.path.abspath(__file__))


class TestFeatureSaving(unittest.TestCase):
    """Check that mr.batch writes identical features to SQLite and HDF5.

    ``setUp`` computes a reference result with no storage backend; each
    test re-runs the batch through a backend and compares frames.
    """

    def setUp(self):
        # In-memory DB so nothing is left on disk between runs.
        self.db_conn = sqlite3.connect(':memory:')
        directory = os.path.join(path, 'video', 'image_sequence')
        self.v = mr.ImageSequence(directory)
        self.PARAMS = (11, 3000)
        with NamedTemporaryFile() as temp:
            # Reference result, computed without any storage backend.
            self.expected = mr.batch(self.v[[0, 1]], *self.PARAMS,
                                     meta=temp.name)

    def test_sqlite(self):
        """Features routed through SQLite match the reference result."""
        with NamedTemporaryFile() as temp:
            f = mr.batch(self.v[[0, 1]], *self.PARAMS, conn=self.db_conn,
                         sql_flavor='sqlite', table='features',
                         meta=temp.name)
        assert_frame_equal(f, self.expected)

    def test_HDFStore(self):
        """Features routed through a pandas HDFStore match the reference."""
        STORE_NAME = 'temp_for_testing.h5'
        if os.path.isfile(STORE_NAME):
            os.remove(STORE_NAME)
        try:
            store = pd.HDFStore(STORE_NAME)
        except Exception:
            # BUG FIX: SkipTest was previously instantiated but never
            # raised, so a broken HDF5 setup made the test proceed (and
            # fail on the unbound ``store``) instead of being skipped.
            # Also narrowed the bare ``except:`` so Ctrl-C still works.
            raise nose.SkipTest('Cannot make an HDF5 file. Skipping')
        try:
            with NamedTemporaryFile() as temp:
                f = mr.batch(self.v[[0, 1]], *self.PARAMS, store=store,
                             table='features', meta=temp.name)
            # Indexes differ between backends; compare values only.
            assert_frame_equal(f.reset_index(drop=True),
                               self.expected.reset_index(drop=True))
        finally:
            # Always close and delete the store, even if the comparison
            # fails, so a failed run does not poison the next one.
            store.close()
            os.remove(STORE_NAME)
gpl-3.0
alexryndin/ambari
ambari-server/src/test/python/TestAmbariServer.py
1
340374
''' Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file distributed with this work for additional information regarding copyright ownership. The ASF licenses this file to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License. ''' import os from mock.mock import patch, MagicMock, create_autospec, call import sys with patch.object(os, "geteuid", new=MagicMock(return_value=0)): from resource_management.core import sudo reload(sudo) from stacks.utils.RMFTestCase import * import traceback import datetime import errno import json import operator from optparse import OptionParser import platform import re import shutil import signal import stat import StringIO import tempfile import logging import logging.handlers import logging.config from unittest import TestCase os.environ["ROOT"] = "" from only_for_platform import get_platform, not_for_platform, only_for_platform, os_distro_value, PLATFORM_LINUX, PLATFORM_WINDOWS from ambari_commons import os_utils if get_platform() != PLATFORM_WINDOWS: from pwd import getpwnam import shutil project_dir = os.path.join(os.path.abspath(os.path.dirname(__file__)),os.path.normpath("../../../../")) shutil.copyfile(project_dir+"/ambari-server/conf/unix/ambari.properties", "/tmp/ambari.properties") # We have to use this import HACK because the filename contains a dash _search_file = os_utils.search_file def search_file_proxy(filename, searchpatch, pathsep=os.pathsep): global _search_file if "ambari.properties" in filename: 
return "/tmp/ambari.properties" return _search_file(filename, searchpatch, pathsep) os_utils.search_file = search_file_proxy with patch.object(platform, "linux_distribution", return_value = MagicMock(return_value=('Redhat', '6.4', 'Final'))): with patch("os.path.isdir", return_value = MagicMock(return_value=True)): with patch("os.access", return_value = MagicMock(return_value=True)): with patch.object(os_utils, "parse_log4j_file", return_value={'ambari.log.dir': '/var/log/ambari-server'}): with patch("platform.linux_distribution", return_value = os_distro_value): with patch("os.symlink"): with patch("glob.glob", return_value = ['/etc/init.d/postgresql-9.3']): _ambari_server_ = __import__('ambari-server') with patch("__builtin__.open"): from ambari_commons.firewall import Firewall from ambari_commons.os_check import OSCheck, OSConst from ambari_commons.os_family_impl import OsFamilyImpl, OsFamilyFuncImpl from ambari_commons.exceptions import FatalException, NonFatalException from ambari_commons.logging_utils import get_verbose, set_verbose, get_silent, set_silent, get_debug_mode, \ print_info_msg, print_warning_msg, print_error_msg from ambari_commons.os_utils import run_os_command, search_file, set_file_permissions, remove_file, copy_file, \ is_valid_filepath from ambari_server.dbConfiguration import DBMSConfigFactory, check_jdbc_drivers from ambari_server.dbConfiguration_linux import PGConfig, LinuxDBMSConfig, OracleConfig from ambari_server.properties import Properties from ambari_server.resourceFilesKeeper import ResourceFilesKeeper, KeeperException from ambari_server.serverConfiguration import configDefaults, get_java_exe_path, \ check_database_name_property, OS_FAMILY_PROPERTY, \ find_properties_file, get_ambari_properties, get_JAVA_HOME, \ parse_properties_file, read_ambari_user, update_ambari_properties, update_properties_2, write_property, find_jdk, \ get_is_active_instance, \ AMBARI_CONF_VAR, AMBARI_SERVER_LIB, JDBC_DATABASE_PROPERTY, 
JDBC_RCA_PASSWORD_FILE_PROPERTY, \ PERSISTENCE_TYPE_PROPERTY, JDBC_URL_PROPERTY, get_conf_dir, JDBC_USER_NAME_PROPERTY, JDBC_PASSWORD_PROPERTY, \ JDBC_DATABASE_NAME_PROPERTY, OS_TYPE_PROPERTY, validate_jdk, JDBC_POSTGRES_SCHEMA_PROPERTY, \ RESOURCES_DIR_PROPERTY, JDBC_RCA_PASSWORD_ALIAS, JDBC_RCA_SCHEMA_PROPERTY, IS_LDAP_CONFIGURED, \ SSL_API, SSL_API_PORT, CLIENT_API_PORT_PROPERTY,\ JDBC_CONNECTION_POOL_TYPE, JDBC_CONNECTION_POOL_ACQUISITION_SIZE, \ JDBC_CONNECTION_POOL_IDLE_TEST_INTERVAL, JDBC_CONNECTION_POOL_MAX_AGE, JDBC_CONNECTION_POOL_MAX_IDLE_TIME, \ JDBC_CONNECTION_POOL_MAX_IDLE_TIME_EXCESS,\ LDAP_MGR_PASSWORD_PROPERTY, LDAP_MGR_PASSWORD_ALIAS, JDBC_PASSWORD_FILENAME, NR_USER_PROPERTY, SECURITY_KEY_IS_PERSISTED, \ SSL_TRUSTSTORE_PASSWORD_PROPERTY, SECURITY_IS_ENCRYPTION_ENABLED, PID_DIR_PROPERTY, SSL_TRUSTSTORE_PASSWORD_ALIAS, \ SECURITY_MASTER_KEY_LOCATION, SECURITY_KEYS_DIR, LDAP_PRIMARY_URL_PROPERTY, store_password_file, \ get_pass_file_path, GET_FQDN_SERVICE_URL, JDBC_USE_INTEGRATED_AUTH_PROPERTY, SECURITY_KEY_ENV_VAR_NAME, \ JAVA_HOME_PROPERTY, JDK_NAME_PROPERTY, JCE_NAME_PROPERTY, STACK_LOCATION_KEY, SERVER_VERSION_FILE_PATH, \ COMMON_SERVICES_PATH_PROPERTY, WEBAPP_DIR_PROPERTY, SHARED_RESOURCES_DIR, BOOTSTRAP_SCRIPT, \ CUSTOM_ACTION_DEFINITIONS, BOOTSTRAP_SETUP_AGENT_SCRIPT, STACKADVISOR_SCRIPT, BOOTSTRAP_DIR_PROPERTY, MPACKS_STAGING_PATH_PROPERTY from ambari_server.serverUtils import is_server_runing, refresh_stack_hash from ambari_server.serverSetup import check_selinux, check_ambari_user, proceedJDBCProperties, SE_STATUS_DISABLED, SE_MODE_ENFORCING, configure_os_settings, \ download_and_install_jdk, prompt_db_properties, setup, \ AmbariUserChecks, AmbariUserChecksLinux, AmbariUserChecksWindows, JDKSetup, reset, setup_jce_policy, expand_jce_zip_file from ambari_server.serverUpgrade import upgrade, upgrade_local_repo, change_objects_owner, upgrade_stack, \ run_stack_upgrade, run_metainfo_upgrade, run_schema_upgrade, move_user_custom_actions, 
find_and_copy_custom_services from ambari_server.setupHttps import is_valid_https_port, setup_https, import_cert_and_key_action, get_fqdn, \ generate_random_string, get_cert_info, COMMON_NAME_ATTR, is_valid_cert_exp, NOT_AFTER_ATTR, NOT_BEFORE_ATTR, \ SSL_DATE_FORMAT, import_cert_and_key, is_valid_cert_host, setup_truststore, \ SRVR_ONE_WAY_SSL_PORT_PROPERTY, SRVR_TWO_WAY_SSL_PORT_PROPERTY, GANGLIA_HTTPS from ambari_server.setupSecurity import adjust_directory_permissions, get_alias_string, get_ldap_event_spec_names, sync_ldap, LdapSyncOptions, \ configure_ldap_password, setup_ldap, REGEX_HOSTNAME_PORT, REGEX_TRUE_FALSE, REGEX_ANYTHING, setup_master_key, \ setup_ambari_krb5_jaas, ensure_can_start_under_current_user, generate_env from ambari_server.userInput import get_YN_input, get_choice_string_input, get_validated_string_input, \ read_password from ambari_server_main import get_ulimit_open_files, ULIMIT_OPEN_FILES_KEY, ULIMIT_OPEN_FILES_DEFAULT from ambari_server.serverClassPath import ServerClassPath from ambari_server.hostUpdate import update_host_names from ambari_server.checkDatabase import check_database from ambari_server import serverConfiguration CURR_AMBARI_VERSION = "2.0.0" @patch.object(platform, "linux_distribution", new = MagicMock(return_value=('Redhat', '6.4', 'Final'))) @patch("ambari_server.dbConfiguration_linux.get_postgre_hba_dir", new = MagicMock(return_value = "/var/lib/pgsql/data")) @patch("ambari_server.dbConfiguration_linux.get_postgre_running_status", new = MagicMock(return_value = "running")) class TestAmbariServer(TestCase): def setUp(self): out = StringIO.StringIO() sys.stdout = out def tearDown(self): sys.stdout = sys.__stdout__ @not_for_platform(PLATFORM_WINDOWS) @patch("ambari_server.dbConfiguration_linux.run_os_command") def test_configure_pg_hba_ambaridb_users(self, run_os_command_method): # Prepare mocks run_os_command_method.return_value = (0, "", "") database_username = "ffdf" tf1 = tempfile.NamedTemporaryFile() # Run test 
PGConfig._configure_pg_hba_ambaridb_users(tf1.name, database_username) # Check results self.assertTrue(run_os_command_method.called) string_expected = self.get_file_string(self.get_samples_dir("configure_pg_hba_ambaridb_users1")) string_actual = self.get_file_string(tf1.name) self.assertEquals(string_expected, string_actual) pass @not_for_platform(PLATFORM_WINDOWS) @patch("__builtin__.raw_input") def test_servicename_regex(self, raw_input_method): ''' Test to make sure the service name can contain digits ''' set_silent(False) raw_input_method.return_value = "OT100" result = OracleConfig._get_validated_service_name("ambari", 1) self.assertEqual("OT100", result, "Not accepting digits") pass @not_for_platform(PLATFORM_WINDOWS) @patch("__builtin__.raw_input") def test_dbname_regex(self, raw_input_method): ''' Test to make sure the service name can contain digits ''' set_silent(False) raw_input_method.return_value = "OT100" result = LinuxDBMSConfig._get_validated_db_name("Database", "ambari") self.assertEqual("OT100", result, "Not accepting digits") pass @not_for_platform(PLATFORM_WINDOWS) def test_configure_pg_hba_postgres_user(self): tf1 = tempfile.NamedTemporaryFile() PGConfig.PG_HBA_CONF_FILE = tf1.name with open(PGConfig.PG_HBA_CONF_FILE, 'w') as fout: fout.write("\n") fout.write("local all all md5\n") fout.write("host all all 0.0.0.0/0 md5\n") fout.write("host all all ::/0 md5\n") PGConfig._configure_pg_hba_postgres_user() expected = self.get_file_string(self.get_samples_dir( "configure_pg_hba_ambaridb_users2")) result = self.get_file_string(PGConfig.PG_HBA_CONF_FILE) self.assertEqual(expected, result, "pg_hba_conf not processed") mode = oct(os.stat(PGConfig.PG_HBA_CONF_FILE)[stat.ST_MODE]) str_mode = str(mode)[-4:] self.assertEqual("0644", str_mode, "Wrong file permissions") pass @patch("__builtin__.raw_input") def test_get_choice_string_input(self, raw_input_method): prompt = "blablabla" default = "default blablabla" firstChoice = set(['yes', 'ye', 'y']) 
secondChoice = set(['no', 'n']) # test first input raw_input_method.return_value = "Y" result = get_choice_string_input(prompt, default, firstChoice, secondChoice) self.assertEquals(result, True) raw_input_method.reset_mock() # test second input raw_input_method.return_value = "N" result = get_choice_string_input(prompt, default, firstChoice, secondChoice) self.assertEquals(result, False) raw_input_method.reset_mock() # test enter pressed raw_input_method.return_value = "" result = get_choice_string_input(prompt, default, firstChoice, secondChoice) self.assertEquals(result, default) raw_input_method.reset_mock() # test wrong input list_of_return_values = ['yes', 'dsad', 'fdsfds'] def side_effect(list): return list_of_return_values.pop() raw_input_method.side_effect = side_effect result = get_choice_string_input(prompt, default, firstChoice, secondChoice) self.assertEquals(result, True) self.assertEquals(raw_input_method.call_count, 3) pass @patch("re.search") @patch("__builtin__.raw_input") @patch("getpass.getpass") def test_get_validated_string_input(self, get_pass_method, raw_input_method, re_search_method): prompt = "blabla" default = "default_pass" pattern = "pattern_pp" description = "blabla2" # check password input self.assertFalse(False, get_silent()) is_pass = True get_pass_method.return_value = "dfdsfdsfds" result = get_validated_string_input(prompt, default, pattern, description, is_pass) self.assertEquals(get_pass_method.return_value, result) get_pass_method.assure_called_once(prompt) self.assertFalse(raw_input_method.called) # check raw input get_pass_method.reset_mock() raw_input_method.reset_mock() is_pass = False raw_input_method.return_value = "dkf90ewuf0" result = get_validated_string_input(prompt, default, pattern, description, is_pass) self.assertEquals(raw_input_method.return_value, result) self.assertFalse(get_pass_method.called) raw_input_method.assure_called_once(prompt) pass @not_for_platform(PLATFORM_WINDOWS) def 
test_get_pass_file_path(self): result = get_pass_file_path("/etc/ambari/conf_file", JDBC_PASSWORD_FILENAME) self.assertEquals("/etc/ambari/password.dat", result) pass @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch.object(_ambari_server_, "setup_security") @patch("optparse.OptionParser") @patch.object(_ambari_server_, "logger") @patch("ambari_server.serverConfiguration.get_ambari_properties") @patch.object(_ambari_server_, "setup_logging") @patch.object(_ambari_server_, "init_logging") def test_main_test_setup_security(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock, logger_mock, OptionParserMock, setup_security_method): opm = OptionParserMock.return_value options = MagicMock() args = ["setup-security"] opm.parse_args.return_value = (options, args) options.dbms = None options.security_option = "setup-security" options.sid_or_sname = "sid" setup_security_method.return_value = None _ambari_server_.mainBody() _ambari_server_.mainBody() self.assertTrue(setup_security_method.called) self.assertFalse(False, get_verbose()) self.assertFalse(False, get_silent()) pass @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch.object(_ambari_server_, "setup_ambari_krb5_jaas") @patch.object(_ambari_server_, "setup_master_key") @patch.object(_ambari_server_, "setup_truststore") @patch.object(_ambari_server_, "setup_https") @patch.object(_ambari_server_, "get_validated_string_input") @patch.object(_ambari_server_, "logger") def test_setup_security(self, logger_mock, get_validated_string_input_mock, setup_https_mock, setup_truststore_mock, setup_master_key_mock, setup_ambari_krb5_jaas_mock): args = self._create_empty_options_mock() get_validated_string_input_mock.return_value = '1' _ambari_server_.setup_security(args) self.assertTrue(setup_https_mock.called) get_validated_string_input_mock.return_value = '2' _ambari_server_.setup_security(args) 
    # NOTE(review): tail of a test method whose def/decorators begin before this
    # chunk — it drives setup_security() through menu choices '3'..'5' and checks
    # that each choice dispatches to the matching handler mock.
    self.assertTrue(setup_master_key_mock.called)

    get_validated_string_input_mock.return_value = '3'
    _ambari_server_.setup_security(args)
    self.assertTrue(setup_ambari_krb5_jaas_mock.called)

    get_validated_string_input_mock.return_value = '4'
    _ambari_server_.setup_security(args)
    self.assertTrue(setup_truststore_mock.called)

    get_validated_string_input_mock.return_value = '5'
    _ambari_server_.setup_security(args)
    self.assertTrue(setup_truststore_mock.called)
    pass

  # Verifies setup_ambari_krb5_jaas(): raises NonFatalException when no jaas
  # config file exists, and rewrites keyTab/principal entries (via re.sub over
  # fileinput) when the file is present.
  @patch("re.sub")
  @patch("fileinput.FileInput")
  @patch("ambari_server.setupSecurity.get_validated_string_input")
  @patch("ambari_server.setupSecurity.search_file")
  @patch("os.path.exists")
  def test_setup_ambari_krb5_jaas(self, exists_mock, search_mock,
                                  get_validated_string_input_mock, fileinput_mock, re_sub_mock):
    search_mock.return_value = 'filepath'
    exists_mock.return_value = False

    # Negative case: jaas config file missing -> NonFatalException expected
    try:
      setup_ambari_krb5_jaas(self._create_empty_options_mock())
      self.fail("Should throw exception")
    except NonFatalException as fe:
      # Expected
      self.assertTrue("No jaas config file found at location" in fe.reason)
      pass

    # Positive case: file exists; user supplies principal then keytab path
    exists_mock.reset_mock()
    exists_mock.return_value = True
    get_validated_string_input_mock.side_effect = ['aaa@aaa.cnn', 'pathtokeytab']

    fileinput_mock.return_value = [
      'keyTab=xyz',
      'principal=xyz'
    ]

    setup_ambari_krb5_jaas(self._create_empty_options_mock())

    self.assertTrue(fileinput_mock.called)
    self.assertTrue(re_sub_mock.called)
    self.assertTrue(re_sub_mock.call_args_list, [('aaa@aaa.cnn'), ('pathtokeytab')])
    pass

  # mainBody() dispatch for the "setup" action: plain run calls setup only;
  # "-v" makes a setup failure re-raise (verbose); without -v the failure is
  # swallowed and the process exits via sys.exit.
  @patch("sys.exit")
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "start")
  @patch.object(_ambari_server_, "stop")
  @patch.object(_ambari_server_, "reset")
  @patch("optparse.OptionParser")
  @patch.object(_ambari_server_, "logger")
  @patch("ambari_server.serverConfiguration.get_ambari_properties")
  @patch.object(_ambari_server_, "setup_logging")
  @patch.object(_ambari_server_, "init_logging")
  def test_main_test_setup(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
                           logger_mock, OptionParserMock, reset_method, stop_method,
                           start_method, setup_method, exit_mock):
    opm = OptionParserMock.return_value
    options = self._create_empty_options_mock()
    args = ["setup"]
    opm.parse_args.return_value = (options, args)

    options.dbms = None
    options.sid_or_sname = "sid"

    _ambari_server_.mainBody()

    self.assertTrue(setup_method.called)
    self.assertFalse(start_method.called)
    self.assertFalse(stop_method.called)
    self.assertFalse(reset_method.called)

    self.assertFalse(False, get_verbose())
    self.assertFalse(False, get_silent())

    setup_method.reset_mock()
    start_method.reset_mock()
    stop_method.reset_mock()
    reset_method.reset_mock()
    exit_mock.reset_mock()

    # Verbose run: the exception raised by setup propagates out of mainBody()
    args = ["setup", "-v"]
    options = self._create_empty_options_mock()
    opm.parse_args.return_value = (options, args)
    options.dbms = None
    options.sid_or_sname = "sid"
    setup_method.side_effect = Exception("Unexpected error")

    try:
      _ambari_server_.mainBody()
    except Exception:
      self.assertTrue(True)

    self.assertTrue(setup_method.called)
    self.assertFalse(start_method.called)
    self.assertFalse(stop_method.called)
    self.assertFalse(reset_method.called)
    self.assertTrue(get_verbose())

    setup_method.reset_mock()
    start_method.reset_mock()
    stop_method.reset_mock()
    reset_method.reset_mock()
    exit_mock.reset_mock()

    # Non-verbose run: failure is handled internally and sys.exit is invoked
    args = ["setup"]
    options = self._create_empty_options_mock()
    opm.parse_args.return_value = (options, args)
    options.dbms = None
    options.sid_or_sname = "sid"
    options.verbose = False
    setup_method.side_effect = Exception("Unexpected error")
    _ambari_server_.mainBody()

    self.assertTrue(exit_mock.called)
    self.assertTrue(setup_method.called)
    self.assertFalse(start_method.called)
    self.assertFalse(stop_method.called)
    self.assertFalse(reset_method.called)
    self.assertFalse(get_verbose())

    pass

  # A preset --dbms of "sqlanywhere" must be mapped to database_index 5.
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch("optparse.OptionParser")
  @patch.object(_ambari_server_, "logger")
  @patch("ambari_server.serverConfiguration.get_ambari_properties")
  @patch.object(_ambari_server_, "setup_logging")
  @patch.object(_ambari_server_, "init_logging")
  def test_main_with_preset_dbms(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
                                 logger_mock, optionParserMock, setup_method):
    opm = optionParserMock.return_value
    options = self._create_empty_options_mock()
    args = ["setup"]
    opm.parse_args.return_value = (options, args)

    options.dbms = "sqlanywhere"
    options.sid_or_sname = "sname"

    _ambari_server_.mainBody()

    self.assertTrue(setup_method.called)
    self.assertEquals(options.database_index, 5)
    pass

  # mainBody() normalizes DB options via fix_database_options before setup.
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "fix_database_options")
  @patch("optparse.OptionParser")
  @patch.object(_ambari_server_, "logger")
  @patch("ambari_server.serverConfiguration.get_ambari_properties")
  @patch.object(_ambari_server_, "setup_logging")
  @patch.object(_ambari_server_, "init_logging")
  def test_fix_database_options_called(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
                                       logger_mock, optionParserMock, fixDBOptionsMock, setup_method):
    opm = optionParserMock.return_value
    options = self._create_empty_options_mock()
    args = ["setup"]
    opm.parse_args.return_value = (options, args)

    _ambari_server_.mainBody()

    self.assertTrue(setup_method.called)
    self.assertTrue(fixDBOptionsMock.called)
    set_silent(False)
    pass

  # NOTE(review): despite the name, this passes args=["setup"] and asserts the
  # setup path — presumably mirrors test_main_test_setup with sid_or_sname="sname".
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "start")
  @patch.object(_ambari_server_, "stop")
  @patch.object(_ambari_server_, "reset")
  @patch("optparse.OptionParser")
  @patch.object(_ambari_server_, "logger")
  @patch("ambari_server.serverConfiguration.get_ambari_properties")
  @patch.object(_ambari_server_, "setup_logging")
  @patch.object(_ambari_server_, "init_logging")
  def test_main_test_start(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
                           logger_mock, optionParserMock, reset_method, stop_method,
                           start_method, setup_method):
    opm = optionParserMock.return_value
    options = self._create_empty_options_mock()
    args = ["setup"]
    opm.parse_args.return_value = (options, args)

    options.dbms = None
    options.sid_or_sname = "sname"

    _ambari_server_.mainBody()

    self.assertTrue(setup_method.called)
    self.assertFalse(start_method.called)
    self.assertFalse(stop_method.called)
    self.assertFalse(reset_method.called)

    self.assertFalse(False, get_verbose())
    self.assertFalse(False, get_silent())
    pass

  # "start -g" (short debug flag) must call start and enable debug mode (Linux).
  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "start")
  @patch.object(_ambari_server_, "stop")
  @patch.object(_ambari_server_, "reset")
  def test_main_test_start_debug_short(self, reset_method, stop_method, start_method, setup_method):
    temp_args = sys.argv
    try:
      sys.argv = ["ambari-server", "start", "-g"]

      _ambari_server_.mainBody()

      self.assertFalse(setup_method.called)
      self.assertTrue(start_method.called)
      self.assertFalse(stop_method.called)
      self.assertFalse(reset_method.called)
      self.assertTrue(get_debug_mode())
    finally:
      sys.argv = temp_args
    pass

  # Same as above on Windows, where the action is "pstart".
  @only_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "start")
  @patch.object(_ambari_server_, "stop")
  @patch.object(_ambari_server_, "reset")
  def test_main_test_start_debug_short(self, reset_method, stop_method, start_method, setup_method):
    temp_args = sys.argv
    try:
      sys.argv = ["ambari-server", "pstart", "-g"]

      _ambari_server_.mainBody()

      self.assertFalse(setup_method.called)
      self.assertTrue(start_method.called)
      self.assertFalse(stop_method.called)
      self.assertFalse(reset_method.called)
      self.assertTrue(get_debug_mode())
    finally:
      sys.argv = temp_args
    pass

  # "start --debug" (long debug flag) must call start and enable debug mode (Linux).
  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "start")
  @patch.object(_ambari_server_, "stop")
  @patch.object(_ambari_server_, "reset")
  def test_main_test_start_debug_long(self, reset_method, stop_method, start_method, setup_method):
    temp_args = sys.argv
    try:
      sys.argv = ["ambari-server", "start", "--debug"]

      _ambari_server_.mainBody()

      self.assertFalse(setup_method.called)
      self.assertTrue(start_method.called)
      self.assertFalse(stop_method.called)
      self.assertFalse(reset_method.called)
      self.assertTrue(get_debug_mode())
    finally:
      sys.argv = temp_args
    pass

  # Same as above on Windows ("pstart --debug").
  @only_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "start")
  @patch.object(_ambari_server_, "stop")
  @patch.object(_ambari_server_, "reset")
  def test_main_test_start_debug_long(self, reset_method, stop_method, start_method, setup_method):
    temp_args = sys.argv
    try:
      sys.argv = ["ambari-server", "pstart", "--debug"]

      _ambari_server_.mainBody()

      self.assertFalse(setup_method.called)
      self.assertTrue(start_method.called)
      self.assertFalse(stop_method.called)
      self.assertFalse(reset_method.called)
      self.assertTrue(get_debug_mode())
    finally:
      sys.argv = temp_args
    pass

  #Backup is not yet supported on Windows
  # "backup" action dispatches to backup() and nothing else.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "start")
  @patch.object(_ambari_server_, "stop")
  @patch.object(_ambari_server_, "reset")
  @patch.object(_ambari_server_, "backup")
  @patch.object(_ambari_server_, "restore")
  @patch("optparse.OptionParser")
  @patch.object(_ambari_server_, "logger")
  @patch("ambari_server.serverConfiguration.get_ambari_properties")
  @patch.object(_ambari_server_, "setup_logging")
  @patch.object(_ambari_server_, "init_logging")
  def test_main_test_backup(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
                            logger_mock, optionParserMock, restore_mock, backup_mock,
                            reset_method, stop_method, start_method, setup_method):
    opm = optionParserMock.return_value
    options = self._create_empty_options_mock()
    args = ["backup"]
    opm.parse_args.return_value = (options, args)

    options.dbms = None
    options.sid_or_sname = "sname"

    _ambari_server_.mainBody()

    self.assertTrue(backup_mock.called)
    self.assertFalse(restore_mock.called)
    self.assertFalse(setup_method.called)
    self.assertFalse(start_method.called)
    self.assertFalse(stop_method.called)
    self.assertFalse(reset_method.called)

    self.assertFalse(False, get_verbose())
    self.assertFalse(False, get_silent())
    pass

  #Restore is not yet supported on Windows
  # "restore" action dispatches to restore() and nothing else.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "start")
  @patch.object(_ambari_server_, "stop")
  @patch.object(_ambari_server_, "reset")
  @patch.object(_ambari_server_, "backup")
  @patch.object(_ambari_server_, "restore")
  @patch("optparse.OptionParser")
  @patch.object(_ambari_server_, "logger")
  @patch("ambari_server.serverConfiguration.get_ambari_properties")
  @patch.object(_ambari_server_, "setup_logging")
  @patch.object(_ambari_server_, "init_logging")
  def test_main_test_restore(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
                             logger_mock, optionParserMock, restore_mock, backup_mock,
                             reset_method, stop_method, start_method, setup_method):
    opm = optionParserMock.return_value
    options = self._create_empty_options_mock()
    args = ["restore"]
    opm.parse_args.return_value = (options, args)

    options.dbms = None
    options.sid_or_sname = "sname"

    _ambari_server_.mainBody()

    self.assertTrue(restore_mock.called)
    self.assertFalse(backup_mock.called)
    self.assertFalse(setup_method.called)
    self.assertFalse(start_method.called)
    self.assertFalse(stop_method.called)
    self.assertFalse(reset_method.called)

    self.assertFalse(False, get_verbose())
    self.assertFalse(False, get_silent())
    pass

  # "stop" on a non-running server (Linux): only checks the running state and
  # leaves options.exit_message cleared.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "start")
  @patch.object(_ambari_server_, "is_server_runing")
  @patch.object(_ambari_server_, "reset")
  @patch("optparse.OptionParser")
  def test_main_test_stop(self, optionParserMock, reset_method, is_server_runing_method,
                          start_method, setup_method):
    opm = optionParserMock.return_value
    options = self._create_empty_options_mock()
    del options.exit_message

    args = ["stop"]
    opm.parse_args.return_value = (options, args)

    is_server_runing_method.return_value = (False, None)

    options.dbms = None
    options.sid_or_sname = "sid"

    _ambari_server_.mainBody()

    self.assertFalse(setup_method.called)
    self.assertFalse(start_method.called)
    self.assertTrue(is_server_runing_method.called)
    self.assertFalse(reset_method.called)

    self.assertFalse(False, get_verbose())
    self.assertFalse(False, get_silent())

    self.assertTrue(options.exit_message is None)
    pass

  # "stop" on Windows goes through the win32 service APIs (stop + wait).
  @only_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "start")
  @patch("os_windows.win32serviceutil.WaitForServiceStatus")
  @patch("os_windows.win32serviceutil.StopService")
  @patch("os_windows.win32serviceutil.StopServiceWithDeps")
  @patch.object(_ambari_server_, "reset")
  def test_main_test_stop(self, reset_method, service_stop_w_deps_method, service_stop_method,
                          service_status_wait_method, start_method, setup_method):
    temp_args = sys.argv
    try:
      sys.argv = ["ambari-server", "stop"]
      _ambari_server_.mainBody()

      self.assertFalse(setup_method.called)
      self.assertFalse(start_method.called)
      self.assertTrue(service_stop_w_deps_method.called)
      self.assertTrue(service_status_wait_method.called)
      self.assertFalse(reset_method.called)

      self.assertFalse(False, get_verbose())
      self.assertFalse(False, get_silent())
    finally:
      sys.argv = temp_args
    pass

  # "reset" action dispatches to reset() and nothing else.
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(_ambari_server_, "setup")
  @patch.object(_ambari_server_, "start")
  @patch.object(_ambari_server_, "stop")
  @patch.object(_ambari_server_, "reset")
  @patch("optparse.OptionParser")
  @patch.object(_ambari_server_, "logger")
  @patch("ambari_server.serverConfiguration.get_ambari_properties")
  @patch.object(_ambari_server_, "setup_logging")
  @patch.object(_ambari_server_, "init_logging")
  def test_main_test_reset(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock,
                           logger_mock, optionParserMock, reset_method, stop_method,
                           start_method, setup_method):
    opm = optionParserMock.return_value
    options = self._create_empty_options_mock()
    args = ["reset"]
    opm.parse_args.return_value = (options, args)
    options.dbms = None
    options.sid_or_sname = "sid"

    _ambari_server_.mainBody()

    self.assertFalse(setup_method.called)
    self.assertFalse(start_method.called)
    self.assertFalse(stop_method.called)
    self.assertTrue(reset_method.called)

    self.assertFalse(False, get_verbose())
    self.assertFalse(False, get_silent())
    pass

  # PGConfig._configure_postgresql_conf(): rewrites listen_addresses in a temp
  # postgresql.conf, compares against a sample file, and checks mode 0644.
  @not_for_platform(PLATFORM_WINDOWS)
  def test_configure_postgresql_conf(self):
    tf1 = tempfile.NamedTemporaryFile()
    PGConfig.POSTGRESQL_CONF_FILE = tf1.name

    with open(PGConfig.POSTGRESQL_CONF_FILE, 'w') as f:
      f.write("#listen_addresses = '127.0.0.1' #\n")
      f.write("#listen_addresses = '127.0.0.1'")

    PGConfig._configure_postgresql_conf()
    expected = self.get_file_string(self.get_samples_dir("configure_postgresql_conf1"))
    result = self.get_file_string(PGConfig.POSTGRESQL_CONF_FILE)
    self.assertEqual(expected, result, "postgresql.conf not updated")

    mode = oct(os.stat(PGConfig.POSTGRESQL_CONF_FILE)[stat.ST_MODE])
    str_mode = str(mode)[-4:]
    self.assertEqual("0644", str_mode, "Wrong file permissions")
    pass

  # PGConfig._configure_postgres(): skips reconfiguration when a pg_hba backup
  # exists; otherwise creates the backup, rewrites pg_hba (mode 0644), restarts
  # postgres when it is not already running.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch.object(PGConfig, "_restart_postgres")
  @patch.object(PGConfig, "_get_postgre_status")
  @patch.object(PGConfig, "_configure_postgresql_conf")
  @patch("ambari_server.dbConfiguration_linux.run_os_command")
  def test_configure_postgres(self, run_os_command_mock, configure_postgresql_conf_mock,
                              get_postgre_status_mock, restart_postgres_mock):
    args = MagicMock()
    properties = Properties()

    args.database_index = 0

    # drop optional attributes so DBMSConfigFactory uses its defaults
    del args.dbms
    del args.database_host
    del args.database_port
    del args.database_name
    del args.database_username
    del args.database_password
    del args.silent

    factory = DBMSConfigFactory()
    dbConfig = factory.create(args, properties)

    self.assertTrue(dbConfig.dbms, "postgres")
    self.assertTrue(dbConfig.persistence_type, "local")

    tf1 = tempfile.NamedTemporaryFile()
    tf2 = tempfile.NamedTemporaryFile()

    PGConfig.PG_HBA_CONF_FILE = tf1.name
    PGConfig.PG_HBA_CONF_FILE_BACKUP = tf2.name

    # Backup file already exists -> reconfiguration is skipped
    out = StringIO.StringIO()
    sys.stdout = out
    retcode, out1, err = dbConfig._configure_postgres()
    sys.stdout = sys.__stdout__
    self.assertEqual(0, retcode)
    self.assertEqual("Backup for pg_hba found, reconfiguration not required\n", out.getvalue())

    tf2.close()

    get_postgre_status_mock.return_value = PGConfig.PG_STATUS_RUNNING, 0, "", ""
    run_os_command_mock.return_value = 0, "", ""
    restart_postgres_mock.return_value = 0, "", ""

    rcode, out, err = dbConfig._configure_postgres()

    self.assertTrue(os.path.isfile(PGConfig.PG_HBA_CONF_FILE_BACKUP),
                    "postgresql.conf backup not created")
    self.assertTrue(run_os_command_mock.called)
    mode = oct(os.stat(PGConfig.PG_HBA_CONF_FILE)[stat.ST_MODE])
    str_mode = str(mode)[-4:]
    self.assertEqual("0644", str_mode, "Wrong file permissions")
    self.assertTrue(configure_postgresql_conf_mock.called)
    self.assertEqual(0, rcode)

    os.unlink(PGConfig.PG_HBA_CONF_FILE_BACKUP)

    get_postgre_status_mock.return_value = "stopped", 0, "", ""
    rcode, out, err = dbConfig._configure_postgres()
    self.assertEqual(0, rcode)
    os.unlink(PGConfig.PG_HBA_CONF_FILE_BACKUP)
    sys.stdout = sys.__stdout__
    pass

  # PGConfig._restart_postgres(): 0 when the process exits promptly; 1 when it
  # hangs (poll() None) and the fallback status/start command fails.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("time.sleep")
  @patch("subprocess.Popen")
  @patch("ambari_server.dbConfiguration_linux.run_os_command")
  @patch.object(PGConfig, "_get_postgre_status")
  @patch("ambari_server.dbConfiguration_linux.print_info_msg")
  def test_restart_postgres(self, printInfoMsg_mock, get_postgre_status_mock,
                            run_os_command_mock, popenMock, sleepMock):
    p = MagicMock()
    p.poll.return_value = 0
    popenMock.return_value = p
    retcode, out, err = PGConfig._restart_postgres()
    self.assertEqual(0, retcode)

    p.poll.return_value = None
    get_postgre_status_mock.return_value = "stopped", 0, "", ""
    run_os_command_mock.return_value = (1, None, None)
    retcode, out, err = PGConfig._restart_postgres()
    self.assertEqual(1, retcode)
    pass

  # run_os_command (Linux): list commands are passed through unsplit; string
  # commands are tokenized with shlex.split; returncode is propagated.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("shlex.split")
  @patch("subprocess.Popen")
  @patch("ambari_commons.os_linux.print_info_msg")
  def test_run_os_command(self, printInfoMsg_mock, popenMock, splitMock):
    p = MagicMock()
    p.communicate.return_value = (None, None)
    p.returncode = 3
    popenMock.return_value = p

    # with list arg
    cmd = ["exec", "arg"]
    run_os_command(cmd)
    self.assertFalse(splitMock.called)

    # with str arg
    resp = run_os_command("runme")
    self.assertEqual(3, resp[0])
    self.assertTrue(splitMock.called)
    pass

  # Same behavior exercised through the Windows os_utils implementation.
  @only_for_platform(PLATFORM_WINDOWS)
  @patch("shlex.split")
  @patch("subprocess.Popen")
  @patch("ambari_commons.os_windows.print_info_msg")
  def test_run_os_command(self, printInfoMsg_mock, popenMock, splitMock):
    p = MagicMock()
    p.communicate.return_value = (None, None)
    p.returncode = 3
    popenMock.return_value = p

    # with list arg
    cmd = ["exec", "arg"]
    run_os_command(cmd)
    self.assertFalse(splitMock.called)

    # with str arg
    resp = run_os_command("runme")
    self.assertEqual(3, resp[0])
    self.assertTrue(splitMock.called)
    pass

  # write_property(): the key=value pair lands in the discovered properties file.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.serverConfiguration.get_conf_dir")
  @patch("ambari_server.serverConfiguration.search_file")
  def test_write_property(self, search_file_mock, get_conf_dir_mock):
    expected_content = "key1=val1\n"

    tf1 = tempfile.NamedTemporaryFile()
    search_file_mock.return_value = tf1.name
    write_property("key1", "val1")
    result = tf1.read()
    self.assertTrue(expected_content in result)
    pass

  # Windows variant: the temp file must be closed before write_property can
  # reopen it, and is cleaned up explicitly afterwards.
  @only_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.serverConfiguration.get_conf_dir")
  @patch("ambari_server.serverConfiguration.search_file")
  def test_write_property(self, search_file_mock, get_conf_dir_mock):
    expected_content = "key1=val1\n"

    tf1 = tempfile.NamedTemporaryFile("r+b", delete=False)
    search_file_mock.return_value = tf1.name
    tf1.close()
    write_property("key1", "val1")
    hf1 = open(tf1.name, "r")
    try:
      result = hf1.read()
      self.assertTrue(expected_content in result)
    finally:
      hf1.close()
      os.unlink(tf1.name)
    pass

  # PGConfig._setup_db(): decrypts the aliased JDBC password on construction
  # and runs exactly two OS commands on the happy path.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
  @patch("ambari_server.dbConfiguration_linux.run_os_command")
  def test_setup_db(self, run_os_command_mock, decrypt_password_for_alias_mock):
    args = MagicMock()
    del args.database_index
    del args.dbms
    del args.database_host
    del args.database_port
    del args.database_name
    del args.database_username
    del args.database_password
    del args.init_script_file
    del args.drop_script_file

    properties = Properties()
    properties.process_pair(JDBC_PASSWORD_PROPERTY, get_alias_string("mypwdalias"))

    decrypt_password_for_alias_mock.return_value = "password"
    dbms = PGConfig(args, properties, "local")
    self.assertTrue(decrypt_password_for_alias_mock.called)

    run_os_command_mock.return_value = (0, None, None)
    result = dbms._setup_db()
    self.assertTrue(run_os_command_mock.called)
    self.assertEqual(run_os_command_mock.call_count, 2)
    self.assertEqual((0, None, None), result)
    pass
  # _setup_db() retries on failure: three consecutive errors -> returns the
  # last error tuple after sleeping between attempts (2 sleeps for 3 tries).
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
  @patch("time.sleep")
  @patch("ambari_server.dbConfiguration_linux.run_os_command")
  def test_setup_db_connect_attempts_fail(self, run_os_command_mock,
                                          sleep_mock, decrypt_password_for_alias_mock):
    args = MagicMock()
    del args.database_index
    del args.dbms
    del args.database_host
    del args.database_port
    del args.database_name
    del args.database_username
    del args.database_password
    del args.init_script_file
    del args.drop_script_file

    properties = Properties()
    decrypt_password_for_alias_mock.return_value = "password"
    dbms = PGConfig(args, properties, "local")

    run_os_command_mock.side_effect = [(1, "error", "error"), (1, "error", "error"),
                                       (1, "error", "error")]
    result = dbms._setup_db()
    self.assertTrue(run_os_command_mock.called)
    self.assertEqual((1, 'error', 'error') , result)
    self.assertEqual(2, sleep_mock.call_count)
    pass

  # _setup_db() succeeds on the second attempt: one sleep, success tuple returned.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
  @patch("time.sleep")
  @patch("ambari_server.dbConfiguration_linux.run_os_command")
  def test_setup_db_connect_attempts_success(self, run_os_command_mock,
                                             sleep_mock, decrypt_password_for_alias_mock):
    args = MagicMock()
    del args.database_index
    del args.dbms
    del args.database_host
    del args.database_port
    del args.database_name
    del args.database_username
    del args.database_password
    del args.init_script_file
    del args.drop_script_file

    properties = Properties()
    decrypt_password_for_alias_mock.return_value = "password"
    dbms = PGConfig(args, properties, "local")

    run_os_command_mock.side_effect = [(1, "error", "error"), (0, None, None),
                                      (0, None, None)]
    result = dbms._setup_db()
    self.assertTrue(run_os_command_mock.called)
    self.assertEqual((0, None, None) , result)
    self.assertEqual(1, sleep_mock.call_count)
    pass

  # check_selinux(): returns 0 when SELinux is disabled, and also 0 when
  # enforcing but the user confirms via get_YN_input.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.serverSetup.get_YN_input")
  @patch("ambari_server.serverSetup.run_os_command")
  def test_check_selinux(self, run_os_command_mock, getYNInput_mock):
    run_os_command_mock.return_value = (0, SE_STATUS_DISABLED, None)
    rcode = check_selinux()
    self.assertEqual(0, rcode)

    getYNInput_mock.return_value = True
    run_os_command_mock.return_value = (0, "enabled " + SE_MODE_ENFORCING, None)
    rcode = check_selinux()
    self.assertEqual(0, rcode)
    self.assertTrue(run_os_command_mock.called)
    self.assertTrue(getYNInput_mock.called)
    pass

  # get_ambari_jars(): prefers the AMBARI_SERVER_LIB env var, falling back to
  # the built-in default path (Linux).
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.serverConfiguration.print_info_msg")
  def test_get_ambari_jars(self, printInfoMsg_mock):
    env = "/ambari/jars"
    os.environ[AMBARI_SERVER_LIB] = env
    result = get_ambari_jars()
    self.assertEqual(env, result)

    del os.environ[AMBARI_SERVER_LIB]
    result = get_ambari_jars()
    self.assertEqual("/usr/lib/ambari-server", result)
    self.assertTrue(printInfoMsg_mock.called)
    pass

  # Windows variant: default jars location is the relative "lib" directory.
  @only_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.serverConfiguration.print_info_msg")
  def test_get_ambari_jars(self, printInfoMsg_mock):
    env = "\\ambari\\jars"
    os.environ[AMBARI_SERVER_LIB] = env
    result = get_ambari_jars()
    self.assertEqual(env, result)

    del os.environ[AMBARI_SERVER_LIB]
    result = get_ambari_jars()
    self.assertEqual("lib", result)
    self.assertTrue(printInfoMsg_mock.called)
    pass

  # get_conf_dir(): prefers the AMBARI_CONF_VAR env var, falling back to
  # /etc/ambari-server/conf.
  @patch("ambari_server.serverConfiguration.print_info_msg")
  def test_get_conf_dir(self, printInfoMsg_mock):
    env = "/dummy/ambari/conf"
    os.environ[AMBARI_CONF_VAR] = env
    result = get_conf_dir()
    self.assertEqual(env, result)

    del os.environ[AMBARI_CONF_VAR]
    result = get_conf_dir()
    self.assertEqual("/etc/ambari-server/conf", result)
    pass

  # Windows variant: fallback conf dir is the relative "conf" directory.
  @only_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.serverConfiguration.print_info_msg")
  def test_get_conf_dir(self, printInfoMsg_mock):
    env = "\\dummy\\ambari\\conf"
    os.environ[AMBARI_CONF_VAR] = env
    result = get_conf_dir()
    self.assertEqual(env, result)

    del os.environ[AMBARI_CONF_VAR]
    result = get_conf_dir()
    self.assertEqual("conf", result)
    pass

  # NOTE(review): leading underscore disables this test; search_file() should
  # find this very file by name and return None for a missing one.
  def _test_search_file(self):
    path = os.path.dirname(__file__)
    result = search_file(__file__, path)
    expected = os.path.abspath(__file__)
    self.assertEqual(expected, result)

    result = search_file("non_existent_file", path)
    self.assertEqual(None, result)
    pass

  # find_properties_file(): FatalException when search_file finds nothing,
  # otherwise returns the found value unchanged.
  @patch("ambari_server.serverConfiguration.search_file")
  def test_find_properties_file(self, search_file_mock):
    # Testing case when file is not found
    search_file_mock.return_value = None
    try:
      find_properties_file()
      self.fail("File not found'")
    except FatalException:
      # Expected
      pass
    self.assertTrue(search_file_mock.called)

    # Testing case when file is found
    value = MagicMock()
    search_file_mock.return_value = value
    result = find_properties_file()
    self.assertTrue(result is value)
    pass

  # read_ambari_user(): returns the configured user property, or None when unset.
  @patch("ambari_server.serverConfiguration.get_ambari_properties")
  @patch("ambari_server.serverConfiguration.Properties")
  def test_read_ambari_user(self, properties_mock, get_ambari_properties_mock):
    # Testing with defined user
    properties_mock.__getitem__.return_value = "dummy_user"
    get_ambari_properties_mock.return_value = properties_mock
    user = read_ambari_user()
    self.assertEquals(user, "dummy_user")

    # Testing with undefined user
    properties_mock.__getitem__.return_value = None
    user = read_ambari_user()
    self.assertEquals(user, None)
    pass

  # get_is_active_instance(): only the literal "true" yields True; any other
  # present value is False; a missing active.instance property defaults to True.
  @patch("ambari_server.serverConfiguration.get_ambari_properties")
  @patch("ambari_server.serverConfiguration.Properties")
  def test_read_active_instance(self, properties_mock, get_ambari_properties_mock):
    # Set up the mock
    properties_mock.propertyNames = MagicMock(return_value=['active.instance'])
    get_ambari_properties_mock.return_value = properties_mock

    # Test with explicitly set value of "false" (should return False)
    properties_mock.__getitem__.return_value = "false"
    is_active_instance = get_is_active_instance()
    self.assertFalse(is_active_instance)

    # Test with empty string (should return False)
    properties_mock.__getitem__.return_value = ""
    is_active_instance = get_is_active_instance()
    self.assertFalse(is_active_instance)

    # Test with a random string (should return False)
    properties_mock.__getitem__.return_value = "xyz"
    is_active_instance = get_is_active_instance()
    self.assertFalse(is_active_instance)

    # Test with a explicit false string (should return False)
    properties_mock.__getitem__.return_value = "false"
    is_active_instance = get_is_active_instance()
    self.assertFalse(is_active_instance)

    # Test with explicitly set value of "true" (should return True)
    properties_mock.__getitem__.return_value = "true"
    is_active_instance = get_is_active_instance()
    self.assertTrue(is_active_instance)

    # Test with missing active.instance entry (should return True)
    properties_mock.propertyNames = MagicMock(return_value=[])
    is_active_instance = get_is_active_instance()
    self.assertTrue(is_active_instance)
    pass

  # adjust_directory_permissions(): bootstrap dir recreation, recursive flag
  # propagation to set_file_permissions/change_owner, and the conditional
  # inclusion of the ambari repo file in the ownership-adjustment list.
  @patch("ambari_server.setupSecurity.get_file_owner")
  @patch("ambari_server.setupSecurity.get_ambari_repo_file_full_name")
  @patch("os.path.exists")
  @patch("ambari_server.setupSecurity.set_file_permissions")
  @patch("ambari_server.setupSecurity.get_ambari_properties")
  @patch("ambari_server.setupSecurity.get_resources_location")
  @patch("ambari_server.setupSecurity.get_value_from_properties")
  @patch("os.mkdir")
  @patch("shutil.rmtree")
  @patch("ambari_commons.os_utils.print_info_msg")
  @patch("ambari_server.setupSecurity.change_owner")
  def test_adjust_directory_permissions(self, change_owner_mock, print_info_msg_mock, rmtree_mock,
                                        mkdir_mock, get_value_from_properties_mock,
                                        get_resources_location_mock, get_ambari_properties_mock,
                                        set_file_permissions_mock, exists_mock,
                                        get_ambari_repo_file_full_name_mock, get_file_owner_mock):
    # Testing boostrap dir wipe
    properties_mock = Properties()
    properties_mock.process_pair(JDK_NAME_PROPERTY, "dummy_jdk")
    properties_mock.process_pair(JCE_NAME_PROPERTY, "dummy_jce")
    properties_mock.process_pair(JAVA_HOME_PROPERTY, "dummy_java_home")
    get_ambari_properties_mock.return_value = properties_mock
    get_value_from_properties_mock.return_value = "dummy_bootstrap_dir"
    get_resources_location_mock.return_value = "dummy_resources_dir"
    exists_mock.return_value = False

    adjust_directory_permissions("user")
    self.assertTrue(mkdir_mock.called)

    set_file_permissions_mock.reset_mock()
    change_owner_mock.reset_mock()

    # Test recursive calls
    old_adjust_owner_list = configDefaults.NR_ADJUST_OWNERSHIP_LIST
    old_change_owner_list = configDefaults.NR_CHANGE_OWNERSHIP_LIST
    try:
      configDefaults.NR_ADJUST_OWNERSHIP_LIST = [
        ( "/etc/ambari-server/conf", "755", "{0}", True ),
        ( "/etc/ambari-server/conf/ambari.properties", "644", "{0}", False )
      ]
      configDefaults.NR_CHANGE_OWNERSHIP_LIST = [
        ( "/etc/ambari-server", "{0}", True )
      ]

      adjust_directory_permissions("user")

      self.assertTrue(len(set_file_permissions_mock.call_args_list) ==
                      len(configDefaults.NR_ADJUST_OWNERSHIP_LIST))
      self.assertEquals(set_file_permissions_mock.call_args_list[0][0][3], True)
      self.assertEquals(set_file_permissions_mock.call_args_list[1][0][3], False)
      self.assertTrue(len(change_owner_mock.call_args_list) ==
                      len(configDefaults.NR_CHANGE_OWNERSHIP_LIST))
      self.assertEquals(change_owner_mock.call_args_list[0][0][2], True)
    finally:
      configDefaults.NR_ADJUST_OWNERSHIP_LIST = old_adjust_owner_list
      configDefaults.NR_CHANGE_OWNERSHIP_LIST = old_change_owner_list
      pass

    #
    # Test ambari repo file permission change call
    #

    # Test the case when ambari repo file is available

    # Reset the set_file_permissions() mock function
    set_file_permissions_mock.reset_mock()

    # Save the existing permissions list
    old_adjust_owner_list = configDefaults.NR_ADJUST_OWNERSHIP_LIST

    # Set up the mock function for os_utils.get_ambari_repo_file_full_name()
    get_ambari_repo_file_full_name_mock.return_value = "ambari.dummy.repo"

    # Set up the mock function for os_utils.get_file_owner()
    get_file_owner_mock.return_value = "dummy.root"

    # Set os.path.exists to return true when the input file is an ambari repo file
    def file_exists_side_effect(*args, **kwargs):
      if args[0] == get_ambari_repo_file_full_name_mock():
        return True
      else:
        return False

    exists_mock.side_effect = file_exists_side_effect
    exists_mock.return_value = None

    try:
      # Clear the list of files whose permissions are to be changed
      configDefaults.NR_ADJUST_OWNERSHIP_LIST = [
      ]

      # Call the function to be tested.
      adjust_directory_permissions("dummy_user")

      # Assert that set_file_permissions() was called
      self.assertTrue(set_file_permissions_mock.called)

      # One of the entries in NR_ADJUST_OWNERSHIP_LIST should be the full path to the ambari repo file.
      # These are the expected values:
      ambari_repo_file_entry = (
        get_ambari_repo_file_full_name_mock(),
        '644',
        get_file_owner_mock(),
        False
      )

      # Assert the arguments to the call set_file_permissions() - got from NR_ADJUST_OWNERSHIP_LIST
      # Flag to ensure we found our entry in the set_file_permissions() call
      entry_found = False

      for args_entry in set_file_permissions_mock.call_args_list:
        if args_entry[0][0] == ambari_repo_file_entry[0]:  # File name
          # ambari repo file name matched; assert the rest of the entries
          self.assertEquals(args_entry[0][1], ambari_repo_file_entry[1])  # Permissions
          self.assertEquals(args_entry[0][2], ambari_repo_file_entry[2])  # File owner
          self.assertEquals(args_entry[0][3], ambari_repo_file_entry[3])  # Non-recursive
          entry_found = True
          break

      # Ensure that the ambari repo file entry was found
      self.assertTrue(entry_found)
    finally:
      # Restore the permissions list
      configDefaults.NR_ADJUST_OWNERSHIP_LIST = old_adjust_owner_list
      pass

    #Test the case when ambari repo file is unavailable

    # Reset the set_file_permissions() mock function
    set_file_permissions_mock.reset_mock()

    # Save the existing permissions list
    old_adjust_owner_list = configDefaults.NR_ADJUST_OWNERSHIP_LIST

    # Set up the mock function for os_utils.get_ambari_repo_file_full_name()
    get_ambari_repo_file_full_name_mock.return_value = "ambari.dummy.repo"

    # Set up the mock function for os_utils.get_file_owner()
    get_file_owner_mock.return_value = "dummy.root"

    # Set os.path.exists to return false always
    exists_mock.side_effect = None
    exists_mock.return_value = False

    try:
      # Clear the list of files whose permissions are to be changed
      configDefaults.NR_ADJUST_OWNERSHIP_LIST = [
      ]

      # Call the function to be tested.
      adjust_directory_permissions("dummy_user")

      # One of the entries in NR_ADJUST_OWNERSHIP_LIST should be the full path to the ambari repo file.
      # These are the expected values:
      ambari_repo_file_entry = (
        get_ambari_repo_file_full_name_mock(),
        '644',
        get_file_owner_mock(),
        False
      )

      # Assert the arguments to the call set_file_permissions() - got from NR_ADJUST_OWNERSHIP_LIST
      # Flag to ensure we found our entry in the set_file_permissions() call
      entry_found = False

      for args_entry in set_file_permissions_mock.call_args_list:
        if args_entry[0][0] == ambari_repo_file_entry[0]:  # File name
          entry_found = True
          break

      # Ensure that the ambari repo file entry was not found
      self.assertFalse(entry_found)
    finally:
      # Restore the permissions list
      configDefaults.NR_ADJUST_OWNERSHIP_LIST = old_adjust_owner_list
      pass

  # set_file_permissions(): skips the OS commands for a missing file; runs two
  # commands (chmod + chown, presumably — TODO confirm) otherwise, warns on
  # failure of either, and adds "-R" only for recursive calls.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("os.path.exists")
  @patch("ambari_commons.os_linux.os_run_os_command")
  @patch("ambari_commons.os_linux.print_warning_msg")
  @patch("ambari_commons.os_utils.print_info_msg")
  def test_set_file_permissions(self, print_info_msg_mock, print_warning_msg_mock,
                                run_os_command_mock, exists_mock):
    # Testing not existent file scenario
    exists_mock.return_value = False
    set_file_permissions("dummy-file", "dummy-mod", "dummy-user", False)
    self.assertFalse(run_os_command_mock.called)
    self.assertTrue(print_info_msg_mock.called)

    run_os_command_mock.reset_mock()
    print_warning_msg_mock.reset_mock()

    # Testing OK scenario
    exists_mock.return_value = True
    run_os_command_mock.side_effect = [(0, "", ""), (0, "", "")]
    set_file_permissions("dummy-file", "dummy-mod", "dummy-user", False)
    self.assertTrue(len(run_os_command_mock.call_args_list) == 2)
    self.assertFalse(print_warning_msg_mock.called)

    run_os_command_mock.reset_mock()
    print_warning_msg_mock.reset_mock()

    # Testing first command fail
    run_os_command_mock.side_effect = [(1, "", ""), (0, "", "")]
    set_file_permissions("dummy-file", "dummy-mod", "dummy-user", False)
    self.assertTrue(len(run_os_command_mock.call_args_list) == 2)
    self.assertTrue(print_warning_msg_mock.called)

    run_os_command_mock.reset_mock()
    print_warning_msg_mock.reset_mock()

    # Testing second command fail
    run_os_command_mock.side_effect = [(0, "", ""), (1, "", "")]
    set_file_permissions("dummy-file", "dummy-mod", "dummy-user", False)
    self.assertTrue(len(run_os_command_mock.call_args_list) == 2)
    self.assertTrue(print_warning_msg_mock.called)

    run_os_command_mock.reset_mock()
    print_warning_msg_mock.reset_mock()

    # Testing recursive operation
    exists_mock.return_value = True
    run_os_command_mock.side_effect = [(0, "", ""), (0, "", "")]
    set_file_permissions("dummy-file", "dummy-mod", "dummy-user", True)
    self.assertTrue(len(run_os_command_mock.call_args_list) == 2)
    self.assertTrue("-R" in run_os_command_mock.call_args_list[0][0][0])
    self.assertTrue("-R" in run_os_command_mock.call_args_list[1][0][0])
    self.assertFalse(print_warning_msg_mock.called)

    run_os_command_mock.reset_mock()
    print_warning_msg_mock.reset_mock()

    # Testing non-recursive operation
    exists_mock.return_value = True
    run_os_command_mock.side_effect = [(0, "", ""), (0, "", "")]
    set_file_permissions("dummy-file", "dummy-mod", "dummy-user", False)
    self.assertTrue(len(run_os_command_mock.call_args_list) == 2)
    self.assertFalse("-R" in run_os_command_mock.call_args_list[0][0][0])
    self.assertFalse("-R" in run_os_command_mock.call_args_list[1][0][0])
    self.assertFalse(print_warning_msg_mock.called)

    run_os_command_mock.reset_mock()
    print_warning_msg_mock.reset_mock()
    pass

  # AmbariUserChecks._create_custom_user() (Linux): rc 0 for a new user, rc 0
  # with an info message for an already-existing user (OS rc 9), rc 1 with a
  # warning when the OS command fails.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch("ambari_server.serverSetup.get_validated_string_input")
  @patch("ambari_server.serverSetup.print_info_msg")
  @patch("ambari_server.serverSetup.print_warning_msg")
  @patch("ambari_server.serverSetup.run_os_command")
  def test_create_custom_user(self, run_os_command_mock, print_warning_msg_mock,
                              print_info_msg_mock, get_validated_string_input_mock):
    options = self._create_empty_options_mock()

    user = "dummy-user"
    get_validated_string_input_mock.return_value = user

    userChecks = AmbariUserChecks(options)

    # Testing scenario: absent user
    run_os_command_mock.side_effect = [(0, "", "")]
    result = userChecks._create_custom_user()
    self.assertFalse(print_warning_msg_mock.called)
    self.assertEquals(result, 0)
    self.assertEquals(userChecks.user, user)

    print_info_msg_mock.reset_mock()
    print_warning_msg_mock.reset_mock()
    run_os_command_mock.reset_mock()

    # Testing scenario: existing user
    run_os_command_mock.side_effect = [(9, "", "")]
    result = userChecks._create_custom_user()
    self.assertTrue("User dummy-user already exists" in str(print_info_msg_mock.call_args_list[1][0]))
    self.assertEquals(result, 0)
    self.assertEquals(userChecks.user, user)

    print_info_msg_mock.reset_mock()
    print_warning_msg_mock.reset_mock()
    run_os_command_mock.reset_mock()

    # Testing scenario: os command fail
    run_os_command_mock.side_effect = [(1, "", "")]
    result = userChecks._create_custom_user()
    self.assertTrue(print_warning_msg_mock.called)
    self.assertEquals(result, 1)
    pass

  # NOTE(review): Windows variant of the above via win32net/win32security —
  # the signature continues past the end of this chunk.
  @only_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch("win32security.LsaAddAccountRights")
  @patch("win32security.LookupAccountName")
  @patch("win32net.NetUserAdd")
  @patch("win32net.NetUserGetInfo")
  @patch("win32security.LsaOpenPolicy")
  @patch("win32net.NetGetDCName")
  @patch("ambari_server.serverSetup.get_validated_string_input")
  @patch("ambari_server.serverSetup.print_info_msg")
  @patch("ambari_server.serverSetup.print_warning_msg")
  def test_create_custom_user(self, print_warning_msg_mock, print_info_msg_mock,
                              get_validated_string_input_mock,
                              net_get_dc_name_mock, lsa_open_policy_mock,
                              net_user_get_info_mock, net_user_add_mock,
lookup_account_name_mock, lsa_add_account_rights_mock): def _reset_mocks(): get_validated_string_input_mock.reset_mock() print_info_msg_mock.reset_mock() print_warning_msg_mock.reset_mock() net_get_dc_name_mock.reset_mock() net_user_get_info_mock.reset_mock() net_user_add_mock.reset_mock() lookup_account_name_mock.reset_mock() lsa_add_account_rights_mock.reset_mock() pass options = MagicMock() user = "dummy-user" get_validated_string_input_mock.return_value = user userChecks = AmbariUserChecks(options) # Testing scenario: absent user def user_not_found(*args, **keywargs): import pywintypes raise pywintypes.error(2221) net_user_get_info_mock.side_effect = user_not_found result = userChecks._create_custom_user() self.assertTrue(print_warning_msg_mock.called) self.assertTrue(net_user_add_mock.called) self.assertEqual(str(net_user_add_mock.call_args_list[0][0]), str((None, 1, {'comment': 'Ambari user', 'password': 'dummy-user', 'flags': 513, 'name': 'dummy-user', 'priv': 1}))) self.assertEquals(result, 0) self.assertEquals(userChecks.user, ".\\" + user) _reset_mocks() # Testing scenario: existing user net_user_get_info_mock.side_effect = None net_user_get_info_mock.return_value = { "name":"dummy_user" } #lookup_account_name_mock #lsa_add_account_rights_mock result = userChecks._create_custom_user() self.assertTrue("User dummy-user already exists" in print_info_msg_mock.call_args_list[0][0][0]) self.assertEquals(result, 0) self.assertEquals(userChecks.user, ".\\" + user) self.assertFalse(net_user_add_mock.called) _reset_mocks() # Testing scenario: new domain user get_validated_string_input_mock.side_effect = ["dummy_domain\\dummy_user", "newpassword"] net_get_dc_name_mock.return_value = "dummy_dc" net_user_get_info_mock.side_effect = user_not_found result = userChecks._create_custom_user() self.assertTrue(net_get_dc_name_mock.called) self.assertEqual(str(net_get_dc_name_mock.call_args_list[0][0]), str((None, "dummy_domain"))) self.assertTrue(net_user_add_mock.called) 
self.assertEqual(str(net_user_add_mock.call_args_list[0][0]), str(('dummy_dc', 1, {'comment': 'Ambari user', 'password': 'newpassword', 'flags': 513, 'name': 'dummy_user', 'priv': 1}))) self.assertEquals(result, 0) self.assertEquals(userChecks.user, "dummy_domain\\dummy_user") _reset_mocks() # Testing scenario: existing domain user get_validated_string_input_mock.side_effect = ["dummy_domain\\dummy_user", "newpassword"] net_user_get_info_mock.side_effect = None net_user_get_info_mock.return_value = { "name":"dummy_domain\\dummy_user" } result = userChecks._create_custom_user() self.assertTrue("User dummy_domain\\dummy_user already exists" in print_info_msg_mock.call_args_list[0][0][0]) self.assertTrue(net_get_dc_name_mock.called) self.assertEqual(str(net_get_dc_name_mock.call_args_list[0][0]), str((None, "dummy_domain"))) self.assertFalse(net_user_add_mock.called) self.assertEquals(result, 0) self.assertEquals(userChecks.user, "dummy_domain\\dummy_user") pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.serverSetup.read_ambari_user") @patch("ambari_server.serverSetup.get_YN_input") @patch("ambari_server.serverSetup.get_validated_string_input") @patch("ambari_server.serverSetup.adjust_directory_permissions") @patch("ambari_server.serverSetup.run_os_command") @patch("ambari_server.serverSetup.print_error_msg") @patch("ambari_server.serverSetup.print_warning_msg") @patch("ambari_server.serverSetup.print_info_msg") def test_check_ambari_user(self, print_info_msg_mock, print_warning_msg_mock, print_error_msg_mock, run_os_command_mock, adjust_directory_permissions_mock, get_validated_string_input_mock, get_YN_input_mock, read_ambari_user_mock): def _reset_mocks(): get_YN_input_mock.reset_mock() get_validated_string_input_mock.reset_mock() run_os_command_mock.reset_mock() adjust_directory_permissions_mock.reset_mock() pass options = self._create_empty_options_mock() 
    # (Body of the Linux test_check_ambari_user, whose signature is defined above.)
    run_os_command_mock.return_value = (0, "", "")

    # Scenario: user is already defined, user does not want to reconfigure it
    read_ambari_user_mock.return_value = "dummy-user"
    get_YN_input_mock.return_value = False
    result = check_ambari_user(options)
    self.assertTrue(get_YN_input_mock.called)
    self.assertFalse(get_validated_string_input_mock.called)
    self.assertFalse(run_os_command_mock.called)
    self.assertTrue(adjust_directory_permissions_mock.called)
    self.assertEqual(result[0], 0)

    _reset_mocks()

    # Scenario: user is already defined, but user wants to reconfigure it
    read_ambari_user_mock.return_value = "dummy-user"
    get_validated_string_input_mock.return_value = "new-dummy-user"
    get_YN_input_mock.return_value = True
    result = check_ambari_user(options)
    self.assertTrue(get_YN_input_mock.called)
    self.assertTrue(result[2] == "new-dummy-user")
    self.assertTrue(get_validated_string_input_mock.called)
    self.assertTrue(adjust_directory_permissions_mock.called)
    self.assertEqual(result[0], 0)

    _reset_mocks()

    # Negative scenario: user is already defined, but user wants
    # to reconfigure it, user creation failed
    read_ambari_user_mock.return_value = "dummy-user"
    run_os_command_mock.return_value = (1, "", "")
    get_YN_input_mock.return_value = True
    result = check_ambari_user(options)
    self.assertTrue(get_YN_input_mock.called)
    self.assertTrue(get_validated_string_input_mock.called)
    self.assertTrue(run_os_command_mock.called)
    self.assertFalse(adjust_directory_permissions_mock.called)
    self.assertEqual(result[0], 1)

    _reset_mocks()

    # Scenario: user is not defined (setup process)
    read_ambari_user_mock.return_value = None
    get_YN_input_mock.return_value = True
    get_validated_string_input_mock.return_value = "dummy-user"
    run_os_command_mock.return_value = (0, "", "")
    result = check_ambari_user(options)
    self.assertTrue(get_YN_input_mock.called)
    self.assertTrue(get_validated_string_input_mock.called)
    self.assertTrue(run_os_command_mock.called)
    self.assertTrue(result[2] == "dummy-user")
    self.assertTrue(adjust_directory_permissions_mock.called)
    self.assertEqual(result[0], 0)

    _reset_mocks()

    # Scenario: user is not defined (setup process), user creation failed
    read_ambari_user_mock.return_value = None
    get_YN_input_mock.return_value = True
    run_os_command_mock.return_value = (1, "", "")
    result = check_ambari_user(options)
    self.assertTrue(get_YN_input_mock.called)
    self.assertTrue(get_validated_string_input_mock.called)
    self.assertTrue(run_os_command_mock.called)
    self.assertFalse(adjust_directory_permissions_mock.called)
    self.assertEqual(result[0], 1)

    _reset_mocks()

    # Negative scenario: user is not defined (setup process), user creation failed
    # NOTE(review): this scenario duplicates the previous one verbatim -- possibly
    # a copy-paste leftover; kept as-is.
    read_ambari_user_mock.return_value = None
    get_YN_input_mock.return_value = True
    run_os_command_mock.return_value = (1, "", "")
    result = check_ambari_user(options)
    self.assertTrue(get_YN_input_mock.called)
    self.assertTrue(get_validated_string_input_mock.called)
    self.assertTrue(run_os_command_mock.called)
    self.assertFalse(adjust_directory_permissions_mock.called)
    self.assertEqual(result[0], 1)

    _reset_mocks()

    # Scenario: user is not defined and left to be root (declining the prompt)
    read_ambari_user_mock.return_value = None
    get_YN_input_mock.return_value = False
    result = check_ambari_user(options)
    self.assertTrue(get_YN_input_mock.called)
    self.assertFalse(get_validated_string_input_mock.called)
    self.assertFalse(run_os_command_mock.called)
    self.assertTrue(result[2] == "root")
    self.assertTrue(adjust_directory_permissions_mock.called)
    self.assertEqual(result[0], 0)
    pass

  # Decorators for the Windows test_check_ambari_user (continued in the next chunk).
  @only_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch("ambari_commons.os_windows.UserHelper.add_user_privilege")
  @patch("ambari_commons.os_windows.UserHelper.create_user")
  @patch("ambari_commons.os_windows.UserHelper.find_user")
  @patch("ambari_server.serverSetup.read_ambari_user")
  @patch("ambari_server.serverSetup.get_YN_input")
  @patch("ambari_server.serverSetup.get_validated_string_input")
@patch("ambari_server.serverSetup.adjust_directory_permissions") @patch("ambari_server.serverSetup.run_os_command") @patch("ambari_server.serverSetup.print_error_msg") @patch("ambari_server.serverSetup.print_warning_msg") @patch("ambari_server.serverSetup.print_info_msg") def test_check_ambari_user(self, print_info_msg_mock, print_warning_msg_mock, print_error_msg_mock, run_os_command_mock, adjust_directory_permissions_mock, get_validated_string_input_mock, get_YN_input_mock, read_ambari_user_mock, find_user_mock, create_user_mock, add_user_privilege_mock): def _reset_mocks(): get_YN_input_mock.reset_mock() get_validated_string_input_mock.reset_mock() find_user_mock.reset_mock() create_user_mock.reset_mock() adjust_directory_permissions_mock.reset_mock() pass options = MagicMock() options.svc_user = None options.svc_password = None run_os_command_mock.return_value = (0, "", "") # Scenario: user is already defined, user does not want to reconfigure it read_ambari_user_mock.return_value = "dummy-user" get_YN_input_mock.return_value = False result = check_ambari_user(options) self.assertTrue(get_YN_input_mock.called) self.assertFalse(get_validated_string_input_mock.called) self.assertFalse(find_user_mock.called) self.assertFalse(create_user_mock.called) self.assertTrue(adjust_directory_permissions_mock.called) self.assertEqual(result[0], 0) _reset_mocks() # Scenario: user is already defined, but user wants to reconfigure it read_ambari_user_mock.return_value = "dummy-user" get_validated_string_input_mock.side_effect = ["new-dummy-user", "new_password"] get_YN_input_mock.return_value = True find_user_mock.return_value = False create_user_mock.return_value = (0, "User created") add_user_privilege_mock.return_value = (0, "Privilege added") result = check_ambari_user(options) self.assertTrue(get_YN_input_mock.called) self.assertEqual(result[2], ".\\new-dummy-user") self.assertTrue(get_validated_string_input_mock.called) 
self.assertTrue(adjust_directory_permissions_mock.called) self.assertTrue(find_user_mock.called) self.assertTrue(create_user_mock.called) self.assertTrue(add_user_privilege_mock.called) self.assertEqual(result[0], 0) _reset_mocks() # Negative scenario: user is already defined, but user wants # to reconfigure it, user creation failed read_ambari_user_mock.return_value = "dummy-user" get_validated_string_input_mock.side_effect = ["new-dummy-user", "new_password"] find_user_mock.return_value = False create_user_mock.return_value = (-1, "Failed") get_YN_input_mock.return_value = True result = check_ambari_user(options) self.assertTrue(get_YN_input_mock.called) self.assertTrue(get_validated_string_input_mock.called) self.assertTrue(create_user_mock.called) self.assertFalse(adjust_directory_permissions_mock.called) self.assertEqual(result[0], -1) _reset_mocks() # Scenario: user is not defined (setup process) read_ambari_user_mock.return_value = None get_YN_input_mock.return_value = True get_validated_string_input_mock.side_effect = ["dummy-user", "new_password"] create_user_mock.return_value = (0, "User created") result = check_ambari_user(options) self.assertTrue(get_YN_input_mock.called) self.assertTrue(get_validated_string_input_mock.called) self.assertTrue(create_user_mock.called) self.assertTrue(result[2] == ".\\dummy-user") self.assertTrue(adjust_directory_permissions_mock.called) self.assertEqual(result[0], 0) _reset_mocks() # Scenario: user is not defined, use system account (setup process) read_ambari_user_mock.return_value = None get_YN_input_mock.return_value = True get_validated_string_input_mock.side_effect = ["NT AUTHORITY\\SYSTEM"] create_user_mock.return_value = (0, "User created") result = check_ambari_user(options) self.assertTrue(get_YN_input_mock.called) self.assertTrue(get_validated_string_input_mock.called) self.assertEqual(get_validated_string_input_mock.call_count, 1) self.assertFalse(find_user_mock.called) 
self.assertFalse(create_user_mock.called) self.assertTrue(result[2] == "NT AUTHORITY\\SYSTEM") self.assertTrue(adjust_directory_permissions_mock.called) self.assertEqual(result[0], 0) _reset_mocks() # Scenario: user is not defined (setup process), user creation failed read_ambari_user_mock.return_value = None get_YN_input_mock.return_value = True get_validated_string_input_mock.side_effect = ["new-dummy-user", "new_password"] find_user_mock.return_value = False create_user_mock.return_value = (-1, "Failed") result = check_ambari_user(options) self.assertTrue(get_YN_input_mock.called) self.assertTrue(get_validated_string_input_mock.called) self.assertTrue(create_user_mock.called) self.assertFalse(adjust_directory_permissions_mock.called) self.assertEqual(result[0], -1) _reset_mocks() # Scenario: user is not defined and left to be the default read_ambari_user_mock.return_value = None get_YN_input_mock.return_value = False result = check_ambari_user(options) self.assertTrue(get_YN_input_mock.called) self.assertFalse(get_validated_string_input_mock.called) self.assertFalse(run_os_command_mock.called) self.assertTrue(result[2] == "NT AUTHORITY\\SYSTEM") self.assertTrue(adjust_directory_permissions_mock.called) self.assertEqual(result[0], 0) pass @patch("ambari_server.serverConfiguration.search_file") @patch("__builtin__.open") @patch("ambari_server.serverConfiguration.read_ambari_user") @patch("ambari_server.serverConfiguration.set_file_permissions") def test_store_password_file(self, set_file_permissions_mock, read_ambari_user_mock, open_mock, search_file_mock): search_file_mock.return_value = "/etc/ambari-server/conf/ambari.properties" open_mock.return_value = MagicMock() store_password_file("password", "passfile") self.assertTrue(set_file_permissions_mock.called) pass @patch("resource_management.core.shell.call") @patch.object(OSCheck, "get_os_family") @patch.object(OSCheck, "get_os_type") @patch.object(OSCheck, "get_os_major_version") def 
test_check_firewall_is_running(self, get_os_major_version_mock, get_os_type_mock, get_os_family_mock, shell_call_mock): get_os_major_version_mock.return_value = 18 get_os_type_mock.return_value = OSConst.OS_FEDORA get_os_family_mock.return_value = OSConst.REDHAT_FAMILY firewall_obj = Firewall().getFirewallObject() shell_call_mock.return_value = (0, "active", "err") self.assertEqual("Fedora18FirewallChecks", firewall_obj.__class__.__name__) self.assertTrue(firewall_obj.check_firewall()) shell_call_mock.return_value = (3, "", "err") self.assertFalse(firewall_obj.check_firewall()) self.assertEqual("err", firewall_obj.stderrdata) get_os_type_mock.return_value = OSConst.OS_UBUNTU get_os_family_mock.return_value = OSConst.UBUNTU_FAMILY firewall_obj = Firewall().getFirewallObject() shell_call_mock.return_value = (0, "Status: active", "err") self.assertEqual("UbuntuFirewallChecks", firewall_obj.__class__.__name__) self.assertTrue(firewall_obj.check_firewall()) shell_call_mock.return_value = (0, "Status: inactive", "err") self.assertFalse(firewall_obj.check_firewall()) self.assertEqual("err", firewall_obj.stderrdata) get_os_type_mock.return_value = "" get_os_family_mock.return_value = OSConst.SUSE_FAMILY firewall_obj = Firewall().getFirewallObject() shell_call_mock.return_value = (0, "running", "err") self.assertEqual("SuseFirewallChecks", firewall_obj.__class__.__name__) self.assertTrue(firewall_obj.check_firewall()) shell_call_mock.return_value = (0, "unused", "err") self.assertFalse(firewall_obj.check_firewall()) self.assertEqual("err", firewall_obj.stderrdata) get_os_type_mock.return_value = "" get_os_family_mock.return_value = OSConst.REDHAT_FAMILY get_os_major_version_mock.return_value = 6 firewall_obj = Firewall().getFirewallObject() shell_call_mock.return_value = (0, "Table: filter", "err") self.assertEqual("FirewallChecks", firewall_obj.__class__.__name__) self.assertTrue(firewall_obj.check_firewall()) shell_call_mock.return_value = (3, "", "err") 
self.assertFalse(firewall_obj.check_firewall()) self.assertEqual("err", firewall_obj.stderrdata) get_os_major_version_mock.return_value = 7 get_os_type_mock.return_value = "" get_os_family_mock.return_value = OSConst.REDHAT_FAMILY firewall_obj = Firewall().getFirewallObject() shell_call_mock.return_value = (0, "active\nactive", "err") self.assertEqual("RedHat7FirewallChecks", firewall_obj.__class__.__name__) self.assertTrue(firewall_obj.check_firewall()) shell_call_mock.return_value = (3, "inactive\nactive", "err") self.assertTrue(firewall_obj.check_firewall()) shell_call_mock.return_value = (3, "active\ninactive", "err") self.assertTrue(firewall_obj.check_firewall()) shell_call_mock.return_value = (3, "inactive\ninactive", "err") self.assertFalse(firewall_obj.check_firewall()) self.assertEqual("err", firewall_obj.stderrdata) pass @patch("ambari_server.setupHttps.get_validated_filepath_input") @patch("ambari_server.setupHttps.get_validated_string_input") @patch("ambari_server.setupHttps.run_os_command") @patch("ambari_server.setupHttps.get_and_persist_truststore_type") @patch("__builtin__.open") @patch("ambari_server.setupHttps.find_properties_file") @patch("ambari_server.setupHttps.run_component_https_cmd") @patch("ambari_server.setupHttps.get_delete_cert_command") @patch("ambari_server.setupHttps.get_and_persist_truststore_password") @patch("ambari_server.setupHttps.get_and_persist_truststore_path") @patch("ambari_server.setupHttps.get_YN_input") @patch("ambari_server.setupHttps.get_ambari_properties") @patch("ambari_server.setupHttps.find_jdk") def test_setup_truststore(self, find_jdk_mock, get_ambari_properties_mock, get_YN_input_mock, get_and_persist_truststore_path_mock, get_and_persist_truststore_password_mock, get_delete_cert_command_mock, run_component_https_cmd_mock, find_properties_file_mock, open_mock, get_and_persist_truststore_type_mock, run_os_command_mock, get_validated_string_input_mock, get_validated_filepath_input_mock): out = StringIO.StringIO() 
    # (Body of test_setup_truststore; "out" is created just above this chunk.)
    sys.stdout = out
    component = "component"
    command = "command"
    property = "use_ssl"
    alias = "alias"
    options = self._create_empty_options_mock()
    # Silent mode: setup must refuse to run and print a message
    set_silent(True)
    setup_truststore(options)
    self.assertEqual('setup-security is not enabled in silent mode.\n', out.getvalue())
    sys.stdout = sys.__stdout__
    # Verbose mode and jdk_path is None
    set_silent(False)
    p = get_ambari_properties_mock.return_value
    # Don't disable ssl
    get_YN_input_mock.side_effect = [False]
    get_validated_string_input_mock.return_value = "alias"
    setup_truststore(options)
    self.assertTrue(get_YN_input_mock.called)
    p.get_property.reset_mock()
    get_YN_input_mock.reset_mock()
    # Can't find jdk: must raise FatalException
    find_jdk_mock.return_value = None
    try:
      setup_truststore(options)
      self.fail("Should throw exception")
    except FatalException as fe:
      # Expected
      self.assertTrue('No JDK found, please run the "ambari-server setup" command to install a' +
                      ' JDK automatically or install any JDK manually to ' in fe.reason)
      pass
    # Verbose mode and jdk_path is not None (use_https = true)
    find_jdk_mock.return_value = "/jdk_path"
    p.get_property.side_effect = ["true"]
    get_YN_input_mock.side_effect = [True,True]
    get_and_persist_truststore_path_mock.return_value = "/truststore_path"
    get_and_persist_truststore_password_mock.return_value = "/truststore_password"
    get_delete_cert_command_mock.return_value = "rm -f"
    setup_truststore(options, True)
    self.assertTrue(get_and_persist_truststore_path_mock.called)
    self.assertTrue(get_and_persist_truststore_password_mock.called)
    self.assertTrue(get_delete_cert_command_mock.called)
    self.assertTrue(find_properties_file_mock.called)
    self.assertTrue(open_mock.called)
    self.assertTrue(p.store.called)
    self.assertTrue(run_component_https_cmd_mock.called)

    p.process_pair.reset_mock()
    get_and_persist_truststore_path_mock.reset_mock()
    get_and_persist_truststore_password_mock.reset_mock()
    get_delete_cert_command_mock.reset_mock()
    find_properties_file_mock.reset_mock()
    open_mock.reset_mock()
    p.store.reset_mock()

    # Verbose mode and jdk_path is not None (use_https = false) and import cert
    p.get_property.side_effect = ["false"]
    get_YN_input_mock.side_effect = [True,True]
    setup_truststore(options, True)
    self.assertTrue(get_and_persist_truststore_type_mock.called)
    self.assertTrue(get_and_persist_truststore_path_mock.called)
    self.assertTrue(get_and_persist_truststore_password_mock.called)
    self.assertTrue(get_delete_cert_command_mock.called)
    self.assertTrue(find_properties_file_mock.called)
    self.assertTrue(open_mock.called)
    self.assertTrue(p.store.called)
    self.assertTrue(run_component_https_cmd_mock.called)
    self.assertTrue(run_os_command_mock.called)
    self.assertTrue(get_validated_filepath_input_mock.called)

    p.process_pair.reset_mock()
    get_and_persist_truststore_type_mock.reset_mock()
    get_and_persist_truststore_path_mock.reset_mock()
    get_and_persist_truststore_password_mock.reset_mock()
    get_delete_cert_command_mock.reset_mock()
    find_properties_file_mock.reset_mock()
    open_mock.reset_mock()
    p.store.reset_mock()
    run_os_command_mock.reset_mock()
    get_validated_filepath_input_mock.reset_mock()
    pass

  @patch("__builtin__.open")
  @patch("ambari_commons.logging_utils.get_silent")
  @patch("ambari_server.setupHttps.find_jdk")
  @patch("ambari_server.setupHttps.get_ambari_properties")
  @patch("ambari_server.setupHttps.get_YN_input")
  @patch("ambari_server.setupHttps.get_and_persist_truststore_type")
  @patch("ambari_server.setupHttps.get_and_persist_truststore_path")
  @patch("ambari_server.setupHttps.get_and_persist_truststore_password")
  @patch("ambari_server.setupHttps.find_properties_file")
  @patch("ambari_server.setupHttps.get_validated_string_input")
  @patch("ambari_server.setupHttps.run_os_command")
  @patch("ambari_server.setupHttps.get_validated_filepath_input")
  @patch("ambari_server.setupHttps.get_import_cert_command")
  @patch("ambari_server.setupHttps.run_component_https_cmd")
  def test_reconfigure_truststore(self, run_component_https_cmd_mock,
                                  get_import_cert_command_mock,
                                  get_validated_filepath_input_mock, run_os_command_mock,
                                  get_validated_string_input_mock, find_properties_file_mock,
                                  get_and_persist_truststore_password_mock, get_and_persist_truststore_path_mock,
                                  get_and_persist_truststore_type_mock, get_YN_input_mock,
                                  get_ambari_properties_mock, find_jdk_mock, get_silent_mock,
                                  open_mock):
    """setup_truststore() reconfiguration prompts: the truststore settings are
    (re)collected whether the user allows or declines reconfiguration, and the
    prompt is skipped entirely when import_cert=True."""
    def reset_mocks():
      # Reset everything so each sub-scenario starts from a clean slate.
      open_mock.reset_mock()
      find_jdk_mock.reset_mock()
      get_ambari_properties_mock.reset_mock()
      get_YN_input_mock.reset_mock()
      get_and_persist_truststore_type_mock.reset_mock()
      get_and_persist_truststore_path_mock.reset_mock()
      get_and_persist_truststore_password_mock.reset_mock()
      find_properties_file_mock.reset_mock()
      get_validated_string_input_mock.reset_mock()
      run_os_command_mock.reset_mock()
      get_validated_filepath_input_mock.reset_mock()
      get_import_cert_command_mock.reset_mock()
      run_component_https_cmd_mock.reset_mock()

    # Test preconditions
    get_silent_mock.return_value = False
    find_jdk_mock.return_value = "/path"
    options = self._create_empty_options_mock()

    # Reconfiguration allowed by the user
    reset_mocks()
    get_YN_input_mock.side_effect = [True, True, True]
    setup_truststore(options)
    self.assertTrue(get_and_persist_truststore_type_mock.called)
    self.assertTrue(get_and_persist_truststore_path_mock.called)
    self.assertTrue(get_and_persist_truststore_password_mock.called)

    # Reconfiguration disallowed by the user
    reset_mocks()
    get_YN_input_mock.side_effect = [True, False]
    setup_truststore(options)
    self.assertTrue(get_and_persist_truststore_type_mock.called)
    self.assertTrue(get_and_persist_truststore_path_mock.called)
    self.assertTrue(get_and_persist_truststore_password_mock.called)

    # Reconfiguration should be disabled when 'import_cert' flag is 'True'
    reset_mocks()
    get_YN_input_mock.side_effect = [True, True]
    setup_truststore(options, True)
    self.assertTrue(get_and_persist_truststore_type_mock.called)
    self.assertTrue(get_and_persist_truststore_path_mock.called)
    self.assertTrue(get_and_persist_truststore_password_mock.called)
    self.assertTrue(get_import_cert_command_mock.called)

    pass

  @patch("ambari_server.setupHttps.adjust_directory_permissions")
  @patch("ambari_server.setupHttps.read_ambari_user")
  @patch("ambari_server.setupHttps.get_validated_string_input")
  @patch("ambari_server.setupHttps.find_properties_file")
  @patch("ambari_server.setupHttps.get_ambari_properties")
  @patch("ambari_server.setupHttps.import_cert_and_key_action")
  @patch("ambari_server.setupHttps.get_YN_input")
  @patch("__builtin__.open")
  @patch("ambari_server.setupHttps.is_root")
  @patch("ambari_server.setupHttps.is_valid_cert_host")
  @patch("ambari_server.setupHttps.is_valid_cert_exp")
  def test_setup_https(self, is_valid_cert_exp_mock, is_valid_cert_host_mock, \
                       is_root_mock, open_Mock, get_YN_input_mock, \
                       import_cert_and_key_action_mock, get_ambari_properties_mock, \
                       find_properties_file_mock, \
                       get_validated_string_input_mock, read_ambari_user_method, \
                       adjust_directory_permissions_mock):
    """setup_https(): seven cases covering enable/disable flows, cert import,
    silent mode and a property-read failure (cases run in the next chunk).
    Expected call lists are compared via their str() representation."""
    is_valid_cert_exp_mock.return_value = True
    is_valid_cert_host_mock.return_value = True
    open_Mock.return_value = file
    p = get_ambari_properties_mock.return_value

    args = MagicMock()
    args.api_ssl_port = None
    args.api_ssl = None
    args.import_cert_path = None
    args.import_key_path = None
    args.pem_password = None

    # Testing call under root
    is_root_mock.return_value = True
    read_ambari_user_method.return_value = "user"
    # Case #1: if client ssl is on and user didn't choose
    # disable ssl option and chose to import certs and keys
    p.get_property.side_effect = ["key_dir", "5555", "6666", "true", "5555", "true", "true", "5555"]
    get_YN_input_mock.side_effect = [False, True]
    get_validated_string_input_mock.side_effect = ["4444"]
    get_property_expected = "[call('security.server.keys_dir'),\n" + \
                            " call('client.api.ssl.port'),\n" + \
                            " call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
                            " call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
                            " call('api.ssl'),\n call('client.api.ssl.port')]"
    process_pair_expected = "[call('client.api.ssl.port', '4444')]"
    # (Continuation of test_setup_https; Case #1 expectations were set above.)
    set_silent(False)
    setup_https(args)
    self.assertTrue(p.process_pair.called)
    self.assertTrue(p.get_property.call_count == 8)
    self.assertEqual(str(p.get_property.call_args_list), get_property_expected)
    self.assertEqual(str(p.process_pair.call_args_list), process_pair_expected)
    self.assertTrue(p.store.called)
    self.assertTrue(import_cert_and_key_action_mock.called)

    p.process_pair.reset_mock()
    p.get_property.reset_mock()
    p.store.reset_mock()
    import_cert_and_key_action_mock.reset_mock()

    # Case #2: if client ssl is on and user chose to disable ssl option
    p.get_property.side_effect = ["key_dir", "", "true", "", "true", "false", ""]
    get_YN_input_mock.side_effect = [True]
    get_validated_string_input_mock.side_effect = ["4444"]
    get_property_expected = "[call('security.server.keys_dir'),\n" + \
                            " call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
                            " call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
                            " call('api.ssl')]"
    process_pair_expected = "[call('api.ssl', 'false')]"
    setup_https(args)
    self.assertTrue(p.process_pair.called)
    self.assertTrue(p.get_property.call_count == 6)
    self.assertEqual(str(p.get_property.call_args_list), get_property_expected)
    self.assertEqual(str(p.process_pair.call_args_list), process_pair_expected)
    self.assertTrue(p.store.called)
    self.assertFalse(import_cert_and_key_action_mock.called)

    p.process_pair.reset_mock()
    p.get_property.reset_mock()
    p.store.reset_mock()
    import_cert_and_key_action_mock.reset_mock()

    # Case #3: if client ssl is off and user chose option
    # to import cert and keys
    p.get_property.side_effect = ["key_dir", "", None, "", None, None, ""]
    get_YN_input_mock.side_effect = [True, True]
    get_validated_string_input_mock.side_effect = ["4444"]
    get_property_expected = "[call('security.server.keys_dir'),\n" + \
                            " call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
                            " call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
                            " call('api.ssl'),\n call('client.api.ssl.port')]"
    process_pair_expected = "[call('client.api.ssl.port', '4444')]"
    setup_https(args)
    self.assertTrue(p.process_pair.called)
    self.assertTrue(p.get_property.call_count == 7)
    self.assertEqual(str(p.get_property.call_args_list), get_property_expected)
    self.assertEqual(str(p.process_pair.call_args_list), process_pair_expected)
    self.assertTrue(p.store.called)
    self.assertTrue(import_cert_and_key_action_mock.called)

    p.process_pair.reset_mock()
    p.get_property.reset_mock()
    p.store.reset_mock()
    import_cert_and_key_action_mock.reset_mock()

    # Case #4: if client ssl is off and
    # user did not choose option to import cert and keys
    p.get_property.side_effect = ["key_dir", "", None, "", None]
    get_YN_input_mock.side_effect = [False]
    get_validated_string_input_mock.side_effect = ["4444"]
    get_property_expected = "[call('security.server.keys_dir'),\n" + \
                            " call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
                            " call('client.api.ssl.port'),\n call('api.ssl')]"
    process_pair_expected = "[]"
    setup_https(args)
    self.assertFalse(p.process_pair.called)
    self.assertTrue(p.get_property.call_count == 5)
    self.assertEqual(str(p.get_property.call_args_list), get_property_expected)
    self.assertEqual(str(p.process_pair.call_args_list), process_pair_expected)
    self.assertFalse(p.store.called)
    self.assertFalse(import_cert_and_key_action_mock.called)

    p.process_pair.reset_mock()
    p.get_property.reset_mock()
    p.store.reset_mock()
    import_cert_and_key_action_mock.reset_mock()

    # Case #5: if cert must be imported but wasn't imported -> setup_https
    # returns a falsy result and nothing is stored
    p.get_property.side_effect = ["key_dir", "", "false", "", "false"]
    get_YN_input_mock.side_effect = [True]
    import_cert_and_key_action_mock.side_effect = [False]
    get_validated_string_input_mock.side_effect = ["4444"]
    get_property_expected = "[call('security.server.keys_dir'),\n" + \
                            " call('client.api.ssl.port'),\n call('api.ssl'),\n" + \
                            " call('client.api.ssl.port'),\n call('api.ssl')]"
    process_pair_expected = "[call('client.api.ssl.port', '4444')]"
    self.assertFalse(setup_https(args))
    self.assertTrue(p.process_pair.called)
    self.assertTrue(p.get_property.call_count == 5)
    self.assertEqual(str(p.get_property.call_args_list), get_property_expected)
    self.assertEqual(str(p.process_pair.call_args_list), process_pair_expected)
    self.assertFalse(p.store.called)
    self.assertTrue(import_cert_and_key_action_mock.called)

    p.process_pair.reset_mock()
    p.get_property.reset_mock()
    p.store.reset_mock()
    import_cert_and_key_action_mock.reset_mock()

    # Case #6: if silent mode is enabled -> NonFatalException
    set_silent(True)
    try:
      setup_https(args)
      self.fail("Should throw exception")
    except NonFatalException as fe:
      self.assertTrue("setup-https is not enabled in silent mode" in fe.reason)

    p.process_pair.reset_mock()
    p.get_property.reset_mock()
    p.store.reset_mock()
    import_cert_and_key_action_mock.reset_mock()

    # Case #7: read property throws exception -> wrapped in FatalException
    set_silent(False)
    find_properties_file_mock.return_value = "propertyFile"
    p.get_property.side_effect = KeyError("Failed to read property")
    try:
      setup_https(args)
      self.fail("Should throw exception")
    except FatalException as fe:
      self.assertTrue("Failed to read property" in fe.reason)
    pass

  @patch("ambari_server.setupHttps.import_cert_and_key")
  def test_import_cert_and_key_action(self, import_cert_and_key_mock):
    """import_cert_and_key_action() must record the cert/key names and flip
    api.ssl to 'true' in the properties when the import succeeds."""
    import_cert_and_key_mock.return_value = True
    properties = MagicMock()
    properties.get_property.side_effect = ["key_dir", "5555", "6666", "true"]
    properties.process_pair = MagicMock()
    expect_process_pair = "[call('client.api.ssl.cert_name', 'https.crt'),\n" + \
                          " call('client.api.ssl.key_name', 'https.key'),\n" + \
                          " call('api.ssl', 'true')]"
    options = self._create_empty_options_mock()
    import_cert_and_key_action("key_dir", properties, options)

    self.assertEqual(str(properties.process_pair.call_args_list), \
                     expect_process_pair)
    pass

  # Decorators for the next test (signature continues in the next chunk).
  @patch("ambari_server.setupHttps.remove_file")
  @patch("ambari_server.setupHttps.copy_file")
  @patch("ambari_server.setupHttps.read_ambari_user")
  @patch("ambari_server.setupHttps.set_file_permissions")
  @patch("ambari_server.setupHttps.import_file_to_keystore")
@patch("__builtin__.open") @patch("ambari_server.setupHttps.run_os_command") @patch("os.path.join") @patch("os.path.isfile") @patch("__builtin__.raw_input") @patch("ambari_server.setupHttps.get_validated_string_input") @patch("ambari_server.setupHttps.is_valid_cert_host") @patch("ambari_server.setupHttps.is_valid_cert_exp") def test_ambariServerSetupWithCustomDbName(self, is_valid_cert_exp_mock, \ is_valid_cert_host_mock, \ get_validated_string_input_mock, \ raw_input_mock, \ os_path_isfile_mock, \ os_path_join_mock, run_os_command_mock, \ open_mock, import_file_to_keystore_mock, \ set_file_permissions_mock, read_ambari_user_mock, copy_file_mock, \ remove_file_mock): is_valid_cert_exp_mock.return_value = True is_valid_cert_host_mock.return_value = True os_path_isfile_mock.return_value = True get_validated_string_input_mock.return_value = "password" raw_input_mock.side_effect = \ ["cert_file_path", "key_file_path"] os_path_join_mock.side_effect = ["keystore_file_path", "keystore_file_path_tmp", \ "pass_file_path", "pass_file_path_tmp", \ "passin_file_path", "password_file_path", \ "keystore_cert_file_path", \ "keystore_cert_key_file_path", ] run_os_command_mock.return_value = (0, "", "") om = open_mock.return_value expect_import_file_to_keystore = "[call('keystore_file_path_tmp'," + \ " 'keystore_file_path'),\n" + \ " call('pass_file_path_tmp'," + \ " 'pass_file_path'),\n" + \ " call('cert_file_path'," + \ " 'keystore_cert_file_path'),\n" + \ " call('key_file_path'," + \ " 'keystore_cert_key_file_path')]" options = self._create_empty_options_mock() import_cert_and_key("key_dir", options) self.assertTrue(raw_input_mock.call_count == 2) self.assertTrue(get_validated_string_input_mock.called) self.assertEqual(os_path_join_mock.call_count, 8) self.assertTrue(set_file_permissions_mock.call_count == 1) self.assertEqual(str(import_file_to_keystore_mock.call_args_list), \ expect_import_file_to_keystore) pass @patch("ambari_server.setupHttps.remove_file") 
@patch("ambari_server.setupHttps.copy_file") @patch("ambari_server.setupHttps.generate_random_string") @patch("ambari_server.setupHttps.read_ambari_user") @patch("ambari_server.setupHttps.set_file_permissions") @patch("ambari_server.setupHttps.import_file_to_keystore") @patch("__builtin__.open") @patch("ambari_server.setupHttps.run_os_command") @patch("os.path.join") @patch("ambari_server.setupHttps.get_validated_filepath_input") @patch("ambari_server.setupHttps.get_validated_string_input") @patch("ambari_server.setupHttps.is_valid_cert_host") @patch("ambari_server.setupHttps.is_valid_cert_exp") def test_import_cert_and_key_with_empty_password(self, \ is_valid_cert_exp_mock, is_valid_cert_host_mock, get_validated_string_input_mock, get_validated_filepath_input_mock, \ os_path_join_mock, run_os_command_mock, open_mock, \ import_file_to_keystore_mock, set_file_permissions_mock, read_ambari_user_mock, generate_random_string_mock, copy_file_mock, \ remove_file_mock): is_valid_cert_exp_mock.return_value = True is_valid_cert_host_mock.return_value = True get_validated_string_input_mock.return_value = "" get_validated_filepath_input_mock.side_effect = \ ["cert_file_path", "key_file_path"] os_path_join_mock.side_effect = ["keystore_file_path", "keystore_file_path_tmp", \ "pass_file_path", "pass_file_path_tmp", \ "passin_file_path", "password_file_path", \ "keystore_cert_file_path", \ "keystore_cert_key_file_path", ] run_os_command_mock.return_value = (0, "", "") expect_import_file_to_keystore = "[call('keystore_file_path_tmp'," + \ " 'keystore_file_path'),\n" + \ " call('pass_file_path_tmp'," + \ " 'pass_file_path'),\n" + \ " call('cert_file_path'," + \ " 'keystore_cert_file_path'),\n" + \ " call('key_file_path.secured'," + \ " 'keystore_cert_key_file_path')]" options = self._create_empty_options_mock() import_cert_and_key("key_dir", options) self.assertEquals(get_validated_filepath_input_mock.call_count, 2) self.assertTrue(get_validated_string_input_mock.called) 
self.assertEquals(os_path_join_mock.call_count, 8) self.assertEquals(set_file_permissions_mock.call_count, 1) self.assertEqual(str(import_file_to_keystore_mock.call_args_list), \ expect_import_file_to_keystore) self.assertTrue(generate_random_string_mock.called) pass @patch("__builtin__.open") @patch("ambari_server.setupHttps.copy_file") @patch("ambari_server.setupHttps.is_root") @patch("ambari_server.setupHttps.read_ambari_user") @patch("ambari_server.setupHttps.set_file_permissions") @patch("ambari_server.setupHttps.import_file_to_keystore") @patch("ambari_server.setupHttps.run_os_command") @patch("os.path.join") @patch("ambari_server.setupHttps.get_validated_filepath_input") @patch("ambari_server.setupHttps.get_validated_string_input") def test_import_cert_and_key_with_incorrect_password(self, get_validated_string_input_mock, \ get_validated_filepath_input_mock, \ os_path_join_mock, \ run_os_command_mock, \ import_file_to_keystore_mock, \ set_file_permissions_mock, \ read_ambari_user_mock, \ is_root_mock, \ copy_file_mock, \ open_mock): get_validated_string_input_mock.return_value = "incorrect_password" get_validated_filepath_input_mock.return_value = 'filename' open_mock.return_value = MagicMock() os_path_join_mock.return_value = '' is_root_mock.return_value = True options = self._create_empty_options_mock() #provided password doesn't match, openssl command returns an error run_os_command_mock.return_value = (1, "", "Some error message") self.assertFalse(import_cert_and_key_action(*["key_dir", None, options])) self.assertFalse(import_cert_and_key("key_dir", options)) pass def test_is_valid_cert_exp(self): #No data in certInfo certInfo = {} is_valid = is_valid_cert_exp(certInfo) self.assertFalse(is_valid) #Issued in future issuedOn = (datetime.datetime.now() + datetime.timedelta(hours=1000)).strftime(SSL_DATE_FORMAT) expiresOn = (datetime.datetime.now() + datetime.timedelta(hours=2000)).strftime(SSL_DATE_FORMAT) certInfo = {NOT_BEFORE_ATTR: issuedOn, 
NOT_AFTER_ATTR: expiresOn} is_valid = is_valid_cert_exp(certInfo) self.assertFalse(is_valid) #Was expired issuedOn = (datetime.datetime.now() - datetime.timedelta(hours=2000)).strftime(SSL_DATE_FORMAT) expiresOn = (datetime.datetime.now() - datetime.timedelta(hours=1000)).strftime(SSL_DATE_FORMAT) certInfo = {NOT_BEFORE_ATTR: issuedOn, NOT_AFTER_ATTR: expiresOn} is_valid = is_valid_cert_exp(certInfo) self.assertFalse(is_valid) #Valid issuedOn = (datetime.datetime.now() - datetime.timedelta(hours=2000)).strftime(SSL_DATE_FORMAT) expiresOn = (datetime.datetime.now() + datetime.timedelta(hours=1000)).strftime(SSL_DATE_FORMAT) certInfo = {NOT_BEFORE_ATTR: issuedOn, NOT_AFTER_ATTR: expiresOn} is_valid = is_valid_cert_exp(certInfo) self.assertTrue(is_valid) pass @patch("ambari_server.setupHttps.get_fqdn") def test_is_valid_cert_host(self, get_fqdn_mock): #No data in certInfo certInfo = {} is_valid = is_valid_cert_host(certInfo) self.assertFalse(is_valid) #Failed to get FQDN get_fqdn_mock.return_value = None is_valid = is_valid_cert_host(certInfo) self.assertFalse(is_valid) #FQDN and Common name in certificated don't correspond get_fqdn_mock.return_value = 'host1' certInfo = {COMMON_NAME_ATTR: 'host2'} is_valid = is_valid_cert_host(certInfo) self.assertFalse(is_valid) #FQDN and Common name in certificated correspond get_fqdn_mock.return_value = 'host1' certInfo = {COMMON_NAME_ATTR: 'host1'} is_valid = is_valid_cert_host(certInfo) self.assertTrue(is_valid) pass @patch("ambari_server.setupHttps.get_ambari_properties") def test_is_valid_https_port(self, get_ambari_properties_mock): #No ambari.properties get_ambari_properties_mock.return_value = -1 is_valid = is_valid_https_port(1111) self.assertEqual(is_valid, False) #User entered port used by one way auth portOneWay = "1111" portTwoWay = "2222" validPort = "3333" get_ambari_properties_mock.return_value = {SRVR_ONE_WAY_SSL_PORT_PROPERTY: portOneWay, SRVR_TWO_WAY_SSL_PORT_PROPERTY: portTwoWay} is_valid = 
is_valid_https_port(portOneWay) self.assertEqual(is_valid, False) #User entered port used by two way auth is_valid = is_valid_https_port(portTwoWay) self.assertEqual(is_valid, False) #User entered valid port get_ambari_properties_mock.return_value = {SRVR_ONE_WAY_SSL_PORT_PROPERTY: portOneWay, SRVR_TWO_WAY_SSL_PORT_PROPERTY: portTwoWay} is_valid = is_valid_https_port(validPort) self.assertEqual(is_valid, True) pass @patch("socket.getfqdn") @patch("urllib2.urlopen") @patch("ambari_server.setupHttps.get_ambari_properties") def test_get_fqdn(self, get_ambari_properties_mock, url_open_mock, getfqdn_mock): #No ambari.properties get_ambari_properties_mock.return_value = -1 fqdn = get_fqdn() self.assertEqual(fqdn, None) #Check mbari_server.GET_FQDN_SERVICE_URL property name (AMBARI-2612) #property name should be server.fqdn.service.url self.assertEqual(GET_FQDN_SERVICE_URL, "server.fqdn.service.url") #Read FQDN from service p = MagicMock() p[GET_FQDN_SERVICE_URL] = 'someurl' get_ambari_properties_mock.return_value = p u = MagicMock() host = 'host1.domain.com' u.read.return_value = host url_open_mock.return_value = u fqdn = get_fqdn() self.assertEqual(fqdn, host) #Failed to read FQDN from service, getting from socket u.reset_mock() u.side_effect = Exception("Failed to read FQDN from service") getfqdn_mock.return_value = host fqdn = get_fqdn() self.assertEqual(fqdn, host) pass def test_get_ulimit_open_files(self): # 1 - No ambari.properties p = Properties() open_files = get_ulimit_open_files(p) self.assertEqual(open_files, ULIMIT_OPEN_FILES_DEFAULT) # 2 - With ambari.properties - ok prop_value = 65000 p.process_pair(ULIMIT_OPEN_FILES_KEY, str(prop_value)) open_files = get_ulimit_open_files(p) self.assertEqual(open_files, 65000) # 2 - With ambari.properties - default tf1 = tempfile.NamedTemporaryFile() prop_value = 0 p.process_pair(ULIMIT_OPEN_FILES_KEY, str(prop_value)) open_files = get_ulimit_open_files(p) self.assertEqual(open_files, ULIMIT_OPEN_FILES_DEFAULT) pass 
@patch("ambari_server.setupHttps.run_os_command") def test_get_cert_info(self, run_os_command_mock): # Error running openssl command path = 'path/to/certificate' run_os_command_mock.return_value = -1, None, None cert_info = get_cert_info(path) self.assertEqual(cert_info, None) #Empty result of openssl command run_os_command_mock.return_value = 0, None, None cert_info = get_cert_info(path) self.assertEqual(cert_info, None) #Positive scenario notAfter = 'Jul 3 14:12:57 2014 GMT' notBefore = 'Jul 3 14:12:57 2013 GMT' attr1_key = 'A' attr1_value = 'foo' attr2_key = 'B' attr2_value = 'bar' attr3_key = 'CN' attr3_value = 'host.domain.com' subject_pattern = '/{attr1_key}={attr1_value}/{attr2_key}={attr2_value}/{attr3_key}={attr3_value}' subject = subject_pattern.format(attr1_key=attr1_key, attr1_value=attr1_value, attr2_key=attr2_key, attr2_value=attr2_value, attr3_key=attr3_key, attr3_value=attr3_value) out_pattern = \ "notAfter={notAfter}" + os.linesep + \ "notBefore={notBefore}" + os.linesep + \ "subject={subject}" + os.linesep + \ "-----BEGIN CERTIFICATE-----" + os.linesep + \ "MIIFHjCCAwYCCQDpHKOBI+Lt0zANBgkqhkiG9w0BAQUFADBRMQswCQYDVQQGEwJV" + os.linesep + \ "..." 
+ os.linesep + \ "5lqd8XxOGSYoMOf+70BLN2sB" + os.linesep + \ "-----END CERTIFICATE-----" + os.linesep + \ "" out = out_pattern.format(notAfter=notAfter, notBefore=notBefore, subject=subject) run_os_command_mock.return_value = 0, out, None cert_info = get_cert_info(path) self.assertEqual(cert_info['notAfter'], notAfter) self.assertEqual(cert_info['notBefore'], notBefore) self.assertEqual(cert_info['subject'], subject) self.assertEqual(cert_info[attr1_key], attr1_value) self.assertEqual(cert_info[attr2_key], attr2_value) self.assertEqual(cert_info[attr3_key], attr3_value) pass @patch("__builtin__.raw_input") def test_get_validated_string_input(self, raw_input_mock): prompt = 'prompt' default_value = 'default' description = 'desc' validator = MagicMock() validator.return_value = True inputed_value1 = 'val1' inputed_value2 = 'val2' raw_input_mock.return_value = inputed_value1 input = get_validated_string_input(prompt, default_value, None, description, False, False, validator) self.assertTrue(validator.called) self.assertEqual(inputed_value1, input) validator.side_effect = [False, True] raw_input_mock.side_effect = [inputed_value1, inputed_value2] input = get_validated_string_input(prompt, default_value, None, description, False, False, validator) self.assertEqual(inputed_value2, input) pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.serverUtils.run_os_command") @patch("__builtin__.open") @patch("os.path.exists") def test_is_server_runing(self, os_path_exists_mock, open_mock, \ run_os_command_mock): os_path_exists_mock.return_value = True f = open_mock.return_value f.readline.return_value = "111" run_os_command_mock.return_value = 0, "", "" status, pid = is_server_runing() self.assertTrue(status) self.assertEqual(111, pid) os_path_exists_mock.return_value = False status, pid = is_server_runing() self.assertFalse(status) pass @only_for_platform(PLATFORM_WINDOWS) 
@patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("os_windows.win32serviceutil.QueryServiceStatus") def test_is_server_runing(self, query_service_status_mock): query_service_status_mock.return_value = ("", 4) status, desc = is_server_runing() self.assertTrue(status) self.assertEqual("", desc) query_service_status_mock.return_value = ("", 1) status, desc = is_server_runing() self.assertFalse(status) self.assertEqual("stopped", desc) pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.serverUtils.run_os_command") @patch("__builtin__.open") @patch("os.path.exists") def test_is_server_runing_bad_file(self, os_path_exists_mock, open_mock, \ run_os_command_mock): os_path_exists_mock.return_value = True f = open_mock.return_value f.readline.return_value = "" # empty file content run_os_command_mock.return_value = 0, "", "" self.assertRaises(NonFatalException, is_server_runing) open_mock.side_effect = IOError('[Errno 13] Permission denied: /var/run/ambari-server/ambari-server.pid') self.assertRaises(FatalException, is_server_runing) pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("os.path.exists") @patch("os.makedirs") @patch("os.chdir") @patch("ambari_server.serverSetup.run_os_command") def test_install_jdk(self, run_os_command_mock, os_chdir_mock, os_makedirs_mock, os_path_exists_mock): run_os_command_mock.return_value = 1, "", "" os_path_exists_mock.return_value = False failed = False try: jdkSetup = JDKSetup() jdkSetup._install_jdk(MagicMock(), MagicMock()) self.fail("Exception was not rised!") except FatalException: failed = True self.assertTrue(failed) pass @only_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("os.path.exists") @patch("os.makedirs") 
@patch("os.chdir") @patch("ambari_server.serverSetup.run_os_command") def test_install_jdk(self, run_os_command_mock, os_chdir_mock, os_makedirs_mock, os_path_exists_mock): jdk_cfg = MagicMock() jdk_cfg.inst_dir = "java_home_dir" run_os_command_mock.return_value = 1, "", "" os_path_exists_mock.return_value = False failed = False try: jdkSetup = JDKSetup() jdkSetup._install_jdk("jdk.exe", jdk_cfg) self.fail("Exception was not rised!") except FatalException: failed = True self.assertTrue(failed) pass @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.serverSetup.read_ambari_user") @patch("os.stat") @patch("os.path.isfile") @patch("os.path.exists") @patch("os.chdir") @patch("os.makedirs") @patch("ambari_server.serverSetup.JDKSetupLinux.adjust_jce_permissions") @patch("ambari_server.serverSetup.expand_jce_zip_file") @patch("ambari_server.serverSetup.force_download_file") @patch("ambari_server.serverSetup.get_YN_input") @patch("ambari_server.serverSetup.run_os_command") @patch("ambari_server.serverSetup.update_properties") @patch("ambari_server.serverSetup.get_validated_string_input") @patch("ambari_server.serverSetup.print_info_msg") @patch("ambari_server.serverSetup.validate_jdk") @patch("ambari_server.serverSetup.get_JAVA_HOME") @patch("ambari_server.serverSetup.get_resources_location") @patch("ambari_server.serverSetup.get_ambari_properties") @patch("shutil.copyfile") @patch("sys.exit") def test_download_jdk(self, exit_mock, copyfile_mock, get_ambari_properties_mock, get_resources_location_mock, get_JAVA_HOME_mock, \ validate_jdk_mock, print_info_msg_mock, get_validated_string_input_mock, update_properties_mock, \ run_os_command_mock, get_YN_input_mock, force_download_file_mock, expand_jce_zip_file_mock, adjust_jce_permissions_mock, os_makedirs_mock, os_chdir_mock, path_existsMock, path_isfileMock, statMock, read_ambari_user_mock): @OsFamilyFuncImpl(OSConst.WINSRV_FAMILY) def _init_test_jdk_mocks(): 
jdk1_url = "http://somewhere/myjdk.exe" res_location = "resources" p = Properties() p.process_pair("java.releases", "jdk1") p.process_pair("jdk1.desc", "JDK name") p.process_pair("jdk1.url", "http://somewhere/myjdk.exe") p.process_pair("jdk1.dest-file", "myjdk.exe") p.process_pair("jdk1.jcpol-url", "http://somewhere/some-jcpol.zip") p.process_pair("jdk1.jcpol-file", "some-jcpol.zip") p.process_pair("jdk1.home", "C:\\jdk1") p.process_pair("jdk1.re", "(jdk.*)/jre") p.process_pair("jdk.download.supported", "true") p.process_pair("jce.download.supported", "true") pem_side_effect1 = [False, True, False] return p, jdk1_url, res_location, pem_side_effect1 @OsFamilyFuncImpl(OsFamilyImpl.DEFAULT) def _init_test_jdk_mocks(): jdk1_url = "http://somewhere/somewhere.tar.gz" res_location = MagicMock() p = Properties() p.process_pair("java.releases", "jdk1") p.process_pair("jdk1.desc", "JDK name") p.process_pair("jdk1.url", jdk1_url) p.process_pair("jdk1.dest-file", "somewhere.tar.gz") p.process_pair("jdk1.jcpol-url", "http://somewhere/some-jcpol.tar.gz") p.process_pair("jdk1.jcpol-file", "some-jcpol.tar.gz") p.process_pair("jdk1.home", "/jdk1") p.process_pair("jdk1.re", "(jdk.*)/jre") p.process_pair("jdk.download.supported", "true") p.process_pair("jce.download.supported", "true") pem_side_effect1 = [True, False, True, False] return p, jdk1_url, res_location, pem_side_effect1 args = MagicMock() args.java_home = "somewhere" args.silent = False p, jdk1_url, res_location, pem_side_effect1 = _init_test_jdk_mocks() validate_jdk_mock.return_value = False path_existsMock.return_value = False get_resources_location_mock.return_value = res_location get_JAVA_HOME_mock.return_value = False read_ambari_user_mock.return_value = "ambari" get_ambari_properties_mock.return_value = p # Test case: ambari.properties not found try: download_and_install_jdk(args) self.fail("Should throw exception because of not found ambari.properties") except FatalException: # Expected 
self.assertTrue(get_ambari_properties_mock.called) pass # Test case: JDK already exists args.java_home = None args.jdk_location = None get_JAVA_HOME_mock.return_value = "some_jdk" validate_jdk_mock.return_value = True get_YN_input_mock.return_value = False path_existsMock.return_value = False run_os_command_mock.return_value = 0, "", "" rcode = download_and_install_jdk(args) self.assertEqual(0, rcode) # Test case: java home setup args.java_home = "somewhere" validate_jdk_mock.return_value = True path_existsMock.return_value = False get_JAVA_HOME_mock.return_value = None rcode = download_and_install_jdk(args) self.assertEqual(0, rcode) self.assertTrue(update_properties_mock.called) # Test case: JDK file does not exist, property not defined validate_jdk_mock.return_value = False path_existsMock.return_value = False get_ambari_properties_mock.return_value = p p.removeProp("jdk1.url") try: download_and_install_jdk(args) self.fail("Should throw exception") except FatalException: # Expected pass # Test case: JDK file does not exist, HTTP response does not # contain Content-Length p.process_pair("jdk1.url", jdk1_url) validate_jdk_mock.return_value = False path_existsMock.return_value = False get_YN_input_mock.return_value = True get_validated_string_input_mock.return_value = "1" run_os_command_mock.return_value = (0, "Wrong out", None) try: download_and_install_jdk(args) self.fail("Should throw exception") except FatalException: # Expected pass # Successful JDK download args.java_home = None validate_jdk_mock.return_value = False path_existsMock.reset_mock() path_existsMock.side_effect = [False, False, False] path_isfileMock.return_value = False args.jdk_location = None run_os_command_mock.return_value = (0, "Creating jdk1/jre", None) statResult = MagicMock() statResult.st_size = 32000 statMock.return_value = statResult try: rcode = download_and_install_jdk(args) except Exception, e: raise self.assertEqual(0, rcode) # Test case: not accept the license" 
get_YN_input_mock.return_value = False path_existsMock.reset_mock() path_existsMock.side_effect = [False, False, True, False, True, False] download_and_install_jdk(args) self.assertTrue(exit_mock.called) # Test case: jdk is already installed, ensure that JCE check is skipped if -j option is not supplied. args.jdk_location = None get_JAVA_HOME_mock.return_value = "some_jdk" validate_jdk_mock.return_value = True get_YN_input_mock.return_value = False path_existsMock.reset_mock() path_existsMock.side_effect = pem_side_effect1 force_download_file_mock.reset_mock() with patch("ambari_server.serverSetup.JDKSetup._download_jce_policy") as download_jce_policy_mock: rcode = download_and_install_jdk(args) self.assertFalse(download_jce_policy_mock.called) self.assertFalse(force_download_file_mock.called) # Test case: Update JAVA_HOME location using command: ambari-server setup -j %NEW_LOCATION% update_properties_mock.reset_mock() args.java_home = "somewhere" validate_jdk_mock.return_value = True path_existsMock.reset_mock() path_existsMock.side_effect = pem_side_effect1 get_JAVA_HOME_mock.return_value = "some_jdk" path_isfileMock.return_value = True download_and_install_jdk(args) self.assertTrue(update_properties_mock.call_count == 1) # Test case: Negative test case JAVA_HOME location should not be updated if -j option is supplied and # jce_policy file already exists in resources dir. 
#write_property_mock.reset_mock() #args.java_home = "somewhere" #path_existsMock.side_effect = None #path_existsMock.return_value = True #get_JAVA_HOME_mock.return_value = "some_jdk" #try: # download_and_install_jdk(args) # self.fail("Should throw exception") #except FatalException as fe: # Expected # self.assertFalse(write_property_mock.called) # Test case: Setup ambari-server first time, Custom JDK selected, JDK exists args.java_home = None args.jdk_location = None validate_jdk_mock.return_value = False update_properties_mock.reset_mock() path_existsMock.reset_mock() path_existsMock.side_effect = [True, True, True, True] get_validated_string_input_mock.return_value = "2" get_JAVA_HOME_mock.return_value = None rcode = download_and_install_jdk(args) self.assertEqual(0, rcode) self.assertTrue(update_properties_mock.called) # Test case: Setup ambari-server first time, Custom JDK selected, JDK not exists update_properties_mock.reset_mock() validate_jdk_mock.return_value = False path_existsMock.reset_mock() path_existsMock.side_effect = pem_side_effect1 get_validated_string_input_mock.return_value = "2" get_JAVA_HOME_mock.return_value = None try: download_and_install_jdk(args) self.fail("Should throw exception") except FatalException as fe: # Expected pass # Test when custom java home exists but java binary file doesn't exist args.java_home = None validate_jdk_mock.return_value = False path_isfileMock.return_value = False update_properties_mock.reset_mock() path_existsMock.reset_mock() path_existsMock.side_effect = pem_side_effect1 get_validated_string_input_mock.return_value = "2" get_JAVA_HOME_mock.return_value = None flag = False try: download_and_install_jdk(args) self.fail("Should throw exception") except FatalException as fe: # Expected flag = True pass self.assertTrue(flag) #Test case: Setup ambari-server with java home passed. 
Path to java home doesn't exist args.java_home = "somewhere" validate_jdk_mock.return_value = False path_existsMock.reset_mock() path_existsMock.side_effect = pem_side_effect1 try: download_and_install_jdk(args) self.fail("Should throw exception") except FatalException as fe: self.assertTrue("Path to java home somewhere or java binary file does not exists" in fe.reason) pass pass @not_for_platform(PLATFORM_WINDOWS) @patch("ambari_server.dbConfiguration_linux.run_os_command") def test_get_postgre_status(self, run_os_command_mock): run_os_command_mock.return_value = (0, "running", None) pg_status, retcode, out, err = PGConfig._get_postgre_status() self.assertEqual("running", pg_status) run_os_command_mock.return_value = (1, "wrong", None) pg_status, retcode, out, err = PGConfig._get_postgre_status() self.assertEqual(None, pg_status) pass @not_for_platform(PLATFORM_WINDOWS) @patch("time.sleep") @patch("subprocess.Popen") @patch("ambari_server.dbConfiguration_linux.run_os_command") @patch.object(PGConfig, "_get_postgre_status") def test_check_postgre_up(self, get_postgre_status_mock, run_os_command_mock, popen_mock, sleep_mock): from ambari_server import serverConfiguration p = MagicMock() p.communicate.return_value = (None, None) p.returncode = 0 popen_mock.return_value = p get_postgre_status_mock.return_value = "running", 0, "", "" serverConfiguration.OS_TYPE = OSConst.OS_REDHAT p.poll.return_value = 0 run_os_command_mock.return_value = (0, None, None) pg_status, retcode, out, err = PGConfig._check_postgre_up() self.assertEqual(0, retcode) serverConfiguration.OS_TYPE = OSConst.OS_SUSE run_os_command_mock.return_value = (0, None, None) p.poll.return_value = 0 get_postgre_status_mock.return_value = "stopped", 0, "", "" pg_status, retcode, out, err = PGConfig._check_postgre_up() self.assertEqual(0, retcode) pass @patch("platform.linux_distribution") @patch("platform.system") @patch("ambari_commons.logging_utils.print_info_msg") 
@patch("ambari_commons.logging_utils.print_error_msg") @patch("ambari_server.serverSetup.get_ambari_properties") @patch("ambari_server.serverSetup.write_property") @patch("ambari_server.serverConfiguration.get_conf_dir") def test_configure_os_settings(self, get_conf_dir_mock, write_property_mock, get_ambari_properties_mock, print_error_msg_mock, print_info_msg_mock, systemMock, distMock): get_ambari_properties_mock.return_value = -1 rcode = configure_os_settings() self.assertEqual(-1, rcode) p = MagicMock() p[OS_TYPE_PROPERTY] = 'somevalue' get_ambari_properties_mock.return_value = p rcode = configure_os_settings() self.assertEqual(0, rcode) p.__getitem__.return_value = "" rcode = configure_os_settings() self.assertEqual(0, rcode) self.assertTrue(write_property_mock.called) self.assertEqual(2, write_property_mock.call_count) self.assertEquals(write_property_mock.call_args_list[0][0][0], "server.os_family") self.assertEquals(write_property_mock.call_args_list[1][0][0], "server.os_type") pass @patch("__builtin__.open") @patch("ambari_server.serverConfiguration.Properties") @patch("ambari_server.serverConfiguration.search_file") @patch("ambari_server.serverConfiguration.get_conf_dir") def test_get_JAVA_HOME(self, get_conf_dir_mock, search_file_mock, Properties_mock, openMock): openMock.side_effect = Exception("exception") result = get_JAVA_HOME() self.assertEqual(None, result) expected = os.path.dirname(__file__) p = MagicMock() p.__getitem__.return_value = expected openMock.side_effect = None Properties_mock.return_value = p result = get_JAVA_HOME() self.assertEqual(expected, result) pass @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.dbConfiguration.get_ambari_properties") def test_prompt_db_properties_default(self, get_ambari_properties_mock): args = MagicMock() args.must_set_database_options = False del args.database_index del args.dbms del args.database_host del args.database_port del 
args.database_name del args.database_username del args.database_password del args.persistence_type get_ambari_properties_mock.return_value = Properties() prompt_db_properties(args) self.assertEqual(args.database_index, 0) pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch.object(LinuxDBMSConfig, "_setup_remote_server") @patch("ambari_server.dbConfiguration_linux.print_info_msg") @patch("ambari_server.dbConfiguration_linux.read_password") @patch("ambari_server.dbConfiguration_linux.get_validated_string_input") @patch("ambari_server.dbConfiguration.get_validated_string_input") @patch("ambari_server.serverSetup.get_YN_input") def test_prompt_db_properties_oracle_sname(self, gyni_mock, gvsi_mock, gvsi_2_mock, rp_mock, print_info_msg_mock, srs_mock): gyni_mock.return_value = True list_of_return_values = ["ambari-server", "ambari", "1", "1521", "localhost", "2"] def side_effect(*args, **kwargs): return list_of_return_values.pop() gvsi_mock.side_effect = side_effect gvsi_2_mock.side_effect = side_effect rp_mock.return_value = "password" args = MagicMock() args.must_set_database_options = True del args.database_index del args.dbms del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.persistence_type del args.sid_or_sname del args.jdbc_url set_silent(False) prompt_db_properties(args) self.assertEqual(args.database_index, 1) props = Properties() factory = DBMSConfigFactory() dbmsConfig = factory.create(args, props) self.assertEqual(dbmsConfig.dbms, "oracle") self.assertEqual(dbmsConfig.database_port, "1521") self.assertEqual(dbmsConfig.database_host, "localhost") self.assertEqual(dbmsConfig.database_name, "ambari") self.assertEqual(dbmsConfig.database_username, "ambari") self.assertEqual(dbmsConfig.database_password, "bigdata") self.assertEqual(dbmsConfig.sid_or_sname, "sid") 
dbmsConfig.configure_database(props, args) self.assertEqual(dbmsConfig.database_username, "ambari-server") self.assertEqual(dbmsConfig.sid_or_sname, "sname") pass @not_for_platform(PLATFORM_WINDOWS) @patch("os.path.isdir") @patch("os.mkdir") @patch("os.chown") @patch("pwd.getpwnam") @patch.object(OSCheck, "get_os_family") @patch.object(LinuxDBMSConfig, "_setup_remote_server") @patch("ambari_server.dbConfiguration_linux.print_info_msg") @patch("ambari_server.dbConfiguration_linux.read_password") @patch("ambari_server.dbConfiguration_linux.get_validated_string_input") @patch("ambari_server.dbConfiguration.get_validated_string_input") @patch("ambari_server.serverSetup.get_YN_input") def test_prompt_db_properties_postgre_adv(self, gyni_mock, gvsi_mock, gvsi_2_mock, rp_mock, print_info_msg_mock, sls_mock, get_os_family_mock, get_pw_nam_mock, chown_mock, mkdir_mock, isdir_mock): gyni_mock.return_value = True list_of_return_values = ["ambari-server", "ambari", "2", "1521", "localhost", "2"] get_os_family_mock.return_value = OSConst.SUSE_FAMILY pw = MagicMock() pw.setattr('pw_uid', 0) pw.setattr('pw_gid', 0) get_pw_nam_mock.return_value = pw def side_effect(*args, **kwargs): return list_of_return_values.pop() gvsi_mock.side_effect = side_effect gvsi_2_mock.side_effect = side_effect rp_mock.return_value = "password" args = MagicMock() args.must_set_database_options = True del args.database_index del args.dbms del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.persistence_type del args.sid_or_sname del args.jdbc_url set_silent(False) prompt_db_properties(args) self.assertEqual(args.database_index, 1) props = Properties() factory = DBMSConfigFactory() dbmsConfig = factory.create(args, props) self.assertEqual(dbmsConfig.dbms, "oracle") self.assertEqual(dbmsConfig.database_port, "1521") self.assertEqual(dbmsConfig.database_host, "localhost") self.assertEqual(dbmsConfig.database_name, "ambari") 
self.assertEqual(dbmsConfig.database_username, "ambari") self.assertEqual(dbmsConfig.database_password, "bigdata") isdir_mock.return_value = False dbmsConfig.configure_database(props, args) self.assertEqual(dbmsConfig.database_username, "ambari-server") self.assertEqual(dbmsConfig.database_password, "password") self.assertEqual(dbmsConfig.sid_or_sname, "sid") self.assertTrue(chown_mock.called) self.assertTrue(mkdir_mock.called) pass @not_for_platform(PLATFORM_WINDOWS) @patch("os.path.isdir") @patch("os.mkdir") @patch("os.chown") @patch("pwd.getpwnam") @patch.object(OSCheck, "get_os_family") @patch.object(PGConfig, "_setup_local_server") @patch("ambari_server.dbConfiguration_linux.print_info_msg") @patch("ambari_server.dbConfiguration_linux.read_password") @patch("ambari_server.dbConfiguration_linux.get_validated_string_input") @patch("ambari_server.dbConfiguration.get_validated_string_input") @patch("ambari_server.serverSetup.get_YN_input") def test_prompt_db_properties_postgre_adv(self, gyni_mock, gvsi_mock, gvsi_2_mock, rp_mock, print_info_msg_mock, sls_mock, get_os_family_mock, get_pw_nam_mock, chown_mock, mkdir_mock, isdir_mock): gyni_mock.return_value = True list_of_return_values = ["ambari-server", "postgres", "ambari", "ambari", "1"] get_os_family_mock.return_value = OSConst.SUSE_FAMILY pw = MagicMock() pw.setattr('pw_uid', 0) pw.setattr('pw_gid', 0) get_pw_nam_mock.return_value = pw def side_effect(*args, **kwargs): return list_of_return_values.pop() gvsi_mock.side_effect = side_effect gvsi_2_mock.side_effect = side_effect rp_mock.return_value = "password" args = MagicMock() args.must_set_database_options = True del args.database_index del args.dbms del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.persistence_type set_silent(False) prompt_db_properties(args) self.assertEqual(args.database_index, 0) props = Properties() factory = DBMSConfigFactory() dbmsConfig = 
factory.create(args, props) self.assertEqual(dbmsConfig.dbms, "postgres") self.assertEqual(dbmsConfig.database_port, "5432") self.assertEqual(dbmsConfig.database_host, "localhost") self.assertEqual(dbmsConfig.database_name, "ambari") self.assertEqual(dbmsConfig.database_username, "ambari") self.assertEqual(dbmsConfig.database_password, "bigdata") dbmsConfig.configure_database(props, args) self.assertEqual(dbmsConfig.database_username, "ambari-server") self.assertEqual(dbmsConfig.database_password, "password") self.assertEqual(dbmsConfig.sid_or_sname, "sid") pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.dbConfiguration_linux.store_password_file") @patch("ambari_server.dbConfiguration_linux.read_password") @patch("ambari_server.dbConfiguration_linux.get_validated_string_input") @patch("ambari_server.dbConfiguration_linux.get_YN_input") def test_prompt_db_properties_for_each_database_type(self, gyni_mock, gvsi_mock, rp_mock, spf_mock): """ :return: Validates that installation for each database type correctly stores the database type, database name, and optionally the postgres schema name. 
""" from ambari_server import serverConfiguration gyni_mock.return_value = True rp_mock.return_value = "password" spf_mock.return_value = "encrypted password" # Values to use while installing several database types hostname = "localhost" db_name = "db_ambari" postgres_schema = "sc_ambari" port = "1234" local_admin_user = "postgres" oracle_service = "1" oracle_service_name = "ambari" user_name = "ambari" # Input values postgres_embedded_values = [local_admin_user, db_name, postgres_schema, hostname] oracle_values = [hostname, port, oracle_service, oracle_service_name, user_name] mysql_values = [hostname, port, db_name, user_name] postgres_external_values = [hostname, port, db_name, postgres_schema, user_name] mssql_values = [hostname, port, db_name, user_name] list_of_return_values = postgres_embedded_values + oracle_values + mysql_values + postgres_external_values + mssql_values list_of_return_values = list_of_return_values[::-1] # Reverse the list since the input will be popped def side_effect(*args, **kwargs): return list_of_return_values.pop() gvsi_mock.side_effect = side_effect if AMBARI_CONF_VAR in os.environ: del os.environ[AMBARI_CONF_VAR] tempdir = tempfile.gettempdir() os.environ[AMBARI_CONF_VAR] = tempdir prop_file = os.path.join(tempdir, "ambari.properties") for i in range(0, 5): # Use the expected path of the ambari.properties file to delete it if it exists, and then create a new one # during each use case. 
if os.path.exists(prop_file): os.remove(prop_file) with open(prop_file, "w") as f: f.write("server.jdbc.database_name=oldDBName") f.close() serverConfiguration.AMBARI_PROPERTIES_FILE = prop_file args = MagicMock() properties = Properties() args.database_index = i args.silent = False del args.dbms del args.database_host del args.local_admin_user del args.database_port del args.database_name del args.database_username del args.database_password del args.sid_or_sname del args.jdbc_url factory = DBMSConfigFactory() dbConfig = factory.create(args, properties) dbConfig._prompt_db_properties() if dbConfig._is_local_database(): dbConfig._setup_local_server(properties, None) else: dbConfig._setup_remote_server(properties, None) if i == 0: # Postgres Embedded self.assertEqual(properties[JDBC_DATABASE_PROPERTY], "postgres") self.assertEqual(properties[JDBC_DATABASE_NAME_PROPERTY], db_name) self.assertEqual(properties[JDBC_POSTGRES_SCHEMA_PROPERTY], postgres_schema) self.assertEqual(properties[PERSISTENCE_TYPE_PROPERTY], "local") elif i == 1: # Oracle self.assertEqual(properties[JDBC_DATABASE_PROPERTY], "oracle") self.assertFalse(JDBC_POSTGRES_SCHEMA_PROPERTY in properties.propertyNames()) self.assertEqual(properties[PERSISTENCE_TYPE_PROPERTY], "remote") elif i == 2: # MySQL self.assertEqual(properties[JDBC_DATABASE_PROPERTY], "mysql") self.assertFalse(JDBC_POSTGRES_SCHEMA_PROPERTY in properties.propertyNames()) self.assertEqual(properties[PERSISTENCE_TYPE_PROPERTY], "remote") elif i == 3: # Postgres External self.assertEqual(properties[JDBC_DATABASE_PROPERTY], "postgres") self.assertEqual(properties[JDBC_DATABASE_NAME_PROPERTY], db_name) self.assertEqual(properties[JDBC_POSTGRES_SCHEMA_PROPERTY], postgres_schema) self.assertEqual(properties[PERSISTENCE_TYPE_PROPERTY], "remote") elif i == 4: # MSSQL self.assertEqual(properties[JDBC_DATABASE_PROPERTY], "mssql") self.assertFalse(JDBC_POSTGRES_SCHEMA_PROPERTY in properties.propertyNames()) 
self.assertEqual(properties[PERSISTENCE_TYPE_PROPERTY], "remote") pass @patch.object(os.path, "exists") @patch.object(os.path, "isfile") def test_validate_jdk(self, isfile_mock, exists_mock): exists_mock.side_effect = [False] result = validate_jdk("path") self.assertFalse(result) exists_mock.side_effect = [True, False] result = validate_jdk("path") self.assertFalse(result) exists_mock.side_effect = [True, True] isfile_mock.return_value = False result = validate_jdk("path") self.assertFalse(result) exists_mock.side_effect = [True, True] isfile_mock.return_value = True result = validate_jdk("path") self.assertTrue(result) pass @patch("glob.glob") @patch("ambari_server.serverConfiguration.get_JAVA_HOME") @patch("ambari_server.serverConfiguration.validate_jdk") def test_find_jdk(self, validate_jdk_mock, get_JAVA_HOME_mock, globMock): get_JAVA_HOME_mock.return_value = "somewhere" validate_jdk_mock.return_value = True result = find_jdk() self.assertEqual("somewhere", result) get_JAVA_HOME_mock.return_value = None globMock.return_value = [] result = find_jdk() self.assertEqual(None, result) globMock.return_value = ["one", "two"] result = find_jdk() self.assertNotEqual(None, result) globMock.return_value = ["one", "two"] validate_jdk_mock.side_effect = [False, True] result = find_jdk() self.assertEqual(result, "one") pass @patch("os.path.exists") @patch("zipfile.ZipFile") @patch("os.path.split") @patch("os.listdir") @patch("ambari_server.serverSetup.copy_files") @patch("shutil.rmtree") def test_unpack_jce_policy(self, rmtree_mock, copy_files_mock, os_listdir_mock, os_path_split_mock, zipfile_mock, exists_mock): # Testing the case when the zip file doesn't contains any folder exists_mock.return_value = True zipfile = MagicMock() zipfile_mock.return_value = zipfile zip_members = ["US_export_policy.jar", "local_policy.jar", "README.txt"] zipfile.namelist.return_value = zip_members os_path_split_mock.return_value = [""] expand_jce_zip_file("", "") 
self.assertTrue(exists_mock.called) self.assertTrue(zipfile_mock.called) self.assertTrue(os_path_split_mock.called) # Testing the case when the zip file contains a folder unziped_jce_path = "jce" os_path_split_mock.return_value = unziped_jce_path expand_jce_zip_file("", "") self.assertTrue(exists_mock.called) self.assertTrue(zipfile_mock.called) self.assertTrue(os_listdir_mock.called) self.assertTrue(copy_files_mock.called) self.assertTrue(rmtree_mock.called) # Testing when the jdk_security_path or jce_zip_path doesn't exist exists_mock.return_value = False try: expand_jce_zip_file("", "") except FatalException: self.assertTrue(True) exists_mock.return_value = True # Testing when zipfile fail with an error zipfile_mock.side_effect = FatalException(1,"Extract error") try: expand_jce_zip_file("", "") except FatalException: self.assertTrue(True) @patch("os.path.exists") @patch("shutil.copy") @patch("os.path.split") @patch("ambari_server.serverSetup.update_properties") @patch.object(JDKSetup, "unpack_jce_policy") @patch("ambari_server.serverSetup.get_ambari_properties") @patch("ambari_commons.os_utils.search_file") @patch("__builtin__.open") @patch("ambari_server.serverSetup.logger") def test_setup_jce_policy(self, logger_mock, open_mock, search_file_mock, get_ambari_properties_mock, unpack_jce_policy_mock, update_properties_mock, path_split_mock, shutil_copy_mock, exists_mock): exists_mock.return_value = True properties = Properties() properties.process_pair(JAVA_HOME_PROPERTY, "/java_home") unpack_jce_policy_mock.return_value = 0 get_ambari_properties_mock.return_value = properties conf_file = 'etc/ambari-server/conf/ambari.properties' search_file_mock.return_value = conf_file path_split_mock.return_value = ["/path/to", "JCEPolicy.zip"] args = ['setup-jce', '/path/to/JCEPolicy.zip'] setup_jce_policy(args) shutil_copy_mock.assert_called_with(args[1], configDefaults.SERVER_RESOURCES_DIR) self.assertTrue(unpack_jce_policy_mock.called) 
self.assertTrue(get_ambari_properties_mock.called) self.assertTrue(update_properties_mock.called) # Testing that if the source and the destination is the same will not try to copy the file path_split_mock.return_value = [configDefaults.SERVER_RESOURCES_DIR, "JCEPolicy.zip"] shutil_copy_mock.reset_mock() setup_jce_policy(args) self.assertFalse(shutil_copy_mock.called) self.assertTrue(unpack_jce_policy_mock.called) self.assertTrue(get_ambari_properties_mock.called) self.assertTrue(update_properties_mock.called) path_split_mock.return_value = ["/path/to", "JCEPolicy.zip"] # Testing with bad path exists_mock.return_value = False try: setup_jce_policy(args) except FatalException: self.assertTrue(True) exists_mock.return_value = True # Testing with an error produced by shutil.copy shutil_copy_mock.reset_mock() shutil_copy_mock.side_effect = FatalException(1, "Error trying to copy the file.") try: setup_jce_policy(args) except FatalException: self.assertTrue(True) # Testing with an error produced by Properties.store function update_properties_mock.side_effect = Exception("Invalid file.") try: setup_jce_policy(args) except Exception: self.assertTrue(True) update_properties_mock.reset_mock() # Testing with an error produced by unpack_jce_policy unpack_jce_policy_mock.side_effect = FatalException(1, "Can not install JCE policy") try: setup_jce_policy(args) except FatalException: self.assertTrue(True) pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("pwd.getpwnam") @patch("resource_management.core.shell.call") @patch("os.path.exists") @patch("os.path.isfile") @patch("ambari_commons.os_utils.remove_file") @patch("ambari_server.dbConfiguration_linux.LinuxDBMSConfig.ensure_jdbc_driver_installed") @patch("ambari_server.dbConfiguration_linux.get_YN_input") @patch("ambari_server.serverSetup.update_properties") @patch("ambari_server.dbConfiguration_linux.get_ambari_properties") 
@patch("ambari_server.dbConfiguration_linux.store_password_file") @patch("ambari_server.dbConfiguration_linux.run_os_command") @patch("ambari_server.dbConfiguration_linux.PGConfig._configure_postgres") @patch("ambari_server.dbConfiguration_linux.PGConfig._check_postgre_up") @patch("ambari_server.dbConfiguration_linux.PGConfig._is_jdbc_user_changed") @patch("ambari_server.serverSetup.verify_setup_allowed") @patch("ambari_server.dbConfiguration_linux.read_password") @patch("ambari_server.dbConfiguration_linux.get_validated_string_input") @patch("ambari_server.dbConfiguration.get_validated_string_input") @patch("ambari_server.serverSetup.get_YN_input") @patch("ambari_server.serverSetup.get_ambari_properties") @patch("ambari_server.serverSetup.configure_os_settings") @patch("ambari_server.serverSetup.download_and_install_jdk") @patch("ambari_server.serverSetup.check_ambari_user") @patch("ambari_server.serverSetup.check_jdbc_drivers") @patch("ambari_server.serverSetup.disable_security_enhancements") @patch("ambari_server.serverSetup.is_root") @patch("ambari_server.serverSetup.proceedJDBCProperties") @patch("ambari_server.serverSetup.extract_views") @patch("ambari_server.serverSetup.adjust_directory_permissions") @patch("ambari_server.serverSetup.service_setup") @patch("ambari_server.serverSetup.read_ambari_user") @patch("ambari_server.serverSetup.expand_jce_zip_file") def test_setup_linux(self, expand_jce_zip_file_mock, read_ambari_user_mock, service_setup_mock, adjust_dirs_mock, extract_views_mock, proceedJDBCProperties_mock, is_root_mock, disable_security_enhancements_mock, check_jdbc_drivers_mock, check_ambari_user_mock, download_jdk_mock, configure_os_settings_mock, get_ambari_properties_mock, get_YN_input_mock, gvsi_mock, gvsi_1_mock, read_password_mock, verify_setup_allowed_method, is_jdbc_user_changed_mock, check_postgre_up_mock, configure_postgres_mock, run_os_command_1_mock, store_password_file_mock, get_ambari_properties_1_mock, update_properties_mock, 
get_YN_input_1_mock, ensure_jdbc_driver_installed_mock, remove_file_mock, isfile_mock, exists_mock, run_os_command_mock, get_pw_nam_mock): hostname = "localhost" db_admin_user = 'postgres' db_name = "db_ambari" postgres_schema = "sc_ambari" db_username = 'u_ambari' port = "1234" oracle_service = "1" oracle_service_name = "ambari" user_name = "ambari" args = MagicMock() del args.dbms del args.database_index del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password args.silent = False failed = False properties = Properties() def side_effect(username): raise KeyError("") get_pw_nam_mock.side_effect = side_effect get_YN_input_mock.return_value = False isfile_mock.return_value = False verify_setup_allowed_method.return_value = 0 exists_mock.return_value = False remove_file_mock.return_value = 0 run_os_command_mock.return_value = 3,"","" extract_views_mock.return_value = 0 read_ambari_user_mock.return_value = "ambari" read_password_mock.return_value = "bigdata2" get_ambari_properties_mock.return_value = properties get_ambari_properties_1_mock.return_value = properties store_password_file_mock.return_value = "encrypted_bigdata2" ensure_jdbc_driver_installed_mock.return_value = True check_postgre_up_mock.return_value = (PGConfig.PG_STATUS_RUNNING, 0, "", "") configure_postgres_mock.return_value = (0, "", "") run_os_command_1_mock.return_value = (0, "", "") expand_jce_zip_file_mock.return_value = 0 def reset_mocks(): is_jdbc_user_changed_mock.reset_mock() is_root_mock.reset_mock() disable_security_enhancements_mock.reset_mock() check_jdbc_drivers_mock.reset_mock() check_ambari_user_mock.reset_mock() run_os_command_mock.reset_mock() configure_os_settings_mock.reset_mock() run_os_command_1_mock.reset_mock() get_YN_input_1_mock.reset_mock() update_properties_mock.reset_mock() extract_views_mock.reset_mock() args = MagicMock() del args.dbms del args.database_index del args.database_host del args.database_port del 
args.database_name del args.database_username del args.database_password del args.persistence_type del args.sid_or_sname del args.jdbc_url del args.init_script_file del args.drop_script_file args.jdbc_driver= None args.jdbc_db = None args.silent = False args.skip_view_extraction = False return args # Testing call under non-root is_root_mock.return_value = False try: setup(args) except FatalException as fe: self.fail("Should not throw exception, only print warning") # see AMBARI-15245 args = reset_mocks() # Testing calls under root # remote case is_root_mock.return_value = True disable_security_enhancements_mock.return_value = (0, "") check_ambari_user_mock.return_value = (0, False, 'user', None) check_jdbc_drivers_mock.return_value = 0 download_jdk_mock.return_value = 0 configure_os_settings_mock.return_value = 0 result = setup(args) self.assertEqual(None, result) self.assertTrue(check_ambari_user_mock.called) self.assertEqual(1, run_os_command_mock.call_count) self.assertTrue(extract_views_mock.called) # test view extraction is skipped on-demand args = reset_mocks() args.skip_view_extraction = True setup(args) self.assertFalse(extract_views_mock.called) #Local case args = reset_mocks() # Input values db_selection_values = ["1"] postgres_values = [db_admin_user, db_name, postgres_schema, db_username] postgres_values = postgres_values[::-1] # Reverse the list since the input will be popped def side_effect(*args, **kwargs): return db_selection_values.pop() gvsi_mock.side_effect = side_effect def side_effect_1(*args, **kwargs): return postgres_values.pop() gvsi_1_mock.side_effect = side_effect_1 get_YN_input_mock.return_value = True # is_local_database_mock.return_value = True is_jdbc_user_changed_mock.return_value = False try: result = setup(args) except FatalException: self.fail("Setup should be successful") self.assertEqual(None, result) self.assertTrue(is_jdbc_user_changed_mock.called) self.assertTrue(update_properties_mock.called) 
self.assertTrue(run_os_command_1_mock.called) self.assertFalse(remove_file_mock.called) self.assertTrue("Ambari-DDL-Postgres-CREATE.sql" in run_os_command_1_mock.call_args[0][0][3]) self.assertTrue("-U {0}".format(db_username) in run_os_command_1_mock.call_args[0][0][3]) #if DB user name was changed args = reset_mocks() # is_local_database_mock.return_value = True is_jdbc_user_changed_mock.return_value = True db_selection_values = ["1"] postgres_values = [db_admin_user, db_name, postgres_schema, db_username] postgres_values = postgres_values[::-1] # Reverse the list since the input will be popped try: result = setup(args) except FatalException: self.fail("Setup should be successful") self.assertEqual(None, result) self.assertTrue(is_jdbc_user_changed_mock.called) self.assertTrue(update_properties_mock.called) self.assertTrue(run_os_command_1_mock.called) self.assertFalse(remove_file_mock.called) #negative case args = reset_mocks() # Use remote database get_YN_input_1_mock.return_value = False db_selection_values = ["4"] postgres_values = [hostname, port, db_name, postgres_schema, user_name] postgres_values = postgres_values[::-1] # Reverse the list since the input will be popped try: result = setup(args) self.fail("Should throw exception") except NonFatalException as fe: self.assertTrue("Remote database setup aborted." 
in fe.reason) self.assertFalse(run_os_command_1_mock.called) # test not run setup if ambari-server setup executed with jdbc properties args = reset_mocks() args.jdbc_driver= "path/to/driver" args.jdbc_db = "test_db_name" setup(args) self.assertTrue(proceedJDBCProperties_mock.called) self.assertFalse(disable_security_enhancements_mock.called) self.assertFalse(check_ambari_user_mock.called) pass @only_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("resource_management.core.shell.call") @patch("os.path.exists") @patch("os.path.isfile") @patch("ambari_commons.os_utils.remove_file") @patch("ambari_server.dbConfiguration_windows.MSSQLConfig.ensure_jdbc_driver_installed") @patch("ambari_server.serverSetup.update_properties") @patch("ambari_server.dbConfiguration_windows.store_password_file") @patch("ambari_server.dbConfiguration_windows.run_os_command") @patch("ambari_server.serverSetup.verify_setup_allowed") @patch("ambari_server.dbConfiguration_windows.get_validated_string_input") @patch("ambari_server.dbConfiguration.get_validated_string_input") @patch("ambari_server.serverSetup.get_YN_input") @patch("ambari_server.serverSetup.get_ambari_properties") @patch("ambari_server.serverSetup.configure_os_settings") @patch("ambari_server.serverSetup.download_and_install_jdk") @patch("ambari_server.serverSetup.check_firewall") @patch("ambari_server.serverSetup.check_ambari_user") @patch("ambari_server.serverSetup.check_jdbc_drivers") @patch("ambari_server.serverSetup.disable_security_enhancements") @patch("ambari_server.serverSetup.is_root") @patch("ambari_server.serverSetup.proceedJDBCProperties") @patch("ambari_server.serverSetup.extract_views") @patch("ambari_server.serverSetup.adjust_directory_permissions") @patch("ambari_server.serverSetup.service_setup") @patch("ambari_server.serverSetup.read_ambari_user") @patch("ambari_server.serverSetup.expand_jce_zip_file") def test_setup_windows(self, 
expand_jce_zip_file_mock, read_ambari_user_mock, service_setup_mock, adjust_dirs_mock, extract_views_mock, proceedJDBCProperties_mock, is_root_mock, disable_security_enhancements_mock, check_jdbc_drivers_mock, check_ambari_user_mock, check_firewall_mock, download_jdk_mock, configure_os_settings_mock, get_ambari_properties_mock, get_YN_input_mock, gvsi_mock, gvsi_1_mock, verify_setup_allowed_method, run_os_command_1_mock, store_password_file_mock, update_properties_mock, ensure_jdbc_driver_installed_mock, remove_file_mock, isfile_mock, exists_mock, run_os_command_mock): hostname = "localhost" db_name = "db_ambari" port = "1433" user_name = "ambari" password = "bigdata2" failed = False properties = Properties() get_YN_input_mock.return_value = False isfile_mock.return_value = False verify_setup_allowed_method.return_value = 0 exists_mock.return_value = False remove_file_mock.return_value = 0 run_os_command_mock.return_value = 3,"","" extract_views_mock.return_value = 0 read_ambari_user_mock.return_value = "ambari" get_ambari_properties_mock.return_value = properties store_password_file_mock.return_value = "encrypted_bigdata2" ensure_jdbc_driver_installed_mock.return_value = True run_os_command_1_mock.return_value = (0, "", "") expand_jce_zip_file_mock.return_value = 0 def reset_mocks(): is_root_mock.reset_mock() disable_security_enhancements_mock.reset_mock() check_jdbc_drivers_mock.reset_mock() check_ambari_user_mock.reset_mock() run_os_command_mock.reset_mock() configure_os_settings_mock.reset_mock() run_os_command_1_mock.reset_mock() update_properties_mock.reset_mock() args = MagicMock() del args.dbms del args.database_index del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.default_database_host del args.persistence_type del args.init_db_script_file del args.cleanup_db_script_file del args.sid_or_sname del args.jdbc_url args.jdbc_driver= None args.jdbc_db = None args.silent = False 
args.must_set_database_options = True return args args = reset_mocks() # Testing call under non-root is_root_mock.return_value = False try: setup(args) self.fail("Should throw exception") except FatalException as fe: # Expected self.assertTrue("administrator-level" in fe.reason) pass args = reset_mocks() # Testing calls under root is_root_mock.return_value = True disable_security_enhancements_mock.return_value = (0, "") check_ambari_user_mock.return_value = (0, False, 'user', None) check_jdbc_drivers_mock.return_value = 0 download_jdk_mock.return_value = 0 configure_os_settings_mock.return_value = 0 result = setup(args) self.assertEqual(None, result) self.assertTrue(check_ambari_user_mock.called) self.assertEqual(2, run_os_command_1_mock.call_count) #negative case args = reset_mocks() # Use Windows authentication get_YN_input_mock.return_value = True gvsi_1_mock.side_effect = [hostname, "1"] try: result = setup(args) except Exception: self.fail("Shouldn't throw exception") self.assertTrue(run_os_command_1_mock.called) # Use SQL Server authentication get_YN_input_mock.return_value = True gvsi_1_mock.side_effect = [hostname, "2", user_name, password] try: result = setup(args) except Exception: self.fail("Shouldn't throw exception") self.assertTrue(run_os_command_1_mock.called) # test not run setup if ambari-server setup executed with jdbc properties args = reset_mocks() args.jdbc_driver= "path/to/driver" args.jdbc_db = "test_db_name" setup(args) self.assertTrue(proceedJDBCProperties_mock.called) self.assertFalse(disable_security_enhancements_mock.called) self.assertFalse(check_ambari_user_mock.called) pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch.object(OracleConfig, "_get_remote_script_line") @patch("ambari_server.serverSetup.is_server_runing") @patch("ambari_server.dbConfiguration_linux.get_YN_input") @patch("ambari_server.serverSetup.get_YN_input") @patch.object(PGConfig, 
"_setup_db") @patch("ambari_server.dbConfiguration_linux.print_warning_msg") @patch("ambari_server.dbConfiguration_linux.print_info_msg") @patch("ambari_server.dbConfiguration_linux.run_os_command") @patch("ambari_server.dbConfiguration.decrypt_password_for_alias") @patch("ambari_server.serverSetup.get_ambari_properties") @patch("ambari_server.serverSetup.is_root") def test_reset(self, is_root_mock, get_ambari_properties_mock, decrypt_password_for_alias_mock, run_os_command_mock, print_info_msg_mock, print_warning_msg_mock, setup_db_mock, get_YN_input_mock, get_YN_input_2_mock, is_server_running_mock, get_remote_script_line_mock): def reset_mocks(): args = MagicMock() del args.dbms del args.database_index del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.persistence_type del args.init_script_file del args.drop_script_file del args.sid_or_sname del args.jdbc_url return args properties = Properties() get_ambari_properties_mock.return_value = properties args = reset_mocks() args.persistence_type = "local" get_YN_input_mock.return_value = False decrypt_password_for_alias_mock.return_value = "password" is_server_running_mock.return_value = (False, 0) setup_db_mock.side_effect = [(0,None, None),(0,None, "ERROR: database 'ambari' is being accessed by other users"), (0, None, "ERROR: user 'mapred' already exist")] # Testing call under non-root is_root_mock.return_value = False try: reset(args) self.fail("Should throw exception") except FatalException as fe: # Expected self.assertTrue("root-level" in fe.reason) pass # Testing calls under root is_root_mock.return_value = True try: reset(args) self.fail("Should throw exception") except FatalException as fe: # Expected self.assertFalse("root-level" in fe.reason) pass get_YN_input_mock.return_value = True get_YN_input_2_mock.return_value = True run_os_command_mock.return_value = (1, None, None) try: reset(args) self.fail("Should throw exception") 
except FatalException: # Expected pass run_os_command_mock.return_value = (0, None, None) reset(args) self.assertTrue(setup_db_mock.called) # Database errors cases is_server_running_mock.side_effect = [(True, 123), (False, 0), (False, 0), (False, 0), (False, 0)] try: reset(args) self.fail("Should throw exception") except FatalException: # Expected pass try: reset(args) self.fail("Should throw exception") except NonFatalException: # Expected pass args = reset_mocks() args.dbms = "postgres" try: #remote db case reset(args) self.fail("Should throw exception") except NonFatalException: # Expected pass args = reset_mocks() args.dbms = "oracle" print_warning_msg_mock.reset_mock() get_remote_script_line_mock.reset_mock() get_remote_script_line_mock.side_effect = ["drop", "create"] try: #remote db case (not Postgres) rcode = reset(args) self.fail("Should throw exception") except NonFatalException: # Expected self.assertTrue(get_remote_script_line_mock.called) self.assertTrue(print_warning_msg_mock.called) pass pass @only_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.serverSetup.is_server_runing") @patch("ambari_server.serverSetup.get_YN_input") @patch("ambari_server.dbConfiguration_windows.print_warning_msg") @patch("ambari_server.dbConfiguration_windows.print_info_msg") @patch("ambari_server.dbConfiguration_windows.run_os_command") @patch("ambari_server.dbConfiguration.decrypt_password_for_alias") @patch("ambari_server.serverSetup.get_ambari_properties") @patch("ambari_server.serverSetup.is_root") def test_reset(self, is_root_mock, get_ambari_properties_mock, decrypt_password_for_alias_mock, run_os_command_mock, print_info_msg_mock, print_warning_msg_mock, get_YN_input_mock, is_server_running_mock): def reset_mocks(): args = MagicMock() del args.dbms del args.database_index del args.database_host del args.database_port del args.database_name del args.database_username del 
args.database_password del args.default_database_host del args.persistence_type del args.init_db_script_file del args.cleanup_db_script_file del args.sid_or_sname del args.jdbc_url return args properties = Properties() get_ambari_properties_mock.return_value = properties args = reset_mocks() args.persistence_type = "local" get_YN_input_mock.return_value = False decrypt_password_for_alias_mock.return_value = "password" is_server_running_mock.return_value = (False, 0) # Testing call under non-root is_root_mock.return_value = False try: reset(args) self.fail("Should throw exception") except FatalException as fe: # Expected self.assertTrue("administrator-level" in fe.reason) pass # Testing calls under root is_root_mock.return_value = True try: reset(args) self.fail("Should throw exception") except FatalException as fe: # Expected self.assertFalse("administrator-level" in fe.reason) pass get_YN_input_mock.return_value = True run_os_command_mock.return_value = (1, None, None) try: reset(args) self.fail("Should throw exception") except FatalException: # Expected pass run_os_command_mock.reset_mock() run_os_command_mock.return_value = (0, None, None) reset(args) self.assertTrue(run_os_command_mock.called) self.assertEqual(run_os_command_mock.call_count, 2) # Database errors cases is_server_running_mock.side_effect = [(True, 123), (False, 0)] try: reset(args) self.fail("Should throw exception") except FatalException: # Expected pass try: reset(args) except NonFatalException: self.fail("Shouldn't throw exception") pass pass @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.serverSetup.get_YN_input") @patch("__builtin__.raw_input") @patch("ambari_server.serverSetup.is_root") @patch("ambari_server.serverSetup.logger") def test_reset_default(self, logger_mock, is_root_mock, raw_input_mock, get_YN_inputMock): is_root_mock.return_value=True get_YN_inputMock.return_value = False raw_input_mock.return_value="" args = 
MagicMock() try: reset(args) self.fail("Should throw exception") except FatalException as fe: # Expected self.assertTrue(fe.code == 1) pass pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch.object(PGConfig, "_setup_db") @patch("ambari_server.dbConfiguration_linux.print_info_msg") @patch("ambari_server.dbConfiguration_linux.run_os_command") @patch("ambari_server.serverSetup.is_root") @patch("ambari_server.serverSetup.is_server_runing") def test_silent_reset(self, is_server_runing_mock, is_root_mock, run_os_command_mock, print_info_msg_mock, setup_db_mock): is_root_mock.return_value = True args = MagicMock() del args.dbms del args.database_index del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.persistence_type del args.init_script_file del args.drop_script_file set_silent(True) self.assertTrue(get_silent()) setup_db_mock.return_value = (0, None, None) run_os_command_mock.return_value = (0, None, None) is_server_runing_mock.return_value = (False, 0) def signal_handler(signum, frame): self.fail("Timed out!") signal.signal(signal.SIGALRM, signal_handler) try: signal.alarm(5) rcode = reset(args) signal.alarm(0) self.assertEqual(None, rcode) self.assertTrue(setup_db_mock.called) finally: signal.signal(signal.SIGALRM, signal.SIG_IGN) pass @only_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.dbConfiguration_windows.MSSQLConfig._execute_db_script") @patch("ambari_server.serverSetup.get_ambari_properties") @patch("ambari_server.serverSetup.is_root") @patch("ambari_server.serverSetup.is_server_runing") def test_silent_reset(self, is_server_runing_mock, is_root_mock, get_ambari_properties_mock, execute_db_script_mock): is_root_mock.return_value = True args = MagicMock() del args.dbms del args.database_index del 
args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.default_database_host del args.persistence_type del args.init_db_script_file del args.cleanup_db_script_file set_silent(True) self.assertTrue(get_silent()) properties = Properties() get_ambari_properties_mock.return_value = properties is_server_runing_mock.return_value = (False, 0) rcode = reset(args) self.assertEqual(None, rcode) self.assertEqual(execute_db_script_mock.call_count, 2) @not_for_platform(PLATFORM_WINDOWS) @patch("os.path.isdir", new = MagicMock(return_value=True)) @patch("os.access", new = MagicMock(return_value=True)) @patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell", new = MagicMock(return_value = '/etc/conf' + os.pathsep + 'test' + os.pathsep + 'path12')) @patch("ambari_server_main.get_is_active_instance") @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("sys.stdout.flush") @patch("sys.stdout.write") @patch("ambari_server_main.looking_for_pid") @patch("ambari_server_main.wait_for_ui_start") @patch("ambari_server_main.save_main_pid_ex") @patch("ambari_server_main.check_exitcode") @patch("os.makedirs") @patch("ambari_server_main.locate_file") @patch.object(_ambari_server_, "is_server_runing") @patch("os.chown") @patch("ambari_server.setupSecurity.get_master_key_location") @patch("ambari_server.setupSecurity.save_master_key") @patch("ambari_server.setupSecurity.get_is_persisted") @patch("ambari_server.setupSecurity.get_is_secure") @patch('os.chmod', autospec=True) @patch("ambari_server.serverConfiguration.write_property") @patch("ambari_server.serverConfiguration.get_validated_string_input") @patch("os.environ") @patch("ambari_server.dbConfiguration.get_ambari_properties") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.serverSetup.get_ambari_properties") 
@patch("ambari_server.serverConfiguration.get_ambari_properties") @patch("ambari_server_main.get_ambari_properties") @patch("os.path.exists") @patch("__builtin__.open") @patch("subprocess.Popen") @patch("ambari_server.serverConfiguration.search_file") @patch("ambari_server_main.check_database_name_property") @patch("ambari_server_main.find_jdk") @patch("ambari_server_main.print_warning_msg") @patch("ambari_server_main.print_info_msg") @patch.object(PGConfig, "_check_postgre_up") @patch("ambari_server_main.read_ambari_user") @patch("ambari_server.setupSecurity.is_root") @patch("ambari_server.dbConfiguration_linux.is_root") @patch("ambari_server_main.is_root") @patch.object(LinuxDBMSConfig, "_find_jdbc_driver") @patch("getpass.getuser") @patch("os.chdir") @patch.object(ResourceFilesKeeper, "perform_housekeeping") @patch.object(_ambari_server_, "logger") def test_start(self, logger_mock, perform_housekeeping_mock, chdir_mock, getuser_mock, find_jdbc_driver_mock, is_root_mock, is_root_2_mock, is_root_3_mock, read_ambari_user_mock, check_postgre_up_mock, print_info_msg_mock, print_warning_msg_mock, find_jdk_mock, check_database_name_property_mock, search_file_mock, popenMock, openMock, pexistsMock, get_ambari_properties_mock, get_ambari_properties_2_mock, get_ambari_properties_3_mock, get_ambari_properties_4_mock, get_ambari_properties_5_mock, os_environ_mock, get_validated_string_input_method, write_property_method, os_chmod_method, get_is_secure_mock, get_is_persisted_mock, save_master_key_method, get_master_key_location_method, os_chown_mock, is_server_running_mock, locate_file_mock, os_makedirs_mock, check_exitcode_mock, save_main_pid_ex_mock, wait_for_ui_start_mock, looking_for_pid_mock, stdout_write_mock, stdout_flush_mock, get_is_active_instance_mock): def reset_mocks(): pexistsMock.reset_mock() get_is_active_instance_mock.reset_mock() get_is_active_instance_mock.return_value = True args = MagicMock() del args.dbms del args.database_index del args.database_host 
del args.database_port del args.database_name del args.database_username del args.database_password del args.persistence_type del args.sid_or_sname del args.jdbc_url del args.debug del args.suspend_start args.skip_properties_validation = False return args args = reset_mocks() locate_file_mock.side_effect = lambda *args: '/bin/su' if args[0] == 'su' else '/bin/sh' f = MagicMock() f.readline.return_value = '42' openMock.return_value = f looking_for_pid_mock.return_value = [{ "pid": "777", "exe": "/test", "cmd": "test arg" }] wait_for_ui_start_mock.return_value = True check_exitcode_mock.return_value = 0 p = Properties() p.process_pair(PID_DIR_PROPERTY, '/var/run/ambari-server') p.process_pair(SECURITY_IS_ENCRYPTION_ENABLED, 'False') p.process_pair(JDBC_DATABASE_NAME_PROPERTY, 'some_value') p.process_pair(NR_USER_PROPERTY, 'some_value') p.process_pair(STACK_LOCATION_KEY, 'some_value') p.process_pair(SERVER_VERSION_FILE_PATH, 'some_value') p.process_pair(OS_TYPE_PROPERTY, 'some_value') p.process_pair(JAVA_HOME_PROPERTY, 'some_value') p.process_pair(JDK_NAME_PROPERTY, 'some_value') p.process_pair(JCE_NAME_PROPERTY, 'some_value') p.process_pair(COMMON_SERVICES_PATH_PROPERTY, 'some_value') p.process_pair(JDBC_PASSWORD_PROPERTY, 'some_value') p.process_pair(WEBAPP_DIR_PROPERTY, 'some_value') p.process_pair(SHARED_RESOURCES_DIR, 'some_value') p.process_pair(SECURITY_KEYS_DIR, 'some_value') p.process_pair(JDBC_USER_NAME_PROPERTY, 'some_value') p.process_pair(BOOTSTRAP_SCRIPT, 'some_value') p.process_pair(OS_FAMILY_PROPERTY, 'some_value') p.process_pair(RESOURCES_DIR_PROPERTY, 'some_value') p.process_pair(CUSTOM_ACTION_DEFINITIONS, 'some_value') p.process_pair(BOOTSTRAP_SETUP_AGENT_SCRIPT, 'some_value') p.process_pair(STACKADVISOR_SCRIPT, 'some_value') p.process_pair(BOOTSTRAP_DIR_PROPERTY, 'some_value') p.process_pair(MPACKS_STAGING_PATH_PROPERTY, 'some_value') get_ambari_properties_5_mock.return_value = get_ambari_properties_4_mock.return_value = \ 
get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \ get_ambari_properties_mock.return_value = p get_is_secure_mock.return_value = False get_is_persisted_mock.return_value = (False, None) search_file_mock.return_value = None is_server_running_mock.return_value = (True, 123) os_chown_mock.return_value = None # Checking "server is running" pexistsMock.return_value = True if get_platform() != PLATFORM_WINDOWS: with patch("pwd.getpwnam") as getpwnam_mock: pw = MagicMock() pw.setattr('pw_uid', 0) pw.setattr('pw_gid', 0) getpwnam_mock.return_value = pw try: _ambari_server_.start(args) self.fail("Should fail with 'Server is running'") except FatalException as e: # Expected self.assertTrue('Ambari Server is already running.' in e.reason) args = reset_mocks() is_server_running_mock.return_value = (False, 0) pexistsMock.return_value = False # Checking situation when ambari user is not set up read_ambari_user_mock.return_value = None try: _ambari_server_.start(args) self.fail("Should fail with 'Can not detect a system user for Ambari'") except FatalException as e: # Expected self.assertTrue('Unable to detect a system user for Ambari Server.' in e.reason) # Checking start from non-root when current user is not the same as a # custom user args = reset_mocks() read_ambari_user_mock.return_value = "dummy-user" getuser_mock.return_value = "non_custom_user" is_root_3_mock.return_value = \ is_root_2_mock.return_value = \ is_root_mock.return_value = False try: _ambari_server_.start(args) self.fail("Should fail with 'Can not start ambari-server as user...'") except FatalException as e: # Expected self.assertTrue('Unable to start Ambari Server as user' in e.reason) # If not active instance, exception should be thrown args = reset_mocks() get_is_active_instance_mock.return_value = False try: _ambari_server_.start(args) self.fail("Should fail with 'This is not an active instance. 
Shutting down...'") except FatalException as e: # Expected self.assertTrue('This is not an active instance' in e.reason) pass # Checking "jdk not found" args = reset_mocks() is_root_3_mock.return_value = \ is_root_2_mock.return_value = \ is_root_mock.return_value = True find_jdk_mock.return_value = None try: _ambari_server_.start(args) self.fail("Should fail with 'No JDK found'") except FatalException as e: # Expected self.assertTrue('No JDK found' in e.reason) args = reset_mocks() find_jdk_mock.return_value = "somewhere" ## Testing workflow under root is_root_3_mock.return_value = \ is_root_2_mock.return_value = \ is_root_mock.return_value = True # Remote DB p.process_pair(JDBC_DATABASE_PROPERTY, 'oracle') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote') # Case when jdbc driver is not used find_jdbc_driver_mock.return_value = -1 try: _ambari_server_.start(args) self.fail("Should fail with exception") except FatalException as e: self.assertTrue('Before starting Ambari Server' in e.reason) args = reset_mocks() # Remote DB p.process_pair(JDBC_DATABASE_PROPERTY, 'oracle') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote') find_jdbc_driver_mock.reset_mock() find_jdbc_driver_mock.return_value = -1 try: _ambari_server_.start(args) except FatalException as e: # Ignored pass args = reset_mocks() # Remote DB p.process_pair(JDBC_DATABASE_PROPERTY, 'oracle') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote') find_jdbc_driver_mock.reset_mock() find_jdbc_driver_mock.return_value = 0 # Test exception handling on resource files housekeeping perform_housekeeping_mock.reset_mock() perform_housekeeping_mock.side_effect = KeeperException("some_reason") pexistsMock.return_value = True try: _ambari_server_.start(args) self.fail("Should fail with exception") except FatalException as e: self.assertTrue('some_reason' in e.reason) self.assertTrue(perform_housekeeping_mock.called) perform_housekeeping_mock.side_effect = lambda *v, **kv : None perform_housekeeping_mock.reset_mock() 
self.assertFalse('Unable to start PostgreSQL server' in e.reason) self.assertFalse(check_postgre_up_mock.called) args = reset_mocks() # Local DB p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local') check_postgre_up_mock.reset_mock() # case: postgres failed to start check_postgre_up_mock.return_value = None, 1, "Unable to start PostgreSQL serv", "error" try: _ambari_server_.start(args) self.fail("Should fail with 'Unable to start PostgreSQL server'") except FatalException as e: # Expected self.assertTrue('Unable to start PostgreSQL server' in e.reason) self.assertTrue(check_postgre_up_mock.called) args = reset_mocks() # Local DB p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local') check_postgre_up_mock.return_value = "running", 0, "success", "" # Case: custom user is "root" read_ambari_user_mock.return_value = "root" # Java failed to start proc = MagicMock() proc.pid = -186 popenMock.return_value = proc try: _ambari_server_.start(args) except FatalException as e: # Expected self.assertTrue(popenMock.called) self.assertTrue('Ambari Server java process died' in e.reason) self.assertTrue(perform_housekeeping_mock.called) args = reset_mocks() # Java OK proc.pid = 186 popenMock.reset_mock() _ambari_server_.start(args) self.assertTrue(popenMock.called) popen_arg = popenMock.call_args[0][0] self.assertTrue(popen_arg[0] == "/bin/sh") self.assertTrue(perform_housekeeping_mock.called) args = reset_mocks() # Local DB p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local') perform_housekeeping_mock.reset_mock() popenMock.reset_mock() # Case: custom user is not "root" read_ambari_user_mock.return_value = "not-root-user" _ambari_server_.start(args) self.assertTrue(chdir_mock.called) self.assertTrue(popenMock.called) popen_arg = popenMock.call_args_list[0][0][0] self.assertTrue("; /bin/su" in popen_arg[2]) 
self.assertTrue(perform_housekeeping_mock.called) args = reset_mocks() # Local DB p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local') check_postgre_up_mock.reset_mock() popenMock.reset_mock() ## Testing workflow under non-root is_root_3_mock.return_value = \ is_root_2_mock.return_value = \ is_root_mock.return_value = False read_ambari_user_mock.return_value = "not-root-user" getuser_mock.return_value = read_ambari_user_mock.return_value _ambari_server_.start(args) self.assertFalse(check_postgre_up_mock.called) args = reset_mocks() # Remote DB p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote') _ambari_server_.start(args) self.assertFalse(check_postgre_up_mock.called) args = reset_mocks() # Remote DB p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote') # Checking call _ambari_server_.start(args) self.assertTrue(popenMock.called) popen_arg = popenMock.call_args[0][0] self.assertTrue(popen_arg[0] == "/bin/sh") args = reset_mocks() # Remote DB p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'remote') # Test start under wrong user read_ambari_user_mock.return_value = "not-root-user" getuser_mock.return_value = "non_custom_user" try: _ambari_server_.start(args) self.fail("Can not start ambari-server as user non_custom_user.") except FatalException as e: # Expected self.assertTrue('Unable to start Ambari Server as user' in e.reason) args = reset_mocks() # Check environ master key is set popenMock.reset_mock() os_environ_mock.copy.return_value = {"a": "b", SECURITY_KEY_ENV_VAR_NAME: "masterkey"} p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local') read_ambari_user_mock.return_value = "root" is_root_3_mock.return_value = \ is_root_2_mock.return_value = \ is_root_mock.return_value = True _ambari_server_.start(args) 
self.assertFalse(get_validated_string_input_method.called) self.assertFalse(save_master_key_method.called) popen_arg = popenMock.call_args[1]['env'] self.assertEquals(os_environ_mock.copy.return_value, popen_arg) args = reset_mocks() # Check environ master key is not set popenMock.reset_mock() os_environ_mock.reset_mock() p.process_pair(SECURITY_IS_ENCRYPTION_ENABLED, 'True') os_environ_mock.copy.return_value = {"a": "b"} p.process_pair(JDBC_DATABASE_PROPERTY, 'postgres') p.process_pair(PERSISTENCE_TYPE_PROPERTY, 'local') read_ambari_user_mock.return_value = "root" is_root_3_mock.return_value = \ is_root_2_mock.return_value = \ is_root_mock.return_value = True get_validated_string_input_method.return_value = "masterkey" os_chmod_method.return_value = None get_is_secure_mock.return_value = True _ambari_server_.start(args) self.assertTrue(get_validated_string_input_method.called) self.assertTrue(save_master_key_method.called) popen_arg = popenMock.call_args[1]['env'] self.assertEquals(os_environ_mock.copy.return_value, popen_arg) # Checking situation when required properties not set up args = reset_mocks() p.removeProp(JAVA_HOME_PROPERTY) get_ambari_properties_mock.return_value = p try: _ambari_server_.start(args) self.fail("Should fail with 'Required properties are not found:'") except FatalException as e: # Expected self.assertTrue('Required properties are not found:' in e.reason) @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch.object(_ambari_server_, "is_server_runing") @patch("os.remove") @patch("os.killpg") @patch("os.getpgid") @patch.object(_ambari_server_, "print_info_msg") def test_stop(self, print_info_msg_mock, gpidMock, removeMock, killMock, isServerRuningMock): isServerRuningMock.return_value = (True, 123) _ambari_server_.stop(None) self.assertTrue(killMock.called) self.assertTrue(removeMock.called) pass @only_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, 
"os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("win32serviceutil.StopServiceWithDeps") @patch("win32serviceutil.StopService") @patch("win32serviceutil.WaitForServiceStatus") def test_stop(self, WaitForServiceStatusMock, StopServiceMock, StopServiceWithDepsMock): _ambari_server_.stop() self.assertTrue(StopServiceWithDepsMock.called) self.assertFalse(StopServiceMock.called) self.assertTrue(WaitForServiceStatusMock.called) pass @patch.object(_ambari_server_, "BackupRestore_main") def test_backup(self, bkrestore_mock): args = ["", "/some/path/file.zip"] _ambari_server_.backup(args) self.assertTrue(bkrestore_mock.called) pass @patch.object(_ambari_server_, "BackupRestore_main") def test_backup_no_path(self, bkrestore_mock): args = [""] _ambari_server_.backup(args) self.assertTrue(bkrestore_mock.called) pass @patch.object(_ambari_server_, "BackupRestore_main") @patch.object(_ambari_server_, "logger") def test_restore(self, logger_mock, bkrestore_mock): args = ["", "/some/path/file.zip"] _ambari_server_.restore(args) self.assertTrue(bkrestore_mock.called) pass @patch.object(_ambari_server_, "BackupRestore_main") @patch.object(_ambari_server_, "logger") def test_restore_no_path(self, logger_mock, bkrestore_mock): args = [""] _ambari_server_.restore(args) self.assertTrue(bkrestore_mock.called) pass @patch("ambari_server.serverUpgrade.get_ambari_properties") @patch("os.listdir") @patch("os.path.isfile") @patch("shutil.move") def test_move_user_custom_actions(self, shutil_move_mock, os_path_isfile_mock, os_listdir_mock, get_ambari_properties_mock): properties = Properties() properties.process_pair(RESOURCES_DIR_PROPERTY, 'some/test/fake/resources/dir/path') get_ambari_properties_mock.return_value = properties os_listdir_mock.return_value = ['sometestdir', 'sometestfile.md', 'sometestfile.py', 'sometestfile2.java', 'sometestfile2.py', 'sometestdir2.py'] os_path_isfile_mock.side_effect = [False, True, True, True, True, False] 
move_user_custom_actions() custom_actions_scripts_dir = os.path.join('some/test/fake/resources/dir/path', 'custom_actions', 'scripts') shutil_move_mock.assert_has_calls([call(os.path.join('some/test/fake/resources/dir/path', 'custom_actions', 'sometestfile.py'), custom_actions_scripts_dir), call(os.path.join('some/test/fake/resources/dir/path', 'custom_actions', 'sometestfile2.py'), custom_actions_scripts_dir)]) self.assertEqual(shutil_move_mock.call_count, 2) pass @patch("os.path.isdir", new = MagicMock(return_value=True)) @patch("os.access", new = MagicMock(return_value=True)) @patch("ambari_server.serverConfiguration.get_conf_dir") @patch("ambari_server.serverUpgrade.run_os_command") @patch("ambari_server.serverUpgrade.get_java_exe_path") def test_run_stack_upgrade(self, java_exe_path_mock, run_os_command_mock, get_conf_dir_mock): java_exe_path_mock.return_value = "/usr/lib/java/bin/java" run_os_command_mock.return_value = (0, None, None) get_conf_dir_mock.return_value = '/etc/conf' stackIdMap = {'HDP' : '2.0', 'repo_url' : 'http://test.com'} run_stack_upgrade(None, 'HDP', '2.0', 'http://test.com', None) self.assertTrue(java_exe_path_mock.called) self.assertTrue(get_conf_dir_mock.called) self.assertTrue(run_os_command_mock.called) run_os_command_mock.assert_called_with('/usr/lib/java/bin/java -cp \'/etc/conf:/usr/lib/ambari-server/*\' ' 'org.apache.ambari.server.upgrade.StackUpgradeHelper ' 'updateStackId ' + "'" + json.dumps(stackIdMap) + "'" + ' > ' + os.sep + 'var' + os.sep + 'log' + os.sep + 'ambari-server' + os.sep + 'ambari-server.out 2>&1') pass @patch("os.path.isdir", new = MagicMock(return_value=True)) @patch("os.access", new = MagicMock(return_value=True)) @patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell", new = MagicMock(return_value = '/etc/conf' + os.pathsep + 'test' + os.pathsep + 'path12')) @patch("ambari_server.serverConfiguration.get_conf_dir") @patch("ambari_server.serverUpgrade.run_os_command") 
@patch("ambari_server.serverUpgrade.get_java_exe_path") def test_run_stack_upgrade_with_url_os(self, java_exe_path_mock, run_os_command_mock, get_conf_dir_mock): java_exe_path_mock.return_value = "/usr/lib/java/bin/java" run_os_command_mock.return_value = (0, None, None) get_conf_dir_mock.return_value = '/etc/conf' stackIdMap = {'HDP' : '2.0', 'repo_url': 'http://test.com', 'repo_url_os': 'centos5,centos6'} run_stack_upgrade(None, 'HDP', '2.0', 'http://test.com', 'centos5,centos6') self.assertTrue(java_exe_path_mock.called) self.assertTrue(get_conf_dir_mock.called) self.assertTrue(run_os_command_mock.called) run_os_command_mock.assert_called_with('/usr/lib/java/bin/java -cp /etc/conf' + os.pathsep + 'test:path12 ' 'org.apache.ambari.server.upgrade.StackUpgradeHelper ' 'updateStackId ' + "'" + json.dumps(stackIdMap) + "'" + ' > ' + os.sep + 'var' + os.sep + 'log' + os.sep + 'ambari-server' + os.sep + 'ambari-server.out 2>&1') pass @patch("os.path.isdir", new = MagicMock(return_value=True)) @patch("os.access", new = MagicMock(return_value=True)) @patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell", new = MagicMock(return_value = '/etc/conf' + os.pathsep + 'test' + os.pathsep + 'path12' + os.pathsep +'/path/to/jdbc.jar')) @patch("ambari_server.serverUpgrade.ensure_jdbc_driver_is_installed") @patch("ambari_server.serverUpgrade.get_jdbc_driver_path") @patch("ambari_server.serverUpgrade.ensure_can_start_under_current_user") @patch("ambari_server.serverUpgrade.generate_env") @patch("ambari_server.serverUpgrade.read_ambari_user") @patch("ambari_server.serverConfiguration.get_conf_dir") @patch("ambari_server.serverUpgrade.run_os_command") @patch("ambari_server.serverUpgrade.get_java_exe_path") @patch("ambari_server.serverUpgrade.get_ambari_properties") @patch("ambari_server.serverUpgrade.get_YN_input") def test_run_schema_upgrade(self, get_YN_input_mock, get_ambari_properties_mock, java_exe_path_mock, run_os_command_mock, get_conf_dir_mock, 
read_ambari_user_mock, generate_env_mock, ensure_can_start_under_current_user_mock, get_jdbc_mock, ensure_jdbc_driver_is_installed_mock): java_exe_path_mock.return_value = "/usr/lib/java/bin/java" run_os_command_mock.return_value = (0, None, None) get_conf_dir_mock.return_value = '/etc/conf' command = '/usr/lib/java/bin/java -cp /etc/conf' + os.pathsep + 'test' + os.pathsep + 'path12' + \ os.pathsep +'/path/to/jdbc.jar ' \ 'org.apache.ambari.server.upgrade.SchemaUpgradeHelper ' \ '> ' + os.sep + 'var' + os.sep + 'log' + os.sep + 'ambari-server' + os.sep + 'ambari-server.out 2>&1' environ = {} generate_env_mock.return_value = environ ensure_can_start_under_current_user_mock.return_value = "root" read_ambari_user_mock.return_value = "ambari" properties = Properties() properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "local") get_ambari_properties_mock.return_value = properties get_YN_input_mock.return_value = True get_jdbc_mock.return_value = '/path/to/jdbc.jar' run_schema_upgrade(None) self.assertTrue(java_exe_path_mock.called) self.assertTrue(ensure_can_start_under_current_user_mock.called) self.assertTrue(generate_env_mock.called) self.assertTrue(read_ambari_user_mock.called) self.assertTrue(run_os_command_mock.called) run_os_command_mock.assert_called_with(command, env=environ) @patch("os.path.isdir", new = MagicMock(return_value=True)) @patch("os.access", new = MagicMock(return_value=True)) @patch("ambari_server.serverConfiguration.get_conf_dir") @patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell", new = MagicMock(return_value = 'test' + os.pathsep + 'path12')) @patch("ambari_server.serverUpgrade.run_os_command") @patch("ambari_server.serverUpgrade.get_java_exe_path") def test_run_metainfo_upgrade(self, java_exe_path_mock, run_os_command_mock, get_conf_dir_mock): java_exe_path_mock.return_value = "/usr/lib/java/bin/java" run_os_command_mock.return_value = (0, None, None) get_conf_dir_mock.return_value = '/etc/conf' json_map = {'a': 
'http://newurl'} run_metainfo_upgrade(None, json_map) self.assertTrue(java_exe_path_mock.called) self.assertTrue(run_os_command_mock.called) run_os_command_mock.assert_called_with('/usr/lib/java/bin/java ' '-cp test' + os.pathsep + 'path12 ' 'org.apache.ambari.server.upgrade.StackUpgradeHelper updateMetaInfo ' + "'" + json.dumps(json_map) + "'" + ' > ' + os.sep + 'var' + os.sep + 'log' + os.sep + 'ambari-server' + os.sep + 'ambari-server.out 2>&1') pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("os.path.isfile") @patch("ambari_server.serverSetup.get_ambari_properties") @patch("os.path.exists") @patch("os.path.lexists") @patch("os.remove") @patch("os.symlink") @patch("shutil.copy") def test_proceedJDBCProperties(self, copy_mock, os_symlink_mock, os_remove_mock, lexists_mock, exists_mock, get_ambari_properties_mock, isfile_mock): args = MagicMock() # test incorrect path to jdbc-driver isfile_mock.return_value = False args.jdbc_driver = "test jdbc" fail = False try: proceedJDBCProperties(args) except FatalException as e: self.assertEquals("File test jdbc does not exist!", e.reason) fail = True self.assertTrue(fail) # test incorrect jdbc-db isfile_mock.return_value = True args.jdbc_db = "incorrect db" fail = False try: proceedJDBCProperties(args) except FatalException as e: self.assertEquals("Unsupported database name incorrect db. 
Please see help for more information.", e.reason) fail = True self.assertTrue(fail) # test getAmbariProperties failed args.jdbc_db = "mysql" get_ambari_properties_mock.return_value = -1 fail = False try: proceedJDBCProperties(args) except FatalException as e: self.assertEquals("Error getting ambari properties", e.reason) fail = True self.assertTrue(fail) # test getAmbariProperties failed args.jdbc_db = "mssql" get_ambari_properties_mock.return_value = -1 fail = False try: proceedJDBCProperties(args) except FatalException as e: self.assertEquals("Error getting ambari properties", e.reason) fail = True self.assertTrue(fail) # test get resource dir param failed args.jdbc_db = "oracle" p = MagicMock() get_ambari_properties_mock.return_value = p p.__getitem__.side_effect = KeyError("test exception") exists_mock.return_value = False fail = False try: proceedJDBCProperties(args) except FatalException as e: fail = True self.assertTrue(fail) # test copy jdbc failed and symlink exists lexists_mock.return_value = True args.jdbc_db = "postgres" get_ambari_properties_mock.return_value = MagicMock() isfile_mock.side_effect = [True, False] exists_mock.return_value = True fail = False def side_effect(): raise Exception(-1, "Failed to copy!") copy_mock.side_effect = side_effect try: proceedJDBCProperties(args) except FatalException as e: fail = True self.assertTrue(fail) self.assertTrue(os_remove_mock.called) # test success symlink creation get_ambari_properties_mock.reset_mock() os_remove_mock.reset_mock() p = MagicMock() get_ambari_properties_mock.return_value = p p.__getitem__.side_effect = None p.__getitem__.return_value = "somewhere" copy_mock.reset_mock() copy_mock.side_effect = None isfile_mock.side_effect = [True, False] proceedJDBCProperties(args) self.assertTrue(os_remove_mock.called) self.assertTrue(os_symlink_mock.called) self.assertTrue(copy_mock.called) self.assertEquals(os_symlink_mock.call_args_list[0][0][0], os.path.join("somewhere","test jdbc")) 
    self.assertEquals(os_symlink_mock.call_args_list[0][0][1], os.path.join("somewhere","postgres-jdbc-driver.jar"))
    pass

  @only_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch("os.path.isfile")
  @patch("ambari_server.serverSetup.get_ambari_properties")
  @patch("os.path.exists")
  @patch("os.path.lexists")
  @patch("os.remove")
  @patch("os.symlink")
  @patch("shutil.copy")
  def test_proceedJDBCProperties(self, copy_mock, os_symlink_mock, os_remove_mock, lexists_mock, exists_mock,
                                 get_ambari_properties_mock, isfile_mock):
    """Windows variant: proceedJDBCProperties must reject a missing driver file
    and an unsupported database name, and must NOT fail for mssql even when
    get_ambari_properties returns an error code."""
    args = MagicMock()

    # Scenario 1: the --jdbc-driver path does not exist -> FatalException.
    isfile_mock.return_value = False
    args.jdbc_driver = "test jdbc"
    fail = False
    try:
      proceedJDBCProperties(args)
    except FatalException as e:
      self.assertEquals("File test jdbc does not exist!", e.reason)
      fail = True
    self.assertTrue(fail)

    # Scenario 2: an unsupported --jdbc-db value -> FatalException.
    isfile_mock.return_value = True
    args.jdbc_db = "incorrect db"
    fail = False
    try:
      proceedJDBCProperties(args)
    except FatalException as e:
      self.assertEquals("Unsupported database name incorrect db. Please see help for more information.", e.reason)
      fail = True
    self.assertTrue(fail)

    # Scenario 3: mssql on Windows succeeds even though get_ambari_properties
    # reports an error (-1) -- presumably the mssql path does not read the
    # ambari properties; NOTE(review): confirm against proceedJDBCProperties.
    # test getAmbariProperties succeeded
    args.jdbc_db = "mssql"
    get_ambari_properties_mock.return_value = -1
    fail = False
    try:
      proceedJDBCProperties(args)
    except FatalException as e:
      self.assertEquals("Error getting ambari properties", e.reason)
      fail = True
    self.assertFalse(fail)

    pass

  @patch("shutil.copytree")
  @patch("os.makedirs")
  @patch("os.path.islink")
  @patch("os.path.exists")
  @patch("os.path.getctime")
  @patch("re.compile")
  @patch("os.path.join")
  @patch("os.path.basename")
  @patch("os.path.isdir")
  @patch("glob.glob")
  def test_find_and_copy_custom_services(self, glob_mock, isdir_mock, basename_mock, join_mock, re_compile_mock,
                                         getctime_mock, exists_mock, islink_mock, makedirs_mock, copytree_mock):
    """find_and_copy_custom_services should copy a service/version dir that is
    a real directory, and skip it when it is a symlink."""
    # service/version dir is not link -> the service is copied.
    glob_mock.return_value = [""]
    isdir_mock.side_effect = [False, True, True]
    islink_mock.return_value = False
    exists_mock.side_effect = [True, False]
    find_and_copy_custom_services("", "", "", "", "", "/common-services/")
    self.assertTrue(makedirs_mock.called)
    self.assertTrue(copytree_mock.called)

    # service/version dir is link -> nothing should be copied.
    # NOTE(review): find_and_copy_custom_services is never invoked again in
    # this scenario, so the two assertFalse checks below pass trivially
    # against the freshly reset mocks -- the symlink branch is not actually
    # exercised. Verify whether a second call (with re-primed side_effects)
    # was intended here.
    makedirs_mock.reset_mock()
    copytree_mock.reset_mock()
    islink_mock.side_effect = [False, True]
    self.assertFalse(makedirs_mock.called)
    self.assertFalse(copytree_mock.called)
    pass

  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch("__builtin__.open")
  @patch("os.path.isfile")
  @patch("os.path.lexists")
  @patch("os.path.exists")
  @patch("os.remove")
  @patch("os.symlink")
  @patch.object(Properties, "store")
  @patch("ambari_server.serverUpgrade.adjust_directory_permissions")
  @patch("ambari_server.serverUpgrade.print_warning_msg")
  @patch("ambari_server.serverUpgrade.read_ambari_user")
  @patch("ambari_server.serverUpgrade.run_schema_upgrade")
  @patch("ambari_server.dbConfiguration_linux.run_os_command")
  @patch("ambari_server.serverConfiguration.find_properties_file")
@patch("ambari_server.serverUpgrade.update_ambari_properties") @patch("ambari_server.serverUpgrade.is_root") @patch("ambari_server.serverConfiguration.write_property") @patch("ambari_server.serverConfiguration.get_ambari_version") @patch("ambari_server.dbConfiguration.get_ambari_properties") @patch("ambari_server.serverConfiguration.get_ambari_properties") @patch("ambari_server.serverUpgrade.get_ambari_properties") @patch("ambari_server.serverUpgrade.upgrade_local_repo") @patch("ambari_server.serverUpgrade.move_user_custom_actions") @patch("ambari_server.serverUpgrade.update_krb_jaas_login_properties") @patch("ambari_server.serverUpgrade.update_ambari_env") @patch("ambari_server.setupMpacks.get_replay_log_file") @patch("ambari_server.serverUpgrade.logger") @patch.object(PGConfig, "_change_db_files_owner", return_value=0) def test_upgrade_from_161(self, change_db_files_owner_mock, logger_mock, get_replay_log_file_mock, update_ambari_env_mock, update_krb_jaas_login_properties_mock, move_user_custom_actions_mock, upgrade_local_repo_mock, get_ambari_properties_mock, get_ambari_properties_2_mock, get_ambari_properties_3_mock, get_ambari_version_mock, write_property_mock, is_root_mock, update_ambari_properties_mock, find_properties_file_mock, run_os_command_mock, run_schema_upgrade_mock, read_ambari_user_mock, print_warning_msg_mock, adjust_directory_permissions_mock, properties_store_mock, os_symlink_mock, os_remove_mock, exists_mock, lexists_mock, isfile_mock, open_mock): def reset_mocks(): run_os_command_mock.reset_mock() write_property_mock.reset_mock() isfile_mock.reset_mock() lexists_mock.reeset_mock() os_symlink_mock.reset_mock() lexists_mock.return_value = False args = MagicMock() del args.dbms del args.database_index del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.database_windows_auth del args.default_database_host del args.init_db_script_file del args.cleanup_db_script_file del 
args.must_set_database_options del args.sid_or_sname del args.jdbc_url args.jdbc_driver= None args.jdbc_db = None args.silent = False return args args = reset_mocks() args.dbms = "postgres" is_root_mock.return_value = True update_ambari_properties_mock.return_value = 0 update_ambari_env_mock.return_value = 0 get_ambari_version_mock.return_value = "1.7.0" move_user_custom_actions_mock.return_value = None update_krb_jaas_login_properties_mock.return_value = -2 # Local Postgres # In Ambari 1.6.1 for an embedded postgres database, the "server.jdbc.database" property stored the DB name, # and the DB type was assumed to be "postgres" if the "server.persistence.type" property was "local" properties = Properties() properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "local") properties.process_pair(JDBC_DATABASE_PROPERTY, "ambari") properties.process_pair(RESOURCES_DIR_PROPERTY, "/tmp") get_ambari_properties_mock.return_value = properties properties2 = Properties() properties2.process_pair(PERSISTENCE_TYPE_PROPERTY, "local") properties2.process_pair(JDBC_DATABASE_NAME_PROPERTY, "ambari") properties2.process_pair(JDBC_DATABASE_PROPERTY, "postgres") get_ambari_properties_3_mock.side_effect = get_ambari_properties_2_mock.side_effect = [properties, properties2, properties2] get_replay_log_file_mock.return_value = "/invalid_path/mpacks_replay.log" run_schema_upgrade_mock.return_value = 0 read_ambari_user_mock.return_value = "custom_user" run_os_command_mock.return_value = (0, "", "") isfile_mock.return_value = False try: upgrade(args) except FatalException as fe: self.fail("Did not expect failure: " + str(fe)) else: self.assertTrue(write_property_mock.called) self.assertEquals(write_property_mock.call_args_list[0][0][0], JDBC_DATABASE_NAME_PROPERTY) self.assertEquals(write_property_mock.call_args_list[0][0][1], "ambari") self.assertEquals(write_property_mock.call_args_list[1][0][0], JDBC_DATABASE_PROPERTY) self.assertEquals(write_property_mock.call_args_list[1][0][1], "postgres") 
self.assertFalse(move_user_custom_actions_mock.called) args = reset_mocks() # External Postgres # In Ambari 1.6.1 for an external postgres database, the "server.jdbc.database" property stored the # DB type ("postgres"), and the "server.jdbc.schema" property stored the DB name. properties = Properties() properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote") properties.process_pair(JDBC_DATABASE_PROPERTY, "postgres") properties.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari") properties.process_pair(JDBC_URL_PROPERTY, "jdbc:postgresql://c6410.ambari.apache.org:5432/ambari") properties2 = Properties() properties2.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote") properties2.process_pair(JDBC_DATABASE_NAME_PROPERTY, "ambari") properties2.process_pair(JDBC_DATABASE_PROPERTY, "postgres") properties2.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari") properties2.process_pair(JDBC_URL_PROPERTY, "jdbc:postgresql://c6410.ambari.apache.org:5432/ambari") get_ambari_properties_mock.return_value = properties get_ambari_properties_3_mock.side_effect = get_ambari_properties_2_mock.side_effect = [properties, properties2, properties2] exists_mock.return_value = True try: upgrade(args) except FatalException as fe: self.fail("Did not expect failure: " + str(fe)) else: self.assertTrue(write_property_mock.called) self.assertFalse(run_os_command_mock.called) self.assertFalse(move_user_custom_actions_mock.called) args = reset_mocks() # External Postgres missing DB type, so it should be set based on the JDBC URL. 
properties = Properties() properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote") properties.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari") properties.process_pair(JDBC_URL_PROPERTY, "jdbc:postgresql://c6410.ambari.apache.org:5432/ambari") get_ambari_properties_mock.return_value = properties get_ambari_properties_3_mock.side_effect = get_ambari_properties_2_mock.side_effect = [properties, properties2, properties2] try: upgrade(args) except FatalException as fe: self.fail("Did not expect failure: " + str(fe)) else: self.assertTrue(write_property_mock.call_count == 2) self.assertFalse(move_user_custom_actions_mock.called) args = reset_mocks() # External MySQL # In Ambari 1.6.1 for an external MySQL database, the "server.jdbc.database" property stored the DB type ("mysql"), # And the "server.jdbc.schema" property stored the DB name. properties = Properties() properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote") properties.process_pair(JDBC_DATABASE_PROPERTY, "mysql") properties.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari") properties.process_pair(JDBC_URL_PROPERTY, "jdbc:mysql://c6409.ambari.apache.org:3306/ambari") properties2 = Properties() properties2.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote") properties2.process_pair(JDBC_DATABASE_PROPERTY, "mysql") properties2.process_pair(JDBC_DATABASE_NAME_PROPERTY, "ambari") properties2.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari") properties2.process_pair(JDBC_URL_PROPERTY, "jdbc:mysql://c6409.ambari.apache.org:3306/ambari") get_ambari_properties_mock.return_value = properties get_ambari_properties_3_mock.side_effect = get_ambari_properties_2_mock.side_effect = [properties, properties2, properties2] isfile_mock.side_effect = [False, True, False, False, False] try: upgrade(args) except FatalException as fe: self.fail("Did not expect failure: " + str(fe)) else: self.assertTrue(write_property_mock.called) self.assertFalse(move_user_custom_actions_mock.called) self.assertTrue(os_symlink_mock.called) 
self.assertTrue(os_symlink_mock.call_args_list[0][0][0] == "/var/lib/ambari-server/resources/mysql-connector-java.jar") self.assertTrue(os_symlink_mock.call_args_list[0][0][1] == "/var/lib/ambari-server/resources/mysql-jdbc-driver.jar") args = reset_mocks() # External MySQL missing DB type, so it should be set based on the JDBC URL. properties = Properties() properties.process_pair(PERSISTENCE_TYPE_PROPERTY, "remote") properties.process_pair(JDBC_RCA_SCHEMA_PROPERTY, "ambari") properties.process_pair(JDBC_URL_PROPERTY, "jdbc:mysql://c6409.ambari.apache.org:3306/ambari") get_ambari_properties_mock.return_value = properties get_ambari_properties_3_mock.side_effect = get_ambari_properties_2_mock.side_effect = [properties, properties2, properties2] isfile_mock.side_effect = None try: upgrade(args) except FatalException as fe: self.fail("Did not expect failure: " + str(fe)) else: self.assertTrue(write_property_mock.call_count == 2) self.assertFalse(move_user_custom_actions_mock.called) pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("__builtin__.open") @patch("os.path.isfile") @patch("os.path.exists") @patch("os.path.lexists") @patch("os.remove") @patch("os.symlink") @patch.object(Properties, "store") @patch.object(PGConfig, "_change_db_files_owner") @patch("ambari_server.serverConfiguration.find_properties_file") @patch("ambari_server.serverUpgrade.adjust_directory_permissions") @patch("ambari_server.serverUpgrade.print_warning_msg") @patch("ambari_server.serverUpgrade.read_ambari_user") @patch("ambari_server.serverUpgrade.run_schema_upgrade") @patch("ambari_server.serverUpgrade.update_ambari_properties") @patch("ambari_server.serverUpgrade.parse_properties_file") @patch("ambari_server.serverUpgrade.get_ambari_version") @patch("ambari_server.serverConfiguration.get_ambari_version") @patch("ambari_server.serverUpgrade.is_root") 
@patch("ambari_server.dbConfiguration.get_ambari_properties") @patch("ambari_server.serverConfiguration.get_ambari_properties") @patch("ambari_server.serverUpgrade.get_ambari_properties") @patch("ambari_server.serverUpgrade.upgrade_local_repo") @patch("ambari_server.serverUpgrade.move_user_custom_actions") @patch("ambari_server.serverUpgrade.update_krb_jaas_login_properties") @patch("ambari_server.serverUpgrade.update_ambari_env") def test_upgrade(self, update_ambari_env_mock, update_krb_jaas_login_properties_mock, move_user_custom_actions, upgrade_local_repo_mock, get_ambari_properties_mock, get_ambari_properties_2_mock, get_ambari_properties_3_mock, is_root_mock, get_ambari_version_mock, get_ambari_version_2_mock, parse_properties_file_mock, update_ambari_properties_mock, run_schema_upgrade_mock, read_ambari_user_mock, print_warning_msg_mock, adjust_directory_permissions_mock, find_properties_file_mock, change_db_files_owner_mock, properties_store_mock, os_symlink_mock, os_remove_mock, lexists_mock, exists_mock, isfile_mock, open_mock): def reset_mocks(): isfile_mock.reset_mock() args = MagicMock() del args.database_index del args.dbms del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.persistence_type del args.sid_or_sname del args.jdbc_url args.must_set_database_options = True return args args = reset_mocks() properties = Properties() get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \ get_ambari_properties_mock.return_value = properties update_ambari_properties_mock.return_value = 0 update_ambari_env_mock.return_value = 0 run_schema_upgrade_mock.return_value = 0 isfile_mock.return_value = False get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = CURR_AMBARI_VERSION move_user_custom_actions.return_value = None update_krb_jaas_login_properties_mock.return_value = -2 # Testing call under non-root 
is_root_mock.return_value = False try: upgrade(args) self.fail("Should throw exception") except FatalException as fe: # Expected self.assertTrue("root-level" in fe.reason) pass args = reset_mocks() # Testing calls under root is_root_mock.return_value = True # Testing with undefined custom user read_ambari_user_mock.return_value = None run_schema_upgrade_mock.return_value = 0 change_db_files_owner_mock.return_value = 0 exists_mock.return_value = True upgrade(args) self.assertTrue(print_warning_msg_mock.called) warning_args = print_warning_msg_mock.call_args[0][0] self.assertTrue("custom ambari user" in warning_args) self.assertTrue(upgrade_local_repo_mock.called) self.assertTrue(move_user_custom_actions.called) args = reset_mocks() # Testing with defined custom user read_ambari_user_mock.return_value = "ambari-custom-user" upgrade(args) self.assertTrue(adjust_directory_permissions_mock.called) args = reset_mocks() run_schema_upgrade_mock.return_value = 0 parse_properties_file_mock.called = False move_user_custom_actions.called = False retcode = upgrade(args) self.assertTrue(get_ambari_properties_mock.called) self.assertTrue(get_ambari_properties_2_mock.called) self.assertNotEqual(-1, retcode) self.assertTrue(parse_properties_file_mock.called) self.assertTrue(run_schema_upgrade_mock.called) self.assertTrue(move_user_custom_actions.called) # Assert that move_user_custom_actions is called on upgrade to Ambari == 2.0.0 get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = '2.0.0' move_user_custom_actions.called = False upgrade(args) self.assertTrue(move_user_custom_actions.called) # Assert that move_user_custom_actions is not called on upgrade to Ambari < 2.0.0 get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = '1.6.0' move_user_custom_actions.called = False upgrade(args) self.assertFalse(move_user_custom_actions.called) get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = 
CURR_AMBARI_VERSION # test getAmbariProperties failed args = reset_mocks() get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \ get_ambari_properties_mock.return_value = -1 fail = False try: upgrade(args) except FatalException as e: self.assertEquals("Error getting ambari properties", e.reason) fail = True self.assertTrue(fail) # test get resource dir param failed args = reset_mocks() p = MagicMock() get_ambari_properties_mock.reset_mock() get_ambari_properties_2_mock.reset_mock() get_ambari_properties_3_mock.reset_mock() get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \ get_ambari_properties_mock.return_value = p p.__getitem__.side_effect = ["something", "something", "something", "something", KeyError("test exception")] exists_mock.return_value = False fail = False try: upgrade(args) except FatalException as e: fail = True self.assertTrue(fail) # test if some drivers are available in resources, and symlink available too args = reset_mocks() props = Properties() props.process_pair(JDBC_DATABASE_NAME_PROPERTY, "something") props.process_pair(RESOURCES_DIR_PROPERTY, "resources") get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \ get_ambari_properties_mock.return_value = props exists_mock.return_value = True lexists_mock.return_value = True isfile_mock.side_effect = [True, False, False] upgrade(args) self.assertTrue(os_remove_mock.called) self.assertEquals(os_remove_mock.call_count, 1) self.assertEquals(os_remove_mock.call_args[0][0], os.path.join("resources", "oracle-jdbc-driver.jar")) self.assertEquals(os_symlink_mock.call_count, 1) self.assertEquals(os_symlink_mock.call_args[0][0], os.path.join("resources", "ojdbc6.jar")) self.assertEquals(os_symlink_mock.call_args[0][1], os.path.join("resources", "oracle-jdbc-driver.jar")) pass @only_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = 
os_distro_value)) @patch("__builtin__.open") @patch("os.path.isfile") @patch("os.path.exists") @patch("os.path.lexists") @patch("os.remove") @patch("os.symlink") @patch.object(Properties, "store") @patch("ambari_server.serverConfiguration.find_properties_file") @patch("ambari_server.serverUpgrade.adjust_directory_permissions") @patch("ambari_server.serverUpgrade.print_warning_msg") @patch("ambari_server.serverUpgrade.read_ambari_user") @patch("ambari_server.serverUpgrade.run_schema_upgrade") @patch("ambari_server.serverUpgrade.update_ambari_properties") @patch("ambari_server.serverUpgrade.parse_properties_file") @patch("ambari_server.serverUpgrade.get_ambari_version") @patch("ambari_server.serverConfiguration.get_ambari_version") @patch("ambari_server.serverUpgrade.is_root") @patch("ambari_server.dbConfiguration.get_ambari_properties") @patch("ambari_server.serverConfiguration.get_ambari_properties") @patch("ambari_server.serverUpgrade.get_ambari_properties") @patch("ambari_server.serverUpgrade.upgrade_local_repo") @patch("ambari_server.serverUpgrade.move_user_custom_actions") @patch("ambari_server.serverUpgrade.update_krb_jaas_login_properties") def test_upgrade(self, update_krb_jaas_login_properties_mock, move_user_custom_actions, upgrade_local_repo_mock, get_ambari_properties_mock, get_ambari_properties_2_mock, get_ambari_properties_3_mock, is_root_mock, get_ambari_version_mock, get_ambari_version_2_mock, parse_properties_file_mock, update_ambari_properties_mock, run_schema_upgrade_mock, read_ambari_user_mock, print_warning_msg_mock, adjust_directory_permissions_mock, find_properties_file_mock, properties_store_mock, os_symlink_mock, os_remove_mock, lexists_mock, exists_mock, isfile_mock, open_mock): def reset_mocks(): isfile_mock.reset_mock() args = MagicMock() del args.dbms del args.database_index del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.default_database_host del 
args.persistence_type del args.init_db_script_file del args.cleanup_db_script_file del args.sid_or_sname del args.jdbc_url args.must_set_database_options = True return args args = reset_mocks() properties = Properties() get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \ get_ambari_properties_mock.return_value = properties update_ambari_properties_mock.return_value = 0 run_schema_upgrade_mock.return_value = 0 isfile_mock.return_value = False get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = CURR_AMBARI_VERSION move_user_custom_actions.return_value = None update_krb_jaas_login_properties_mock.return_value = -2 # Testing call under non-root is_root_mock.return_value = False try: upgrade(args) self.fail("Should throw exception") except FatalException as fe: # Expected self.assertTrue("administrator-level" in fe.reason) pass args = reset_mocks() # Testing calls under root is_root_mock.return_value = True # Testing with undefined custom user read_ambari_user_mock.return_value = None run_schema_upgrade_mock.return_value = 0 exists_mock.return_value = True upgrade(args) self.assertTrue(print_warning_msg_mock.called) warning_args = print_warning_msg_mock.call_args[0][0] self.assertTrue("custom ambari user" in warning_args) self.assertTrue(upgrade_local_repo_mock.called) self.assertTrue(move_user_custom_actions.called) args = reset_mocks() # Testing with defined custom user read_ambari_user_mock.return_value = "ambari-custom-user" upgrade(args) self.assertTrue(adjust_directory_permissions_mock.called) args = reset_mocks() run_schema_upgrade_mock.return_value = 0 parse_properties_file_mock.called = False move_user_custom_actions.called = False retcode = upgrade(args) self.assertTrue(get_ambari_properties_mock.called) self.assertTrue(get_ambari_properties_2_mock.called) self.assertNotEqual(-1, retcode) self.assertTrue(parse_properties_file_mock.called) self.assertTrue(run_schema_upgrade_mock.called) 
self.assertTrue(move_user_custom_actions.called) # Assert that move_user_custom_actions is called on upgrade to Ambari == 2.0.0 get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = '2.0.0' move_user_custom_actions.called = False upgrade(args) self.assertTrue(move_user_custom_actions.called) # Assert that move_user_custom_actions is not called on upgrade to Ambari < 2.0.0 get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = '1.6.0' move_user_custom_actions.called = False upgrade(args) self.assertFalse(move_user_custom_actions.called) get_ambari_version_2_mock.return_value = get_ambari_version_mock.return_value = CURR_AMBARI_VERSION # test getAmbariProperties failed args = reset_mocks() get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \ get_ambari_properties_mock.return_value = -1 fail = False try: upgrade(args) except FatalException as e: self.assertEquals("Error getting ambari properties", e.reason) fail = True self.assertTrue(fail) # test get resource dir param failed args = reset_mocks() p = MagicMock() get_ambari_properties_mock.reset_mock() get_ambari_properties_2_mock.reset_mock() get_ambari_properties_3_mock.reset_mock() get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \ get_ambari_properties_mock.return_value = p p.__getitem__.side_effect = ["something", "something", "something", "something", KeyError("test exception")] exists_mock.return_value = False fail = False try: upgrade(args) except FatalException as e: fail = True self.assertTrue(fail) # test if some drivers are available in resources, and symlink available too args = reset_mocks() props = Properties() props.process_pair(JDBC_DATABASE_NAME_PROPERTY, "something") props.process_pair(RESOURCES_DIR_PROPERTY, "resources") get_ambari_properties_3_mock.return_value = get_ambari_properties_2_mock.return_value = \ get_ambari_properties_mock.return_value = props 
exists_mock.return_value = True lexists_mock.return_value = True isfile_mock.side_effect = [True, False, False] pass def test_print_info_msg(self): out = StringIO.StringIO() sys.stdout = out set_verbose(True) print_info_msg("msg") self.assertNotEqual("", out.getvalue()) sys.stdout = sys.__stdout__ pass def test_print_error_msg(self): out = StringIO.StringIO() sys.stdout = out set_verbose(True) print_error_msg("msg") self.assertNotEqual("", out.getvalue()) sys.stdout = sys.__stdout__ pass def test_print_warning_msg(self): out = StringIO.StringIO() sys.stdout = out set_verbose(True) print_warning_msg("msg") self.assertNotEqual("", out.getvalue()) sys.stdout = sys.__stdout__ pass @patch("ambari_server.userInput.get_choice_string_input") def test_get_YN_input(self, get_choice_string_input_mock): get_YN_input("prompt", "default") self.assertTrue(get_choice_string_input_mock.called) self.assertEqual(5, len(get_choice_string_input_mock.call_args_list[0][0])) pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch.object(_ambari_server_, "setup") def test_main_db_options(self, setup_mock): base_args = ["ambari-server.py", "setup"] db_args = ["--database", "postgres", "--databasehost", "somehost.net", "--databaseport", "12345", "--databasename", "ambari", "--databaseusername", "ambari", "--databasepassword", "bigdata"] #test no args failed = False sys.argv = list(base_args) try: _ambari_server_.mainBody() except SystemExit: failed = True pass self.assertFalse(failed) self.assertTrue(setup_mock.called) self.assertTrue(setup_mock.call_args_list[0][0][0].must_set_database_options) setup_mock.reset_mock() # test embedded option failed = False sys.argv = list(base_args) sys.argv.extend(db_args[-10:]) sys.argv.extend(["--database", "embedded"]) try: _ambari_server_.mainBody() except SystemExit: failed = True pass self.assertFalse(failed) self.assertTrue(setup_mock.called) setup_mock.reset_mock() 
#test full args sys.argv = list(base_args) sys.argv.extend(db_args) try: _ambari_server_.mainBody() except SystemExit: failed = True pass self.assertFalse(failed) self.assertTrue(setup_mock.called) self.assertFalse(setup_mock.call_args_list[0][0][0].must_set_database_options) setup_mock.reset_mock() #test not full args sys.argv = list(base_args) sys.argv.extend(["--database", "postgres"]) try: _ambari_server_.mainBody() except SystemExit: failed = True pass self.assertFalse(setup_mock.called) self.assertTrue(failed) setup_mock.reset_mock() #test wrong database failed = False sys.argv = list(base_args) sys.argv.extend(["--database", "unknown"]) sys.argv.extend(db_args[2:]) try: _ambari_server_.mainBody() except SystemExit: failed = True pass self.assertTrue(failed) self.assertFalse(setup_mock.called) setup_mock.reset_mock() #test wrong port check failed = False sys.argv = list(base_args) sys.argv.extend(["--databaseport", "unknown"]) sys.argv.extend(db_args[:4]) sys.argv.extend(db_args[6:]) try: _ambari_server_.mainBody() except SystemExit: failed = True pass self.assertTrue(failed) self.assertFalse(setup_mock.called) pass @only_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch.object(_ambari_server_, "setup") def test_main_db_options(self, setup_mock): base_args = ["ambari-server.py", "setup"] db_args = ["--databasehost", "somehost.net", "--databaseport", "12345", "--databasename", "ambari", "--databaseusername", "ambari", "--databasepassword", "bigdata"] #test no args failed = False sys.argv = list(base_args) try: _ambari_server_.mainBody() except SystemExit: failed = True pass self.assertFalse(failed) self.assertTrue(setup_mock.called) self.assertTrue(setup_mock.call_args_list[0][0][0].must_set_database_options) setup_mock.reset_mock() #test full args sys.argv = list(base_args) sys.argv.extend(db_args) try: _ambari_server_.mainBody() except SystemExit: failed = True pass 
self.assertFalse(failed) self.assertTrue(setup_mock.called) self.assertFalse(setup_mock.call_args_list[0][0][0].must_set_database_options) setup_mock.reset_mock() #test not full args sys.argv = list(base_args) sys.argv.extend(["--databasehost", "somehost.net"]) try: _ambari_server_.mainBody() except SystemExit: failed = True pass self.assertFalse(setup_mock.called) self.assertTrue(failed) setup_mock.reset_mock() #test wrong port check failed = False sys.argv = list(base_args) sys.argv.extend(["--databaseport", "unknown"]) sys.argv.extend(db_args[:2]) sys.argv.extend(db_args[6:]) try: _ambari_server_.mainBody() except SystemExit: failed = True pass self.assertTrue(failed) self.assertFalse(setup_mock.called) pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.serverSetup.get_YN_input") @patch("ambari_server.dbConfiguration.get_validated_string_input") @patch("ambari_server.dbConfiguration_linux.print_info_msg") @patch("ambari_server.dbConfiguration.get_ambari_properties") def test_prompt_db_properties(self, get_ambari_properties_mock, print_info_msg_mock, get_validated_string_input_mock, get_YN_input_mock): def reset_mocks(): get_validated_string_input_mock.reset_mock() get_YN_input_mock.reset_mock() args = MagicMock() del args.database_index del args.dbms del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.persistence_type return args args = reset_mocks() get_ambari_properties_mock.return_value = Properties() set_silent(False) #test not prompt args.must_set_database_options = False prompt_db_properties(args) self.assertFalse(get_validated_string_input_mock.called) self.assertFalse(get_YN_input_mock.called) args = reset_mocks() #test prompt args.must_set_database_options = True get_YN_input_mock.return_value = False prompt_db_properties(args) self.assertTrue(get_YN_input_mock.called) 
    # NOTE(review): line breaks/indentation below are restored from a
    # whitespace-mangled source; grouping of statements inside `with` blocks
    # is inferred — verify against upstream TestAmbariServer.py.
    # (tail of a prompt_db_properties test whose definition starts above this view)
    self.assertFalse(get_validated_string_input_mock.called)

    args = reset_mocks()

    #test prompt advanced
    args.must_set_database_options = True
    get_YN_input_mock.return_value = True
    get_validated_string_input_mock.return_value = "4"

    prompt_db_properties(args)

    self.assertTrue(get_YN_input_mock.called)
    self.assertTrue(get_validated_string_input_mock.called)
    # "4" from the prompt maps to 0-based database_index 3
    self.assertEquals(args.database_index, 3)
    pass

  # Verifies update_ambari_properties() merges an old (backup) properties file
  # into the current one: old values are kept, values already present in the
  # new file are NOT overwritten, and the backup file is renamed with a timestamp.
  @patch("ambari_server.serverConfiguration.get_conf_dir")
  def _test_update_ambari_properties(self, get_conf_dir_mock):
    from ambari_server import serverConfiguration  # need to modify constants inside the module
    properties = ["server.jdbc.user.name=ambari-server\n",
                  "server.jdbc.user.passwd=/etc/ambari-server/conf/password.dat\n",
                  "java.home=/usr/jdk64/jdk1.6.0_31\n",
                  "server.jdbc.database_name=ambari\n",
                  "ambari-server.user=ambari\n",
                  "agent.fqdn.service.url=URL\n",
                  "java.releases=jdk1.7,jdk1.6\n"]

    NEW_PROPERTY = 'some_new_property=some_value\n'
    JAVA_RELEASES_NEW_PROPERTY = 'java.releases=jdk1.8,jdk1.7\n'
    CHANGED_VALUE_PROPERTY = 'server.jdbc.database_name=should_not_overwrite_value\n'

    get_conf_dir_mock.return_value = '/etc/ambari-server/conf'

    # two temp files stand in for the current properties file and its backup
    (tf1, fn1) = tempfile.mkstemp()
    (tf2, fn2) = tempfile.mkstemp()
    configDefaults.AMBARI_PROPERTIES_BACKUP_FILE = fn1
    os.close(tf1)
    serverConfiguration.AMBARI_PROPERTIES_FILE = fn2
    os.close(tf2)

    with open(serverConfiguration.AMBARI_PROPERTIES_FILE, "w") as f:
      f.write(NEW_PROPERTY)
      f.write(CHANGED_VALUE_PROPERTY)
      f.write(JAVA_RELEASES_NEW_PROPERTY)
      f.close()

    with open(configDefaults.AMBARI_PROPERTIES_BACKUP_FILE, 'w') as f:
      for line in properties:
        f.write(line)
      f.close()

    #Call tested method
    update_ambari_properties()

    timestamp = datetime.datetime.now()
    #RPMSAVE_FILE wasn't found
    self.assertFalse(os.path.exists(configDefaults.AMBARI_PROPERTIES_BACKUP_FILE))
    #Renamed RPMSAVE_FILE exists
    self.assertTrue(os.path.exists(configDefaults.AMBARI_PROPERTIES_BACKUP_FILE + '.'
                                   + timestamp.strftime('%Y%m%d%H%M%S')))

    with open(serverConfiguration.AMBARI_PROPERTIES_FILE, 'r') as f:
      ambari_properties_content = f.readlines()

    # every old property must survive the merge, with two special cases:
    # the agent URL property is renamed, and java.releases takes the NEW value
    for line in properties:
      if (line == "agent.fqdn.service.url=URL\n"):
        if (not GET_FQDN_SERVICE_URL + "=URL\n" in ambari_properties_content) and (
            line in ambari_properties_content):
          self.fail()
      elif line == "java.releases=jdk1.7,jdk1.6\n":
        if not "java.releases=jdk1.8,jdk1.7\n" in ambari_properties_content:
          self.fail()
      else:
        if not line in ambari_properties_content:
          self.fail()

    if not NEW_PROPERTY in ambari_properties_content:
      self.fail()

    # the old backup value must not clobber the current one
    if CHANGED_VALUE_PROPERTY in ambari_properties_content:
      self.fail()

    # Command should not fail if *.rpmsave file is missing
    result = update_ambari_properties()
    self.assertEquals(result, 0)

    os.unlink(fn2)

    #if ambari.properties file is absent then "ambari-server upgrade" should
    # fail
    (tf, fn) = tempfile.mkstemp()
    configDefaults.AMBARI_PROPERTIES_BACKUP_FILE = fn
    result = update_ambari_properties()
    self.assertNotEquals(result, 0)
    pass

  # update_ambari_properties() must be a no-op (return 0, never construct
  # Properties) when no properties file can be located (None/False/'').
  @patch("ambari_server.properties.Properties.__init__")
  @patch("ambari_server.serverConfiguration.search_file")
  def test_update_ambari_properties_negative_case(self, search_file_mock, properties_mock):
    search_file_mock.return_value = None
    #Call tested method
    self.assertEquals(0, update_ambari_properties())
    self.assertFalse(properties_mock.called)

    search_file_mock.return_value = False
    #Call tested method
    self.assertEquals(0, update_ambari_properties())
    self.assertFalse(properties_mock.called)

    search_file_mock.return_value = ''
    #Call tested method
    self.assertEquals(0, update_ambari_properties())
    self.assertFalse(properties_mock.called)
    pass

  @patch("ambari_server.serverConfiguration.get_conf_dir")
  def _test_update_ambari_properties_without_some_properties(self, get_conf_dir_mock):
    '''
    Checks: update_ambari_properties call should add ambari-server.user property if
    it's absent
    '''
    from ambari_server import serverConfiguration  # need to modify constants inside the module
    properties = ["server.jdbc.user.name=ambari-server\n",
                  "server.jdbc.user.passwd=/etc/ambari-server/conf/password.dat\n",
                  "java.home=/usr/jdk64/jdk1.6.0_31\n",
                  "server.os_type=redhat6\n"]

    get_conf_dir_mock.return_value = '/etc/ambari-server/conf'

    (tf1, fn1) = tempfile.mkstemp()
    os.close(tf1)
    (tf2, fn2) = tempfile.mkstemp()
    os.close(tf2)
    serverConfiguration.AMBARI_PROPERTIES_RPMSAVE_FILE = fn1
    serverConfiguration.AMBARI_PROPERTIES_FILE = fn2

    with open(serverConfiguration.AMBARI_PROPERTIES_RPMSAVE_FILE, 'w') as f:
      for line in properties:
        f.write(line)

    #Call tested method
    update_ambari_properties()

    ambari_properties = Properties()
    ambari_properties.load(open(fn2))

    # the run-as user must be added with default "root"
    self.assertTrue(NR_USER_PROPERTY in ambari_properties.keys())
    value = ambari_properties[NR_USER_PROPERTY]
    self.assertEqual(value, "root")

    self.assertTrue(OS_FAMILY_PROPERTY in ambari_properties.keys())

    os.unlink(fn2)
    pass

  # (decorator stack continues onto the next lines; mock args to the test
  # method below are injected bottom-up per mock.patch semantics)
  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch("resource_management.core.shell.call")
  @patch("ambari_server.serverSetup.verify_setup_allowed")
  @patch("sys.exit")
  @patch("ambari_server.serverSetup.get_YN_input")
  @patch("ambari_server.dbConfiguration.get_validated_string_input")
  @patch("ambari_server.dbConfiguration_linux.get_YN_input")
  @patch("ambari_server.dbConfiguration_linux.get_validated_string_input")
  @patch("ambari_server.dbConfiguration_linux.PGConfig._store_remote_properties")
  @patch("ambari_server.dbConfiguration_linux.LinuxDBMSConfig.ensure_jdbc_driver_installed")
  @patch("ambari_server.dbConfiguration_linux.read_password")
  @patch("ambari_server.serverSetup.check_jdbc_drivers")
  @patch("ambari_server.serverSetup.is_root")
  @patch("ambari_server.serverSetup.check_ambari_user")
  @patch("ambari_server.serverSetup.download_and_install_jdk")
  @patch("ambari_server.serverSetup.configure_os_settings")
  @patch('__builtin__.raw_input')
  @patch("ambari_server.serverSetup.disable_security_enhancements")
  # NOTE(review): line breaks/indentation below are restored from a
  # whitespace-mangled source; grouping of statements inside `with`/`try`
  # blocks is inferred — verify against upstream TestAmbariServer.py.
  # (the decorator stack for this test begins above this span)
  @patch("ambari_server.serverSetup.expand_jce_zip_file")
  @patch("ambari_server.serverSetup.logger")
  def test_setup_remote_db_wo_client(self, logger_mock, expand_jce_zip_file_mock, check_selinux_mock,
                                     raw_input, configure_os_settings_mock, download_jdk_mock,
                                     check_ambari_user_mock, is_root_mock, check_jdbc_drivers_mock,
                                     read_password_mock, ensure_jdbc_driver_installed_mock,
                                     store_remote_properties_mock,
                                     get_validated_string_input_0_mock, get_YN_input_0_mock,
                                     get_validated_string_input_mock, get_YN_input,
                                     exit_mock, verify_setup_allowed_method, run_os_command_mock):
    # Verifies setup() aborts with NonFatalException when a remote DB is
    # chosen but the client tooling is unavailable.
    args = MagicMock()

    args.jdbc_driver = None
    args.jdbc_db = None
    args.silent = False

    # delete attrs so the MagicMock reports them as unset
    del args.dbms
    del args.database_index
    del args.database_host
    del args.database_port
    del args.database_name
    del args.database_username
    del args.database_password
    del args.persistence_type

    raw_input.return_value = ""
    is_root_mock.return_value = True
    check_selinux_mock.return_value = (0, "")
    run_os_command_mock.return_value = 3,"",""  # non-zero rc: client check fails
    store_remote_properties_mock.return_value = 0
    get_YN_input.return_value = True
    get_validated_string_input_mock.side_effect = ["4"]
    get_validated_string_input_0_mock.side_effect = ["localhost", "5432", "ambari", "ambari", "admin"]
    get_YN_input_0_mock.return_value = False
    read_password_mock.return_value = "encrypted_bigdata"
    ensure_jdbc_driver_installed_mock.return_value = True
    check_jdbc_drivers_mock.return_value = 0
    check_ambari_user_mock.return_value = (0, False, 'user', None)
    download_jdk_mock.return_value = 0
    configure_os_settings_mock.return_value = 0
    verify_setup_allowed_method.return_value = 0
    expand_jce_zip_file_mock.return_value = 0

    try:
      setup(args)
      self.fail("Should throw exception")
    except NonFatalException as fe:
      # Expected
      self.assertTrue("Remote database setup aborted." in fe.reason)
    pass

  # Verifies _store_remote_properties() emits DBMS-specific JDBC properties:
  # Oracle uses the "internal" connection pool, MySQL uses "c3p0".
  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch("resource_management.core.shell.call")
  @patch("sys.exit")
  @patch("ambari_server.userInput.get_YN_input")
  @patch("ambari_commons.os_utils.is_root")
  @patch("ambari_server.dbConfiguration_linux.store_password_file")
  @patch("__builtin__.raw_input")
  def test_store_remote_properties(self, raw_input_mock, store_password_file_mock,
                                   is_root_mock, get_YN_input, exit_mock, run_os_command_mock):
    raw_input_mock.return_value = ""
    is_root_mock.return_value = True
    get_YN_input.return_value = False
    run_os_command_mock.return_value = 3,"",""
    store_password_file_mock.return_value = "encrypted_bigdata"

    import optparse
    args = optparse.Values()
    args.dbms = "oracle"
    args.database_host = "localhost"
    args.database_port = "1234"
    args.database_name = "ambari"
    args.postgres_schema = "ambari"
    args.sid_or_sname = "foo"
    args.database_username = "foo"
    args.database_password = "foo"

    properties0 = Properties()
    properties = Properties()

    factory = DBMSConfigFactory()
    dbConfig = factory.create(args, properties0)
    dbConfig._store_remote_properties(properties, None)

    # at least one server.jdbc.properties.* entry must have been written
    found = False
    for n in properties.propertyNames():
      if not found and n.startswith("server.jdbc.properties"):
        found = True
    self.assertTrue(found)

    # verify that some properties exist
    self.assertEquals("internal", properties.get_property(JDBC_CONNECTION_POOL_TYPE))

    # now try with MySQL instead of Oracle to verify that the properties are different
    args.dbms = "mysql"
    args.database_index = 2

    properties0 = Properties()
    properties = Properties()

    factory = DBMSConfigFactory()
    dbConfig = factory.create(args, properties0)
    dbConfig._store_remote_properties(properties, args)

    # verify MySQL properties
    self.assertEquals("c3p0", properties.get_property(JDBC_CONNECTION_POOL_TYPE))

  # get_ambari_properties(): returns -1 when no properties file is found,
  # otherwise loads key=value pairs from it (Linux variant).
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.serverConfiguration.find_properties_file")
  def test_get_ambari_properties(self, find_properties_file_mock):
    find_properties_file_mock.return_value = None
    rcode = get_ambari_properties()
    self.assertEqual(rcode, -1)

    tf1 = tempfile.NamedTemporaryFile()
    find_properties_file_mock.return_value = tf1.name

    prop_name = 'name'
    prop_value = 'val'

    with open(tf1.name, 'w') as fout:
      fout.write(prop_name + '=' + prop_value)
      fout.close()

    properties = get_ambari_properties()
    self.assertEqual(properties[prop_name], prop_value)
    pass

  # Windows variant of the same check; NamedTemporaryFile(delete=False) is
  # required because Windows cannot reopen an open temp file.
  @only_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.serverConfiguration.find_properties_file")
  def test_get_ambari_properties(self, find_properties_file):
    find_properties_file.return_value = None
    rcode = get_ambari_properties()
    self.assertEqual(rcode, -1)

    tf1 = tempfile.NamedTemporaryFile(delete=False)
    find_properties_file.return_value = tf1.name
    tf1.close()

    prop_name = 'name'
    prop_value = 'val'

    with open(tf1.name, 'w') as fout:
      fout.write(prop_name + '=' + prop_value)

    properties = get_ambari_properties()
    self.assertEqual(properties[prop_name], prop_value)
    self.assertEqual(properties.fileName, os.path.abspath(tf1.name))
    sys.stdout = sys.__stdout__
    pass

  # remove_file(): returns 1 when os.remove raises, 0 on success.
  @patch("os.path.exists")
  @patch("os.remove")
  @patch("ambari_commons.os_utils.print_warning_msg")
  def test_remove_file(self, printWarningMsgMock, removeMock, pathExistsMock):
    def side_effect():
      raise Exception(-1, "Failed to delete!")

    removeMock.side_effect = side_effect
    pathExistsMock.return_value = 1

    res = remove_file("/someNonExsistantDir/filename")
    self.assertEquals(res, 1)

    removeMock.side_effect = None
    res = remove_file("/someExsistantDir/filename")
    self.assertEquals(res, 0)

  # copy_file(): raises FatalException when shutil.copyfile fails,
  # succeeds silently otherwise.
  @patch("shutil.copyfile")
  def test_copy_file(self, shutilCopyfileMock):
    def side_effect():
      raise Exception(-1, "Failed to copy!")

    shutilCopyfileMock.side_effect = side_effect

    try:
      copy_file("/tmp/psswd", "/someNonExsistantDir/filename")
      self.fail("Exception on file not copied has not been thrown!")
    except FatalException:
      # Expected
      pass

    self.assertTrue(shutilCopyfileMock.called)

    shutilCopyfileMock.side_effect = None
    try:
      copy_file("/tmp/psswd", "/root/psswd")
    except FatalException:
      self.fail("Exception on file copied should not be thrown!")

    self.assertTrue(shutilCopyfileMock.called)
    pass

  # ensure_jdbc_driver_installed(): True when a driver jar is found; in
  # silent mode with no driver it must raise FatalException; in interactive
  # mode it re-asks, succeeding only if a driver appears on a later glob.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch("ambari_server.dbConfiguration.get_ambari_properties")
  @patch("ambari_server.dbConfiguration_linux.get_ambari_properties")
  @patch("ambari_server.dbConfiguration_linux.print_error_msg")
  @patch("ambari_server.dbConfiguration.print_error_msg")
  @patch("ambari_server.dbConfiguration_linux.print_warning_msg")
  @patch("__builtin__.raw_input")
  @patch("glob.glob")
  @patch("os.path.isdir")
  @patch("os.path.lexists")
  @patch("os.remove")
  def test_ensure_jdbc_drivers_installed(self, os_remove_mock, lexists_mock, isdir_mock, glob_mock,
                                         raw_input_mock, print_warning_msg, print_error_msg_mock,
                                         print_error_msg_2_mock,
                                         get_ambari_properties_mock, get_ambari_properties_2_mock):
    out = StringIO.StringIO()
    sys.stdout = out

    def reset_mocks():
      # fresh mock state + a fresh args object per scenario
      get_ambari_properties_mock.reset_mock()
      get_ambari_properties_2_mock.reset_mock()
      print_error_msg_mock.reset_mock()
      print_warning_msg.reset_mock()
      raw_input_mock.reset_mock()

      args = MagicMock()

      del args.database_index
      del args.persistence_type
      del args.silent
      del args.sid_or_sname
      del args.jdbc_url

      args.dbms = "oracle"

      return args

    # Check positive scenario
    drivers_list = [os.path.join(os.sep,'usr','share','java','ojdbc6.jar')]
    resources_dir = os.sep + 'tmp'

    props = Properties()
    props.process_pair(RESOURCES_DIR_PROPERTY, resources_dir)
    get_ambari_properties_2_mock.return_value = get_ambari_properties_mock.return_value = props

    factory = DBMSConfigFactory()

    args = reset_mocks()
    glob_mock.return_value = drivers_list
    isdir_mock.return_value = True
    lexists_mock.return_value = True

    dbms = factory.create(args, props)
    rcode = dbms.ensure_jdbc_driver_installed(props)

    self.assertTrue(rcode)

    # Check negative scenarios
    # Silent option, no drivers
    set_silent(True)

    args = reset_mocks()
    glob_mock.return_value = []

    failed = False
    try:
      dbms = factory.create(args, props)
      rcode = dbms.ensure_jdbc_driver_installed(props)
    except FatalException:
      failed = True

    self.assertTrue(print_error_msg_mock.called)
    self.assertTrue(failed)

    # Non-Silent option, no drivers
    set_silent(False)

    args = reset_mocks()
    glob_mock.return_value = []

    failed = False
    try:
      dbms = factory.create(args, props)
      rcode = dbms.ensure_jdbc_driver_installed(props)
    except FatalException:
      failed = True

    self.assertTrue(failed)
    self.assertTrue(print_error_msg_mock.called)

    # Non-Silent option, no drivers at first ask, present drivers after that
    args = reset_mocks()
    glob_mock.side_effect = [[], drivers_list, drivers_list]

    dbms = factory.create(args, props)
    rcode = dbms.ensure_jdbc_driver_installed(props)

    self.assertTrue(rcode)

    # Non-Silent option, no drivers at first ask, no drivers after that
    args = reset_mocks()
    glob_mock.side_effect = [[], []]

    failed = False
    try:
      dbms = factory.create(args, props)
      rcode = dbms.ensure_jdbc_driver_installed(props)
    except FatalException:
      failed = True

    self.assertTrue(failed)
    self.assertTrue(print_error_msg_mock.called)

    # Failed to copy_files
    # NOTE(review): `failed` is not reset to False here, so this assertion
    # can pass on the previous scenario's value — confirm upstream intent.
    args = reset_mocks()
    glob_mock.side_effect = [[], drivers_list, drivers_list]
    try:
      dbms = factory.create(args, props)
      rcode = dbms.ensure_jdbc_driver_installed(props)
    except FatalException:
      failed = True

    self.assertTrue(failed)

    sys.stdout = sys.__stdout__
    pass

  # check_jdbc_drivers(): when ojdbc6.jar exists, a single symlink
  # oracle-jdbc-driver.jar is created in the resources dir; with no driver
  # present no symlink is made.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value))
  @patch("ambari_server.dbConfiguration.get_ambari_properties")
  @patch("os.path.isdir")
  @patch("os.path.isfile")
  @patch("os.path.lexists")
  @patch("os.remove")
  @patch("os.symlink")
  def test_check_jdbc_drivers(self, os_symlink_mock, os_remove_mock, lexists_mock, isfile_mock,
                              isdir_mock, get_ambari_properties_mock):
    args = MagicMock()

    # Check positive scenario
    drivers_list = [os.path.join(os.sep,'usr','share','java','ojdbc6.jar')]
    resources_dir = os.sep + 'tmp'

    props = Properties()
    props.process_pair(RESOURCES_DIR_PROPERTY, resources_dir)
    get_ambari_properties_mock.return_value = props

    isdir_mock.return_value = True

    isfile_mock.side_effect = [True, False, False, False, False]

    del args.database_index
    del args.persistence_type
    del args.silent
    del args.sid_or_sname
    del args.jdbc_url

    lexists_mock.return_value = True

    check_jdbc_drivers(args)

    self.assertEquals(os_symlink_mock.call_count, 1)
    self.assertEquals(os_symlink_mock.call_args_list[0][0][0],
                      os.path.join(os.sep,'tmp','ojdbc6.jar'))
    self.assertEquals(os_symlink_mock.call_args_list[0][0][1],
                      os.path.join(os.sep,'tmp','oracle-jdbc-driver.jar'))

    # Check negative scenarios
    # No drivers deployed
    get_ambari_properties_mock.reset_mock()
    os_symlink_mock.reset_mock()

    isfile_mock.side_effect = [False, False, False, False, False]

    check_jdbc_drivers(args)

    self.assertFalse(os_symlink_mock.called)
    pass

  # Duplicate of the Linux test_get_ambari_properties above, additionally
  # asserting properties.fileName. NOTE(review): the duplicated method name
  # shadows the earlier definition in the class — confirm upstream intent.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.serverConfiguration.find_properties_file")
  def test_get_ambari_properties(self, find_properties_file_mock):
    find_properties_file_mock.return_value = None
    rcode = get_ambari_properties()
    self.assertEqual(rcode, -1)

    tf1 = tempfile.NamedTemporaryFile()
    find_properties_file_mock.return_value = tf1.name

    prop_name = 'name'
    prop_value = 'val'

    with open(tf1.name, 'w') as fout:
      fout.write(prop_name + '=' + prop_value)
      fout.close()

    properties = get_ambari_properties()
    self.assertEqual(properties[prop_name], prop_value)
    self.assertEqual(properties.fileName, os.path.abspath(tf1.name))

    sys.stdout = sys.__stdout__
    pass

  @only_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.serverConfiguration.find_properties_file")
  def test_get_ambari_properties(self, find_properties_file_mock):
    find_properties_file_mock.return_value = None
    rcode = get_ambari_properties()
    self.assertEqual(rcode, -1)

    tf1 = tempfile.NamedTemporaryFile(delete=False)
    find_properties_file_mock.return_value = tf1.name

    prop_name = 'name'
    prop_value = 'val'

    tf1.close()

    with open(tf1.name, 'w') as fout:
      fout.write(prop_name + '=' + prop_value)
      fout.close()

    properties = get_ambari_properties()
    self.assertEqual(properties[prop_name], prop_value)
    self.assertEqual(properties.fileName, os.path.abspath(tf1.name))

    sys.stdout = sys.__stdout__
    pass

  # parse_properties_file(): defaults persistence_type to "local" for an
  # empty file, and reads "remote" back from the property when set.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.serverConfiguration.check_database_name_property")
  @patch("ambari_server.serverConfiguration.find_properties_file")
  def test_parse_properties_file(self, find_properties_file_mock, check_database_name_property_mock):
    check_database_name_property_mock.return_value = 1

    tf1 = tempfile.NamedTemporaryFile(mode='r')
    find_properties_file_mock.return_value = tf1.name

    args = MagicMock()
    parse_properties_file(args)
    self.assertEquals(args.persistence_type, "local")

    with open(tf1.name, 'w') as fout:
      fout.write("\n")
      fout.write(PERSISTENCE_TYPE_PROPERTY + "=remote")

    args = MagicMock()
    parse_properties_file(args)
    self.assertEquals(args.persistence_type, "remote")
    pass

  # OracleConfig must decrypt an ${alias=...} password via the persisted
  # master key and expose the plaintext credentials.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("os.path.isabs")
  @patch("ambari_server.dbConfiguration.decrypt_password_for_alias")
  @patch("ambari_server.dbConfiguration_linux.get_ambari_properties")
  def test_configure_database_username_password_masterkey_persisted(self,
                                                                    get_ambari_properties_method,
                                                                    decrypt_password_for_alias_method,
                                                                    path_isabs_method):
    out = StringIO.StringIO()
    sys.stdout = out

    properties = Properties()
    properties.process_pair(JDBC_USER_NAME_PROPERTY, "fakeuser")
    properties.process_pair(JDBC_PASSWORD_PROPERTY, "${alias=somealias}")
    properties.process_pair(JDBC_DATABASE_NAME_PROPERTY, "fakedbname")
    properties.process_pair(SECURITY_KEY_IS_PERSISTED, "True")

    get_ambari_properties_method.return_value = properties
    decrypt_password_for_alias_method.return_value = "falepasswd"
    args = MagicMock()
    args.master_key = None

    del args.database_index
    del args.dbms
    del args.database_host
    del args.database_port
    del args.database_name
    del args.database_username
    del args.database_password
    del args.sid_or_sname
    del args.jdbc_url

    dbms = OracleConfig(args, properties, "local")

    self.assertTrue(decrypt_password_for_alias_method.called)
    self.assertEquals("fakeuser", dbms.database_username)
    self.assertEquals("falepasswd", dbms.database_password)

    sys.stdout = sys.__stdout__
    pass

  # _configure_database_password(): returns the password read from the
  # prompt, consistently across repeated calls.
  @not_for_platform(PLATFORM_WINDOWS)
  @patch("ambari_server.dbConfiguration_linux.read_password")
  def test_configure_database_password(self, read_password_method):
    out = StringIO.StringIO()
    sys.stdout = out

    read_password_method.return_value = "fakepasswd"

    result = LinuxDBMSConfig._configure_database_password(True)
    self.assertTrue(read_password_method.called)
    self.assertEquals("fakepasswd", result)

    result = LinuxDBMSConfig._configure_database_password(True)
    self.assertEquals("fakepasswd", result)

    result = LinuxDBMSConfig._configure_database_password(True)
    self.assertEquals("fakepasswd", result)

    sys.stdout = sys.__stdout__
    pass

  # In silent mode the supplied default password is returned without prompting.
  @not_for_platform(PLATFORM_WINDOWS)
  def test_configure_database_password_silent(self):
    out = StringIO.StringIO()
    sys.stdout = out

    set_silent(True)

    result = LinuxDBMSConfig._configure_database_password(True, "CustomDefaultPasswd")
    self.assertEquals("CustomDefaultPasswd", result)

    sys.stdout = sys.__stdout__
    pass

  # (decorator stack continues onto the next lines)
  @patch("os.path.exists")
  @patch("ambari_server.setupSecurity.get_is_secure")
  @patch("ambari_server.setupSecurity.get_is_persisted")
  @patch("ambari_server.setupSecurity.remove_password_file")
  @patch("ambari_server.setupSecurity.save_passwd_for_alias")
  @patch("ambari_server.setupSecurity.read_master_key")
  @patch("ambari_server.setupSecurity.read_ambari_user")
  @patch("ambari_server.setupSecurity.get_master_key_location")
  @patch("ambari_server.setupSecurity.update_properties_2")
  @patch("ambari_server.setupSecurity.save_master_key")
  @patch("ambari_server.setupSecurity.get_YN_input")
  # NOTE(review): line breaks/indentation below are restored from a
  # whitespace-mangled source — verify against upstream TestAmbariServer.py.
  # (the decorator stack for this test begins above this span)
  @patch("ambari_server.setupSecurity.search_file")
  @patch("ambari_server.setupSecurity.get_ambari_properties")
  @patch("ambari_server.setupSecurity.is_root")
  def test_setup_master_key_not_persist(self, is_root_method,
                                        get_ambari_properties_method, search_file_message,
                                        get_YN_input_method, save_master_key_method,
                                        update_properties_method, get_master_key_location_method,
                                        read_ambari_user_method, read_master_key_method,
                                        save_passwd_for_alias_method, remove_password_file_method,
                                        get_is_persisted_method, get_is_secure_method, exists_mock):
    # When the user declines to persist the master key, setup_master_key()
    # must alias-encrypt all stored passwords but never save the key itself.
    is_root_method.return_value = True

    p = Properties()
    FAKE_PWD_STRING = "fakepasswd"
    p.process_pair(JDBC_PASSWORD_PROPERTY, FAKE_PWD_STRING)
    p.process_pair(LDAP_MGR_PASSWORD_PROPERTY, FAKE_PWD_STRING)
    p.process_pair(SSL_TRUSTSTORE_PASSWORD_PROPERTY, FAKE_PWD_STRING)
    p.process_pair(JDBC_RCA_PASSWORD_FILE_PROPERTY, FAKE_PWD_STRING)

    get_ambari_properties_method.return_value = p
    read_master_key_method.return_value = "aaa"
    get_YN_input_method.return_value = False  # user says "do not persist"
    read_ambari_user_method.return_value = None
    save_passwd_for_alias_method.return_value = 0
    get_is_persisted_method.return_value = (True, "filepath")
    get_is_secure_method.return_value = False
    exists_mock.return_value = False

    options = self._create_empty_options_mock()

    setup_master_key(options)

    self.assertTrue(get_YN_input_method.called)
    self.assertTrue(read_master_key_method.called)
    self.assertTrue(read_ambari_user_method.called)
    self.assertTrue(update_properties_method.called)
    self.assertFalse(save_master_key_method.called)
    self.assertTrue(save_passwd_for_alias_method.called)
    self.assertEquals(3, save_passwd_for_alias_method.call_count)
    self.assertTrue(remove_password_file_method.called)

    result_expected = {JDBC_PASSWORD_PROPERTY:
                         get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
                       JDBC_RCA_PASSWORD_FILE_PROPERTY:
                         get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
                       LDAP_MGR_PASSWORD_PROPERTY:
                         get_alias_string(LDAP_MGR_PASSWORD_ALIAS),
                       SSL_TRUSTSTORE_PASSWORD_PROPERTY:
                         get_alias_string(SSL_TRUSTSTORE_PASSWORD_ALIAS),
                       SECURITY_IS_ENCRYPTION_ENABLED: 'true'}

    # compare the updated property set (order-independent)
    sorted_x = sorted(result_expected.iteritems(), key=operator.itemgetter(0))
    sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
                      key=operator.itemgetter(0))
    self.assertEquals(sorted_x, sorted_y)
    pass

  # When the user agrees to persist, the master key is saved and the JDBC
  # password is replaced with its alias reference.
  @patch("ambari_server.setupSecurity.save_passwd_for_alias")
  @patch("os.path.exists")
  @patch("ambari_server.setupSecurity.get_is_secure")
  @patch("ambari_server.setupSecurity.get_is_persisted")
  @patch("ambari_server.setupSecurity.read_master_key")
  @patch("ambari_server.setupSecurity.read_ambari_user")
  @patch("ambari_server.setupSecurity.get_master_key_location")
  @patch("ambari_server.setupSecurity.update_properties_2")
  @patch("ambari_server.setupSecurity.save_master_key")
  @patch("ambari_server.setupSecurity.get_YN_input")
  @patch("ambari_server.serverConfiguration.search_file")
  @patch("ambari_server.setupSecurity.get_ambari_properties")
  @patch("ambari_server.setupSecurity.is_root")
  def test_setup_master_key_persist(self, is_root_method,
                                    get_ambari_properties_method, search_file_message,
                                    get_YN_input_method, save_master_key_method,
                                    update_properties_method, get_master_key_location_method,
                                    read_ambari_user_method, read_master_key_method,
                                    get_is_persisted_method, get_is_secure_method, exists_mock,
                                    save_passwd_for_alias_method):
    is_root_method.return_value = True

    p = Properties()
    FAKE_PWD_STRING = "fakepasswd"
    p.process_pair(JDBC_PASSWORD_PROPERTY, FAKE_PWD_STRING)

    get_ambari_properties_method.return_value = p
    search_file_message.return_value = "propertiesfile"
    read_master_key_method.return_value = "aaa"
    get_YN_input_method.side_effect = [True, False]  # persist=yes, second prompt=no
    read_ambari_user_method.return_value = None
    get_is_persisted_method.return_value = (True, "filepath")
    get_is_secure_method.return_value = False
    exists_mock.return_value = False
    save_passwd_for_alias_method.return_value = 0

    options = self._create_empty_options_mock()

    setup_master_key(options)

    self.assertTrue(get_YN_input_method.called)
    self.assertTrue(read_master_key_method.called)
    self.assertTrue(read_ambari_user_method.called)
    self.assertTrue(update_properties_method.called)
    self.assertTrue(save_master_key_method.called)

    result_expected = {JDBC_PASSWORD_PROPERTY:
                         get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
                       SECURITY_IS_ENCRYPTION_ENABLED: 'true'}

    sorted_x = sorted(result_expected.iteritems(), key=operator.itemgetter(0))
    sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
                      key=operator.itemgetter(0))
    self.assertEquals(sorted_x, sorted_y)
    pass

  # Re-running setup_master_key() on already-aliased properties with a
  # persisted key: passwords are re-read via their aliases and re-saved.
  @patch("ambari_server.setupSecurity.read_master_key")
  @patch("ambari_server.setupSecurity.remove_password_file")
  @patch("os.path.exists")
  @patch("ambari_server.setupSecurity.read_ambari_user")
  @patch("ambari_server.setupSecurity.get_master_key_location")
  @patch("ambari_server.setupSecurity.save_passwd_for_alias")
  @patch("ambari_server.setupSecurity.read_passwd_for_alias")
  @patch("ambari_server.setupSecurity.update_properties_2")
  @patch("ambari_server.setupSecurity.save_master_key")
  @patch("ambari_server.setupSecurity.get_validated_string_input")
  @patch("ambari_server.setupSecurity.get_YN_input")
  @patch("ambari_server.setupSecurity.search_file")
  @patch("ambari_server.setupSecurity.get_ambari_properties")
  @patch("ambari_server.setupSecurity.is_root")
  def test_reset_master_key_persisted(self, is_root_method,
                                      get_ambari_properties_method, search_file_message,
                                      get_YN_input_method, get_validated_string_input_method,
                                      save_master_key_method, update_properties_method,
                                      read_passwd_for_alias_method, save_passwd_for_alias_method,
                                      get_master_key_location_method,
                                      read_ambari_user_method, exists_mock,
                                      remove_password_file_method, read_master_key_method):
    # Testing call under root
    is_root_method.return_value = True

    search_file_message.return_value = "filepath"
    read_ambari_user_method.return_value = None

    p = Properties()
    FAKE_PWD_STRING = '${alias=fakealias}'
    p.process_pair(JDBC_PASSWORD_PROPERTY, FAKE_PWD_STRING)
    p.process_pair(LDAP_MGR_PASSWORD_PROPERTY, FAKE_PWD_STRING)
    p.process_pair(SSL_TRUSTSTORE_PASSWORD_PROPERTY, FAKE_PWD_STRING)
    p.process_pair(JDBC_RCA_PASSWORD_FILE_PROPERTY, FAKE_PWD_STRING)

    get_ambari_properties_method.return_value = p

    get_YN_input_method.side_effect = [True, True]
    read_master_key_method.return_value = "aaa"
    read_passwd_for_alias_method.return_value = "fakepassword"
    save_passwd_for_alias_method.return_value = 0
    exists_mock.return_value = False

    options = self._create_empty_options_mock()

    setup_master_key(options)

    self.assertTrue(save_master_key_method.called)
    self.assertTrue(get_YN_input_method.called)
    self.assertTrue(read_master_key_method.called)
    self.assertTrue(update_properties_method.called)
    self.assertTrue(read_passwd_for_alias_method.called)
    # NOTE(review): assertTrue(3, ...) only checks truthiness of 3 — the call
    # counts are not actually verified here; confirm upstream intent.
    self.assertTrue(3, read_passwd_for_alias_method.call_count)
    self.assertTrue(3, save_passwd_for_alias_method.call_count)

    result_expected = {JDBC_PASSWORD_PROPERTY:
                         get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
                       JDBC_RCA_PASSWORD_FILE_PROPERTY:
                         get_alias_string(JDBC_RCA_PASSWORD_ALIAS),
                       LDAP_MGR_PASSWORD_PROPERTY:
                         get_alias_string(LDAP_MGR_PASSWORD_ALIAS),
                       SSL_TRUSTSTORE_PASSWORD_PROPERTY:
                         get_alias_string(SSL_TRUSTSTORE_PASSWORD_ALIAS),
                       SECURITY_IS_ENCRYPTION_ENABLED: 'true'}

    sorted_x = sorted(result_expected.iteritems(), key=operator.itemgetter(0))
    sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
                      key=operator.itemgetter(0))
    self.assertEquals(sorted_x, sorted_y)
    pass

  # (decorator stack continues onto the next lines)
  @patch("os.path.isdir", new = MagicMock(return_value=True))
  @patch("os.access", new = MagicMock(return_value=True))
  @patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell",
                new = MagicMock(return_value = 'test' + os.pathsep + 'path12'))
  @patch("ambari_server.serverUtils.is_server_runing")
  @patch("ambari_commons.os_utils.run_os_command")
  @patch("ambari_server.setupSecurity.generate_env")
  @patch("ambari_server.setupSecurity.ensure_can_start_under_current_user")
  # NOTE(review): line breaks/indentation below are restored from a
  # whitespace-mangled source — verify against upstream TestAmbariServer.py.
  # (the decorator stack for this test begins above this span)
  @patch("ambari_server.serverConfiguration.read_ambari_user")
  @patch("ambari_server.dbConfiguration.ensure_jdbc_driver_is_installed")
  @patch("ambari_server.serverConfiguration.parse_properties_file")
  @patch("ambari_server.serverConfiguration.get_ambari_properties")
  @patch("ambari_server.serverConfiguration.get_java_exe_path")
  @patch("sys.exit")
  def test_check_database(self, exitMock, getJavaExePathMock, getAmbariPropertiesMock,
                          parsePropertiesFileMock, ensureDriverInstalledMock, readAmbariUserMock,
                          ensureCanStartUnderCurrentUserMock, generateEnvMock, runOSCommandMock,
                          isServerRunningMock):
    # check_database() must launch the DatabaseConsistencyChecker JVM with the
    # escaped classpath mocked on the class decorator ('test:path12').
    properties = Properties()
    properties.process_pair("server.jdbc.database", "embedded")

    getJavaExePathMock.return_value = "/path/to/java"
    getAmbariPropertiesMock.return_value = properties
    readAmbariUserMock.return_value = "test_user"
    ensureCanStartUnderCurrentUserMock.return_value = "test_user"
    generateEnvMock.return_value = {}
    runOSCommandMock.return_value = (0, "", "")
    isServerRunningMock.return_value = (False, 1)

    check_database(properties)

    self.assertTrue(getJavaExePathMock.called)
    self.assertTrue(readAmbariUserMock.called)
    self.assertTrue(ensureCanStartUnderCurrentUserMock.called)
    self.assertTrue(generateEnvMock.called)

    self.assertEquals(runOSCommandMock.call_args[0][0],
                      '/path/to/java -cp test:path12 org.apache.ambari.server.checks.DatabaseConsistencyChecker')
    pass

  # (decorator stack continues onto the next lines)
  @patch("ambari_server.setupSecurity.get_is_persisted")
  @patch("ambari_server.setupSecurity.get_is_secure")
  @patch("ambari_server.setupSecurity.remove_password_file")
  @patch("os.path.exists")
  @patch("ambari_server.setupSecurity.read_ambari_user")
  @patch("ambari_server.setupSecurity.get_master_key_location")
  @patch("ambari_server.setupSecurity.save_passwd_for_alias")
  @patch("ambari_server.setupSecurity.read_passwd_for_alias")
  @patch("ambari_server.setupSecurity.update_properties_2")
  @patch("ambari_server.setupSecurity.save_master_key")
  @patch("ambari_server.setupSecurity.get_validated_string_input")
@patch("ambari_server.setupSecurity.get_YN_input") @patch("ambari_server.setupSecurity.search_file") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.setupSecurity.is_root") def test_reset_master_key_not_persisted(self, is_root_method, get_ambari_properties_method, search_file_message, get_YN_input_method, get_validated_string_input_method, save_master_key_method, update_properties_method, read_passwd_for_alias_method, save_passwd_for_alias_method, get_master_key_location_method, read_ambari_user_method, exists_mock, remove_password_file_method, get_is_secure_method, get_is_persisted_method): is_root_method.return_value = True search_file_message.return_value = False read_ambari_user_method.return_value = None p = Properties() FAKE_PWD_STRING = '${alias=fakealias}' p.process_pair(JDBC_PASSWORD_PROPERTY, FAKE_PWD_STRING) p.process_pair(LDAP_MGR_PASSWORD_PROPERTY, FAKE_PWD_STRING) p.process_pair(SSL_TRUSTSTORE_PASSWORD_PROPERTY, FAKE_PWD_STRING) p.process_pair(JDBC_RCA_PASSWORD_FILE_PROPERTY, FAKE_PWD_STRING) get_ambari_properties_method.return_value = p get_YN_input_method.side_effect = [True, False] get_validated_string_input_method.return_value = "aaa" read_passwd_for_alias_method.return_value = "fakepassword" save_passwd_for_alias_method.return_value = 0 exists_mock.return_value = False get_is_secure_method.return_value = True get_is_persisted_method.return_value = (True, "filePath") options = self._create_empty_options_mock() setup_master_key(options) self.assertFalse(save_master_key_method.called) self.assertTrue(get_YN_input_method.called) self.assertTrue(get_validated_string_input_method.called) self.assertTrue(update_properties_method.called) self.assertTrue(read_passwd_for_alias_method.called) self.assertTrue(3, read_passwd_for_alias_method.call_count) self.assertTrue(3, save_passwd_for_alias_method.call_count) self.assertFalse(save_master_key_method.called) result_expected = {JDBC_PASSWORD_PROPERTY: 
get_alias_string(JDBC_RCA_PASSWORD_ALIAS), JDBC_RCA_PASSWORD_FILE_PROPERTY: get_alias_string(JDBC_RCA_PASSWORD_ALIAS), LDAP_MGR_PASSWORD_PROPERTY: get_alias_string(LDAP_MGR_PASSWORD_ALIAS), SSL_TRUSTSTORE_PASSWORD_PROPERTY: get_alias_string(SSL_TRUSTSTORE_PASSWORD_ALIAS), SECURITY_IS_ENCRYPTION_ENABLED: 'true'} sorted_x = sorted(result_expected.iteritems(), key=operator.itemgetter(0)) sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(), key=operator.itemgetter(0)) self.assertEquals(sorted_x, sorted_y) pass @staticmethod @OsFamilyFuncImpl(OSConst.WINSRV_FAMILY) def _init_test_ldap_properties_map_invalid_input_1(): ldap_properties_map = \ { LDAP_PRIMARY_URL_PROPERTY: "a:3", "authentication.ldap.secondaryUrl": "b:2", "authentication.ldap.useSSL": "false", "authentication.ldap.usernameAttribute": "user", "authentication.ldap.baseDn": "uid", "authentication.ldap.bindAnonymously": "true", "ldap.sync.username.collision.behavior": "skip", "authentication.ldap.referral": "follow", "client.security": "ldap", "ambari.ldap.isConfigured": "true" } return ldap_properties_map @staticmethod @OsFamilyFuncImpl(OsFamilyImpl.DEFAULT) def _init_test_ldap_properties_map_invalid_input_1(): ldap_properties_map = \ { LDAP_PRIMARY_URL_PROPERTY: "a:3", "authentication.ldap.secondaryUrl": "b:2", "authentication.ldap.useSSL": "false", "authentication.ldap.userObjectClass": "user", "authentication.ldap.usernameAttribute": "uid", "authentication.ldap.groupObjectClass": "group", "authentication.ldap.groupNamingAttr": "cn", "authentication.ldap.groupMembershipAttr": "member", "authentication.ldap.dnAttribute": "dn", "authentication.ldap.baseDn": "base", "authentication.ldap.referral": "follow", "authentication.ldap.bindAnonymously": "true", "ldap.sync.username.collision.behavior": "skip", "client.security": "ldap", "ambari.ldap.isConfigured": "true" } return ldap_properties_map @staticmethod @OsFamilyFuncImpl(OSConst.WINSRV_FAMILY) def 
_init_test_ldap_properties_map_invalid_input_2(): ldap_properties_map = \ { LDAP_PRIMARY_URL_PROPERTY: "a:3", "authentication.ldap.useSSL": "false", "authentication.ldap.usernameAttribute": "user", "authentication.ldap.baseDn": "uid", "authentication.ldap.bindAnonymously": "true", "authentication.ldap.referral": "follow", "client.security": "ldap", "ambari.ldap.isConfigured": "true" } return ldap_properties_map @staticmethod @OsFamilyFuncImpl(OsFamilyImpl.DEFAULT) def _init_test_ldap_properties_map_invalid_input_2(): ldap_properties_map = \ { LDAP_PRIMARY_URL_PROPERTY: "a:3", "authentication.ldap.useSSL": "false", "authentication.ldap.userObjectClass": "user", "authentication.ldap.usernameAttribute": "uid", "authentication.ldap.groupObjectClass": "group", "authentication.ldap.groupNamingAttr": "cn", "authentication.ldap.groupMembershipAttr": "member", "authentication.ldap.dnAttribute": "dn", "authentication.ldap.baseDn": "base", "authentication.ldap.referral": "follow", "authentication.ldap.bindAnonymously": "true", "ldap.sync.username.collision.behavior": "skip", "client.security": "ldap", "ambari.ldap.isConfigured": "true" } return ldap_properties_map @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("__builtin__.raw_input") @patch("ambari_server.setupSecurity.get_is_secure") @patch("ambari_server.setupSecurity.get_YN_input") @patch("ambari_server.setupSecurity.update_properties_2") @patch("ambari_server.setupSecurity.search_file") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.setupSecurity.is_root") @patch("ambari_server.setupSecurity.logger") def test_setup_ldap_invalid_input(self, logger_mock, is_root_method, get_ambari_properties_method, search_file_message, update_properties_method, get_YN_input_method, get_is_secure_method, raw_input_mock): out = StringIO.StringIO() sys.stdout = out is_root_method.return_value = True search_file_message.return_value = "filepath" configs = 
{SECURITY_MASTER_KEY_LOCATION: "filepath", SECURITY_KEYS_DIR: tempfile.gettempdir(), SECURITY_IS_ENCRYPTION_ENABLED: "true" } get_ambari_properties_method.return_value = configs raw_input_mock.side_effect = ['a:3', 'b:b', 'hody', 'b:2', 'false', 'user', 'uid', 'group', 'cn', 'member', 'dn', 'base', 'follow', 'true', 'skip'] set_silent(False) get_YN_input_method.return_value = True options = self._create_empty_options_mock() setup_ldap(options) ldap_properties_map = TestAmbariServer._init_test_ldap_properties_map_invalid_input_1() sorted_x = sorted(ldap_properties_map.iteritems(), key=operator.itemgetter(0)) sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(), key=operator.itemgetter(0)) self.assertEquals(sorted_x, sorted_y) self.assertTrue(get_YN_input_method.called) self.assertEquals(15, raw_input_mock.call_count) raw_input_mock.reset_mock() raw_input_mock.side_effect = ['a:3', '', 'b:2', 'false', 'user', 'uid', 'group', 'cn', 'member', 'dn', 'base', 'follow', 'true', 'skip'] setup_ldap(options) ldap_properties_map = TestAmbariServer._init_test_ldap_properties_map_invalid_input_2() sorted_x = sorted(ldap_properties_map.iteritems(), key=operator.itemgetter(0)) sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(), key=operator.itemgetter(0)) self.assertEquals(sorted_x, sorted_y) self.assertEquals(14, raw_input_mock.call_count) sys.stdout = sys.__stdout__ pass @staticmethod @OsFamilyFuncImpl(OSConst.WINSRV_FAMILY) def _init_test_ldap_properties_map(): ldap_properties_map = \ { "authentication.ldap.primaryUrl": "test", "authentication.ldap.secondaryUrl": "test", "authentication.ldap.useSSL": "false", "authentication.ldap.usernameAttribute": "test", "authentication.ldap.baseDn": "test", "authentication.ldap.bindAnonymously": "false", "ldap.sync.username.collision.behavior": "skip", "authentication.ldap.managerDn": "test", "authentication.ldap.referral": "test", "client.security": "ldap", LDAP_MGR_PASSWORD_PROPERTY: 
"ldap-password.dat", "ambari.ldap.isConfigured": "true" } return ldap_properties_map @staticmethod @OsFamilyFuncImpl(OsFamilyImpl.DEFAULT) def _init_test_ldap_properties_map(): ldap_properties_map = \ { "authentication.ldap.primaryUrl": "test", "authentication.ldap.secondaryUrl": "test", "authentication.ldap.useSSL": "false", "authentication.ldap.userObjectClass": "test", "authentication.ldap.usernameAttribute": "test", "authentication.ldap.baseDn": "test", "authentication.ldap.bindAnonymously": "false", "ldap.sync.username.collision.behavior": "skip", "authentication.ldap.managerDn": "test", "authentication.ldap.groupObjectClass": "test", "authentication.ldap.groupMembershipAttr": "test", "authentication.ldap.groupNamingAttr": "test", "authentication.ldap.dnAttribute": "test", "authentication.ldap.referral": "test", "client.security": "ldap", LDAP_MGR_PASSWORD_PROPERTY: "ldap-password.dat", "ambari.ldap.isConfigured": "true" } return ldap_properties_map @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.setupSecurity.get_is_secure") @patch("ambari_server.setupSecurity.encrypt_password") @patch("ambari_server.setupSecurity.save_passwd_for_alias") @patch("ambari_server.setupSecurity.get_YN_input") @patch("ambari_server.setupSecurity.update_properties_2") @patch("ambari_server.setupSecurity.configure_ldap_password") @patch("ambari_server.setupSecurity.get_validated_string_input") @patch("ambari_server.serverConfiguration.search_file") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.setupSecurity.is_root") @patch("ambari_server.setupSecurity.read_password") @patch("os.path.exists") @patch("ambari_server.setupSecurity.logger") def test_setup_ldap(self, logger_mock, exists_method, read_password_method, is_root_method, get_ambari_properties_method, search_file_message, get_validated_string_input_method, configure_ldap_password_method, update_properties_method, 
get_YN_input_method, save_passwd_for_alias_method, encrypt_password_method, get_is_secure_method): out = StringIO.StringIO() sys.stdout = out options = self._create_empty_options_mock() # Testing call under non-root is_root_method.return_value = False try: setup_ldap(options) self.fail("Should throw exception") except FatalException as fe: # Expected self.assertTrue("root-level" in fe.reason) pass # Testing call under root is_root_method.return_value = True search_file_message.return_value = "filepath" configs = {SECURITY_MASTER_KEY_LOCATION: "filepath", SECURITY_KEYS_DIR: tempfile.gettempdir(), SECURITY_IS_ENCRYPTION_ENABLED: "true" } get_ambari_properties_method.return_value = configs configure_ldap_password_method.return_value = "password" save_passwd_for_alias_method.return_value = 0 encrypt_password_method.return_value = get_alias_string(LDAP_MGR_PASSWORD_ALIAS) def yn_input_side_effect(*args, **kwargs): if 'TrustStore' in args[0]: return False else: return True get_YN_input_method.side_effect = [True, ] def valid_input_side_effect(*args, **kwargs): if 'Bind anonymously' in args[0]: return 'false' if 'username collisions' in args[0]: return 'skip' if args[1] == "true" or args[1] == "false": return args[1] else: return "test" get_validated_string_input_method.side_effect = valid_input_side_effect setup_ldap(options) ldap_properties_map = TestAmbariServer._init_test_ldap_properties_map() sorted_x = sorted(ldap_properties_map.iteritems(), key=operator.itemgetter(0)) sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(), key=operator.itemgetter(0)) self.assertEquals(sorted_x, sorted_y) self.assertTrue(update_properties_method.called) self.assertTrue(configure_ldap_password_method.called) self.assertTrue(get_validated_string_input_method.called) self.assertTrue(get_YN_input_method.called) # truststore not found case def os_path_exists(*args, **kwargs): if "bogus" in args[0]: return False else: return True pass def input_enable_ssl(*args, **kwargs): 
      # Continuation of the nested input_enable_ssl() helper defined on the
      # previous source line: scripted answers for get_validated_string_input.
      if 'Bind anonymously' in args[0]:
        return 'false'
      if "SSL" in args[0]:
        return "true"
      if "Path to TrustStore file" in args[0]:
        # First two truststore-path prompts get an invalid path to exercise
        # the retry loop; the third succeeds.
        if input_enable_ssl.path_counter < 2:
          input_enable_ssl.path_counter += 1
          return "bogus"
        else:
          return "valid"
      if args[1] == "true" or args[1] == "false":
        # Echo back boolean defaults unchanged.
        return args[1]
      else:
        return "test"
      pass

    input_enable_ssl.path_counter = 0
    exists_method.side_effect = os_path_exists
    get_validated_string_input_method.side_effect = input_enable_ssl
    read_password_method.return_value = "password"
    get_YN_input_method.reset_mock()
    get_YN_input_method.side_effect = [True, True]
    update_properties_method.reset_mock()

    options.ldap_url = None
    options.ldap_member_attr = None

    setup_ldap(options)

    self.assertTrue(read_password_method.called)

    # Expected properties when SSL is enabled and a truststore is configured.
    ldap_properties_map = \
      {
        "authentication.ldap.primaryUrl": "test",
        "authentication.ldap.secondaryUrl": "test",
        "authentication.ldap.useSSL": "true",
        "authentication.ldap.usernameAttribute": "test",
        "authentication.ldap.baseDn": "test",
        "authentication.ldap.dnAttribute": "test",
        "authentication.ldap.bindAnonymously": "false",
        "ldap.sync.username.collision.behavior": "skip",
        "authentication.ldap.managerDn": "test",
        "client.security": "ldap",
        "ssl.trustStore.type": "test",
        "ssl.trustStore.path": "valid",
        "ssl.trustStore.password": "password",
        LDAP_MGR_PASSWORD_PROPERTY: get_alias_string(LDAP_MGR_PASSWORD_ALIAS)
      }

    sorted_x = sorted(ldap_properties_map.iteritems(), key=operator.itemgetter(0))
    sorted_y = sorted(update_properties_method.call_args[0][1].iteritems(),
                      key=operator.itemgetter(0))
    # NOTE(review): sorted_x and sorted_y are computed but never compared —
    # an assertEquals(sorted_x, sorted_y) appears to be missing here (the
    # earlier, non-SSL branch of this test does assert it).  Confirm the
    # comparison passes before adding it.

    sys.stdout = sys.__stdout__
    pass

  @patch("urllib2.urlopen")
  @patch("ambari_server.setupSecurity.get_validated_string_input")
  @patch("ambari_server.setupSecurity.get_ambari_properties")
  @patch("ambari_server.setupSecurity.is_server_runing")
  @patch("ambari_server.setupSecurity.is_root")
  @patch("ambari_server.setupSecurity.logger")
  def test_ldap_sync_all(self, logger_mock, is_root_method,
                         is_server_runing_mock,
                         get_ambari_properties_mock,
get_validated_string_input_mock, urlopen_mock): is_root_method.return_value = True is_server_runing_mock.return_value = (True, 0) properties = Properties() properties.process_pair(IS_LDAP_CONFIGURED, 'true') properties.process_pair(CLIENT_API_PORT_PROPERTY, '8080') get_ambari_properties_mock.return_value = properties get_validated_string_input_mock.side_effect = ['admin', 'admin'] response = MagicMock() response.getcode.side_effect = [201, 200, 200] response.read.side_effect = ['{"resources" : [{"href" : "http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}', '{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}', '{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}'] urlopen_mock.return_value = response options = self._create_empty_options_mock() options.ldap_sync_all = True options.ldap_sync_existing = False sync_ldap(options) url = '{0}://{1}:{2!s}{3}'.format('http', '127.0.0.1', '8080', '/api/v1/ldap_sync_events') request = urlopen_mock.call_args_list[0][0][0] self.assertEquals(url, str(request.get_full_url())) self.assertEquals('[{"Event": {"specs": [{"principal_type": "users", "sync_type": "all"}, {"principal_type": "groups", "sync_type": "all"}]}}]', request.data) self.assertTrue(response.getcode.called) self.assertTrue(response.read.called) pass @patch("__builtin__.open") @patch("os.path.exists") @patch("urllib2.urlopen") @patch("ambari_server.setupSecurity.get_validated_string_input") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.setupSecurity.is_server_runing") @patch("ambari_server.setupSecurity.is_root") @patch("ambari_server.setupSecurity.logger") def test_ldap_sync_users(self, 
logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock, get_validated_string_input_mock, urlopen_mock, os_path_exists_mock, open_mock): os_path_exists_mock.return_value = 1 f = MagicMock() f.__enter__().read.return_value = "bob, tom" open_mock.return_value = f is_root_method.return_value = True is_server_runing_mock.return_value = (True, 0) properties = Properties() properties.process_pair(IS_LDAP_CONFIGURED, 'true') get_ambari_properties_mock.return_value = properties get_validated_string_input_mock.side_effect = ['admin', 'admin'] response = MagicMock() response.getcode.side_effect = [201, 200, 200] response.read.side_effect = ['{"resources" : [{"href" : "http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}', '{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}', '{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}'] urlopen_mock.return_value = response options = self._create_empty_options_mock() options.ldap_sync_all = False options.ldap_sync_existing = False options.ldap_sync_users = 'users.txt' options.ldap_sync_groups = None sync_ldap(options) request = urlopen_mock.call_args_list[0][0][0] self.assertEquals('[{"Event": {"specs": [{"principal_type": "users", "sync_type": "specific", "names": "bob, tom"}]}}]', request.data) self.assertTrue(response.getcode.called) self.assertTrue(response.read.called) pass @patch("__builtin__.open") @patch("os.path.exists") @patch("urllib2.urlopen") @patch("ambari_server.setupSecurity.get_validated_string_input") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.setupSecurity.is_server_runing") @patch("ambari_server.setupSecurity.is_root") 
@patch("ambari_server.setupSecurity.logger") def test_ldap_sync_groups(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock, get_validated_string_input_mock, urlopen_mock, os_path_exists_mock, open_mock): os_path_exists_mock.return_value = 1 f = MagicMock() f.__enter__().read.return_value = "group1, group2" open_mock.return_value = f is_root_method.return_value = True is_server_runing_mock.return_value = (True, 0) properties = Properties() properties.process_pair(IS_LDAP_CONFIGURED, 'true') get_ambari_properties_mock.return_value = properties get_validated_string_input_mock.side_effect = ['admin', 'admin'] response = MagicMock() response.getcode.side_effect = [201, 200, 200] response.read.side_effect = ['{"resources" : [{"href" : "http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}', '{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}', '{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}'] urlopen_mock.return_value = response options = self._create_empty_options_mock() options.ldap_sync_all = False options.ldap_sync_existing = False options.ldap_sync_users = None options.ldap_sync_groups = 'groups.txt' sync_ldap(options) request = urlopen_mock.call_args_list[0][0][0] self.assertEquals('[{"Event": {"specs": [{"principal_type": "groups", "sync_type": "specific", "names": "group1, group2"}]}}]', request.data) self.assertTrue(response.getcode.called) self.assertTrue(response.read.called) pass @patch("urllib2.urlopen") @patch("ambari_server.setupSecurity.get_validated_string_input") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.setupSecurity.is_server_runing") 
@patch("ambari_server.setupSecurity.is_root") @patch("ambari_server.setupSecurity.logger") def test_ldap_sync_ssl(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock, get_validated_string_input_mock, urlopen_mock): is_root_method.return_value = True is_server_runing_mock.return_value = (True, 0) properties = Properties() properties.process_pair(IS_LDAP_CONFIGURED, 'true') properties.process_pair(SSL_API, 'true') properties.process_pair(SSL_API_PORT, '8443') get_ambari_properties_mock.return_value = properties get_validated_string_input_mock.side_effect = ['admin', 'admin'] response = MagicMock() response.getcode.side_effect = [201, 200, 200] response.read.side_effect = ['{"resources" : [{"href" : "https://c6401.ambari.apache.org:8443/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}', '{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}', '{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}'] urlopen_mock.return_value = response options = self._create_empty_options_mock() options.ldap_sync_all = True options.ldap_sync_existing = False options.ldap_sync_users = None options.ldap_sync_groups = None sync_ldap(options) url = '{0}://{1}:{2!s}{3}'.format('https', '127.0.0.1', '8443', '/api/v1/ldap_sync_events') request = urlopen_mock.call_args_list[0][0][0] self.assertEquals(url, str(request.get_full_url())) self.assertTrue(response.getcode.called) self.assertTrue(response.read.called) pass @patch("urllib2.urlopen") @patch("ambari_server.setupSecurity.get_validated_string_input") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.setupSecurity.is_server_runing") @patch("ambari_server.setupSecurity.is_root") 
@patch("ambari_server.setupSecurity.logger") def test_ldap_sync_existing(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock, get_validated_string_input_mock, urlopen_mock): is_root_method.return_value = True is_server_runing_mock.return_value = (True, 0) properties = Properties() properties.process_pair(IS_LDAP_CONFIGURED, 'true') get_ambari_properties_mock.return_value = properties get_validated_string_input_mock.side_effect = ['admin', 'admin'] response = MagicMock() response.getcode.side_effect = [201, 200, 200] response.read.side_effect = ['{"resources" : [{"href" : "http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}', '{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}', '{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}'] urlopen_mock.return_value = response options = self._create_empty_options_mock() options.ldap_sync_all = False options.ldap_sync_existing = True options.ldap_sync_users = None options.ldap_sync_groups = None sync_ldap(options) self.assertTrue(response.getcode.called) self.assertTrue(response.read.called) pass @patch("urllib2.urlopen") @patch("ambari_server.setupSecurity.get_validated_string_input") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.setupSecurity.is_server_runing") @patch("ambari_server.setupSecurity.is_root") @patch("ambari_server.setupSecurity.logger") def test_ldap_sync_no_sync_mode(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock, get_validated_string_input_mock, urlopen_mock): is_root_method.return_value = True is_server_runing_mock.return_value = (True, 0) properties = Properties() 
properties.process_pair(IS_LDAP_CONFIGURED, 'true') get_ambari_properties_mock.return_value = properties get_validated_string_input_mock.side_effect = ['admin', 'admin'] response = MagicMock() response.getcode.side_effect = [201, 200, 200] response.read.side_effect = ['{"resources" : [{"href" : "http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}', '{"Event":{"status" : "RUNNING","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}', '{"Event":{"status" : "COMPLETE","summary" : {"groups" : {"created" : 1,"removed" : 0,"updated" : 0},"memberships" : {"created" : 5,"removed" : 0},"users" : {"created" : 5,"removed" : 0,"updated" : 0}}}}'] urlopen_mock.return_value = response options = self._create_empty_options_mock() del options.ldap_sync_all del options.ldap_sync_existing del options.ldap_sync_users del options.ldap_sync_groups try: sync_ldap(options) self.fail("Should fail with exception") except FatalException as e: pass pass @patch("urllib2.urlopen") @patch("ambari_server.setupSecurity.get_validated_string_input") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.setupSecurity.is_server_runing") @patch("ambari_server.setupSecurity.is_root") @patch("ambari_server.setupSecurity.logger") def test_ldap_sync_error_status(self, logger_mock, is_root_method, is_server_runing_mock, get_ambari_properties_mock, get_validated_string_input_mock, urlopen_mock): is_root_method.return_value = True is_server_runing_mock.return_value = (True, 0) properties = Properties() properties.process_pair(IS_LDAP_CONFIGURED, 'true') get_ambari_properties_mock.return_value = properties get_validated_string_input_mock.side_effect = ['admin', 'admin'] response = MagicMock() response.getcode.side_effect = [201, 200] response.read.side_effect = ['{"resources" : [{"href" : 
"http://c6401.ambari.apache.org:8080/api/v1/ldap_sync_events/16","Event" : {"id" : 16}}]}', '{"Event":{"status" : "ERROR","status_detail" : "Error!!","summary" : {"groups" : {"created" : 0,"removed" : 0,"updated" : 0},"memberships" : {"created" : 0,"removed" : 0},"users" : {"created" : 0,"removed" : 0,"updated" : 0}}}}'] urlopen_mock.return_value = response options = self._create_empty_options_mock() options.ldap_sync_all = False options.ldap_sync_existing = False options.ldap_sync_users = None options.ldap_sync_groups = None try: sync_ldap(options) self.fail("Should fail with exception") except FatalException as e: pass pass @patch("urllib2.urlopen") @patch("urllib2.Request") @patch("base64.encodestring") @patch("ambari_server.setupSecurity.is_server_runing") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.setupSecurity.get_validated_string_input") @patch("ambari_server.setupSecurity.logger") def test_sync_ldap_forbidden(self, logger_mock, get_validated_string_input_method, get_ambari_properties_method, is_server_runing_method, encodestring_method, request_constructor, urlopen_method): options = self._create_empty_options_mock() options.ldap_sync_all = True options.ldap_sync_existing = False options.ldap_sync_users = None options.ldap_sync_groups = None is_server_runing_method.return_value = (None, None) try: sync_ldap(options) self.fail("Should throw exception if ambari is stopped") except FatalException as fe: # Expected self.assertTrue("not running" in fe.reason) pass is_server_runing_method.return_value = (True, None) configs = MagicMock() configs.get_property.return_value = None get_ambari_properties_method.return_value = configs try: sync_ldap(options) self.fail("Should throw exception if ldap is not configured") except FatalException as fe: # Expected self.assertTrue("not configured" in fe.reason) pass configs.get_property.return_value = 'true' get_validated_string_input_method.return_value = 'admin' 
encodestring_method.return_value = 'qwe123' requestMocks = [MagicMock()] request_constructor.side_effect = requestMocks response = MagicMock() response.getcode.return_value = 403 urlopen_method.return_value = response try: sync_ldap(options) self.fail("Should throw exception if return code != 200") except FatalException as fe: # Expected self.assertTrue("status code" in fe.reason) pass pass @patch("ambari_server.setupSecurity.is_root") def test_sync_ldap_ambari_stopped(self, is_root_method): is_root_method.return_value = False options = self._create_empty_options_mock() options.ldap_sync_all = True options.ldap_sync_existing = False options.ldap_sync_users = None options.ldap_sync_groups = None try: sync_ldap(options) self.fail("Should throw exception if not root") except FatalException as fe: # Expected self.assertTrue("root-level" in fe.reason) pass pass @patch("ambari_server.setupSecurity.is_root") @patch("ambari_server.setupSecurity.is_server_runing") @patch("ambari_server.setupSecurity.logger") def test_sync_ldap_ambari_stopped(self, logger_mock, is_server_runing_method, is_root_method): is_root_method.return_value = True is_server_runing_method.return_value = (None, None) options = self._create_empty_options_mock() options.ldap_sync_all = True options.ldap_sync_existing = False options.ldap_sync_users = None options.ldap_sync_groups = None try: sync_ldap(options) self.fail("Should throw exception if ambari is stopped") except FatalException as fe: # Expected self.assertTrue("not running" in fe.reason) pass pass @patch("ambari_server.setupSecurity.is_root") @patch("ambari_server.setupSecurity.is_server_runing") @patch("ambari_server.setupSecurity.get_ambari_properties") @patch("ambari_server.setupSecurity.logger") def test_sync_ldap_not_configured(self, logger_mock, get_ambari_properties_method, is_server_runing_method, is_root_method): is_root_method.return_value = True is_server_runing_method.return_value = (True, None) configs = MagicMock() 
    # Continuation of test_sync_ldap_not_configured (defined on the previous
    # source line): IS_LDAP_CONFIGURED is unset, so sync_ldap must refuse.
    configs.get_property.return_value = None
    get_ambari_properties_method.return_value = configs

    options = self._create_empty_options_mock()
    options.ldap_sync_all = True
    del options.ldap_sync_existing
    del options.ldap_sync_users
    del options.ldap_sync_groups

    try:
      sync_ldap(options)
      self.fail("Should throw exception if ldap is not configured")
    except FatalException as fe:
      # Expected
      self.assertTrue("not configured" in fe.reason)
      pass
    pass

  @patch("__builtin__.open")
  @patch("os.path.exists")
  def test_get_ldap_event_spec_names(self, os_path_exists_mock, open_mock):
    """get_ldap_event_spec_names must strip whitespace/newlines from the
    names file and append a populated spec to the event body in place."""
    os_path_exists_mock.return_value = 1
    f = MagicMock()
    # Deliberately messy input: leading/trailing blank lines and tabs.
    f.__enter__().read.return_value = "\n\n\t some group, \tanother group, \n\t\tgrp, \ngroup*\n\n\n\n"
    open_mock.return_value = f

    bodies = [{"Event":{"specs":[]}}]
    body = bodies[0]
    events = body['Event']
    specs = events['specs']
    new_specs = [{"principal_type":"groups","sync_type":"specific","names":""}]

    get_ldap_event_spec_names("groups.txt", specs, new_specs)

    self.assertEquals("[{'Event': {'specs': [{'principal_type': 'groups', 'sync_type': 'specific', 'names': ' some group, another group, grp, group*'}]}}]", str(bodies))
    pass

  @patch("ambari_server.setupSecurity.read_password")
  def test_configure_ldap_password(self, read_password_method):
    """configure_ldap_password must delegate to read_password."""
    out = StringIO.StringIO()
    sys.stdout = out
    read_password_method.return_value = "blah"
    options = self._create_empty_options_mock()

    configure_ldap_password(options)

    self.assertTrue(read_password_method.called)

    sys.stdout = sys.__stdout__
    pass

  @patch("ambari_server.userInput.get_validated_string_input")
  def test_read_password(self, get_validated_string_input_method):
    """Exercise read_password's retry/confirm loop.

    Body continues on the following source line.
    """
    out = StringIO.StringIO()
    sys.stdout = out

    passwordDefault = ""
    passwordPrompt = 'Enter Manager Password* : '
    passwordPattern = ".*"
    passwordDescr = "Invalid characters in password."
    # Continuation of test_read_password: empty first answer forces a retry,
    # then the password is entered and confirmed.
    get_validated_string_input_method.side_effect = ['', 'aaa', 'aaa']
    password = read_password(passwordDefault, passwordPattern,
                             passwordPrompt, passwordDescr)
    # NOTE(review): assertTrue(3, mock.call_count) always passes — the second
    # argument is only the failure message.  assertEqual(3, ...) was almost
    # certainly intended here and in the two checks below; verify the actual
    # call counts before changing, as the same pattern recurs file-wide.
    self.assertTrue(3, get_validated_string_input_method.call_count)
    self.assertEquals('aaa', password)

    # Non-empty answer straight away: prompt + confirmation only.
    get_validated_string_input_method.reset_mock()
    get_validated_string_input_method.side_effect = ['aaa', 'aaa']

    password = read_password(passwordDefault, passwordPattern,
                             passwordPrompt, passwordDescr)
    self.assertTrue(2, get_validated_string_input_method.call_count)
    self.assertEquals('aaa', password)

    # Entered password equals the default: no confirmation prompt.
    get_validated_string_input_method.reset_mock()
    get_validated_string_input_method.side_effect = ['aaa']

    password = read_password('aaa', passwordPattern,
                             passwordPrompt, passwordDescr)
    self.assertTrue(1, get_validated_string_input_method.call_count)
    self.assertEquals('aaa', password)

    sys.stdout = sys.__stdout__
    pass

  def test_generate_random_string(self):
    """generate_random_string must honour the requested length and not
    repeat itself across calls."""
    random_str_len = 100
    str1 = generate_random_string(random_str_len)
    self.assertTrue(len(str1) == random_str_len)

    str2 = generate_random_string(random_str_len)
    self.assertTrue(str1 != str2)
    pass

  @patch("__builtin__.open")
  @patch("ambari_server.serverConfiguration.search_file")
  @patch("ambari_server.serverConfiguration.backup_file_in_temp")
  def test_update_properties_2(self, backup_file_in_temp_mock,
                               search_file_mock, open_mock):
    """update_properties_2 must back up the config file and rewrite each
    entry of the supplied property map.  Body continues on the following
    source line (the propertyMap=None case)."""
    conf_file = "ambari.properties"
    propertyMap = {"1": "1", "2": "2"}
    properties = MagicMock()
    f = MagicMock(name="file")
    search_file_mock.return_value = conf_file
    open_mock.return_value = f

    update_properties_2(properties, propertyMap)

    properties.store_ordered.assert_called_with(f.__enter__.return_value)
    backup_file_in_temp_mock.assert_called_with(conf_file)
    # One removeOldProp + process_pair per entry in the two-item map.
    self.assertEquals(2, properties.removeOldProp.call_count)
    self.assertEquals(2, properties.process_pair.call_count)

    properties = MagicMock()
    backup_file_in_temp_mock.reset_mock()
    open_mock.reset_mock()

    update_properties_2(properties, None)
    # Continuation of test_update_properties_2: with propertyMap=None the
    # file is still backed up and stored, but no pairs are touched.
    properties.store_ordered.assert_called_with(f.__enter__.return_value)
    backup_file_in_temp_mock.assert_called_with(conf_file)
    self.assertFalse(properties.removeOldProp.called)
    self.assertFalse(properties.process_pair.called)
    pass

  def test_regexps(self):
    """Spot-check the validation regexes used by interactive prompts."""
    # host:port — requires a hostname, a colon and a 1-5 digit port.
    res = re.search(REGEX_HOSTNAME_PORT, "")
    self.assertTrue(res is None)
    res = re.search(REGEX_HOSTNAME_PORT, "ddd")
    self.assertTrue(res is None)
    res = re.search(REGEX_HOSTNAME_PORT, "gg:ff")
    self.assertTrue(res is None)
    res = re.search(REGEX_HOSTNAME_PORT, "gg:55444325")
    self.assertTrue(res is None)
    res = re.search(REGEX_HOSTNAME_PORT, "gg:555")
    self.assertTrue(res is not None)

    # true/false — empty string is accepted (treated as "use default").
    res = re.search(REGEX_TRUE_FALSE, "")
    self.assertTrue(res is not None)
    res = re.search(REGEX_TRUE_FALSE, "t")
    self.assertTrue(res is None)
    res = re.search(REGEX_TRUE_FALSE, "trrrr")
    self.assertTrue(res is None)
    res = re.search(REGEX_TRUE_FALSE, "true|false")
    self.assertTrue(res is None)
    res = re.search(REGEX_TRUE_FALSE, "true")
    self.assertTrue(res is not None)
    res = re.search(REGEX_TRUE_FALSE, "false")
    self.assertTrue(res is not None)

    # anything — matches everything including the empty string.
    res = re.search(REGEX_ANYTHING, "")
    self.assertTrue(res is not None)
    res = re.search(REGEX_ANYTHING, "t")
    self.assertTrue(res is not None)
    res = re.search(REGEX_ANYTHING, "trrrr")
    self.assertTrue(res is not None)
    pass

  def get_sample(self, sample):
    """
    Returns sample file content as string with normalized line endings
    """
    path = self.get_samples_dir(sample)
    return self.get_file_string(path)

  def get_file_string(self, file):
    """
    Returns file content as string with normalized line endings
    """
    # NOTE(review): the handle is never explicitly closed; CPython's
    # refcounting closes it promptly, but a `with` block would be safer.
    string = open(file, 'r').read()
    return self.normalize(string)

  def normalize(self, string):
    """
    Normalizes line ending in string according to platform-default encoding
    """
    return string.replace("\n", os.linesep)

  def get_samples_dir(self, sample):
    """
    Returns full file path by sample name
    """
    testdir = os.path.dirname(__file__)
    return os.path.dirname(testdir) + os.sep + "resources" + os.sep \
           + 'TestAmbaryServer.samples/' + sample
@not_for_platform(PLATFORM_WINDOWS) @patch("ambari_server.dbConfiguration_linux.get_ambari_properties") def test_is_jdbc_user_changed(self, get_ambari_properties_mock): previous_user = "previous_user" new_user = "new_user" props = Properties() props.process_pair(JDBC_USER_NAME_PROPERTY, previous_user) get_ambari_properties_mock.return_value = props #check if users are different result = PGConfig._is_jdbc_user_changed(new_user) self.assertTrue(result) #check if users are equal result = PGConfig._is_jdbc_user_changed(previous_user) self.assertFalse(result) #check if one of users is None result = PGConfig._is_jdbc_user_changed(None) self.assertEqual(None, result) pass @not_for_platform(PLATFORM_WINDOWS) @patch("ambari_server.serverConfiguration.write_property") @patch("ambari_server.serverConfiguration.get_ambari_properties") @patch("ambari_server.serverConfiguration.get_ambari_version") def test_check_database_name_property(self, get_ambari_version_mock, get_ambari_properties_mock, write_property_mock): parser = OptionParser() parser.add_option('--database', default=None, help="Database to use embedded|oracle|mysql|mssql|postgres", dest="dbms") args = parser.parse_args() # negative case get_ambari_properties_mock.return_value = {JDBC_DATABASE_NAME_PROPERTY: ""} try: result = check_database_name_property() self.fail("Should fail with exception") except FatalException as e: self.assertTrue('DB Name property not set in config file.' in e.reason) # positive case dbname = "ambari" get_ambari_properties_mock.reset_mock() get_ambari_properties_mock.return_value = {JDBC_DATABASE_NAME_PROPERTY: dbname} try: result = check_database_name_property() except FatalException: self.fail("Setup should be successful") # Check upgrade. 
In Ambari < 1.7.1 "database" property contained db name for local db dbname = "ambari" database = "ambari" persistence = "local" get_ambari_properties_mock.reset_mock() get_ambari_properties_mock.return_value = {JDBC_DATABASE_NAME_PROPERTY: dbname, JDBC_DATABASE_PROPERTY: database, PERSISTENCE_TYPE_PROPERTY: persistence} try: result = check_database_name_property(upgrade=True) except FatalException: self.fail("Setup should be successful") self.assertTrue(write_property_mock.called) @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("resource_management.core.shell.call") @patch("ambari_server.dbConfiguration_linux.PGConfig._is_jdbc_user_changed") @patch("ambari_server.serverSetup.verify_setup_allowed") @patch("ambari_server.serverSetup.get_YN_input") @patch("ambari_server.serverSetup.configure_os_settings") @patch("ambari_server.serverSetup.download_and_install_jdk") @patch.object(PGConfig, "_configure_postgres") @patch.object(PGConfig, "_check_postgre_up") @patch("ambari_server.serverSetup.check_ambari_user") @patch("ambari_server.serverSetup.check_jdbc_drivers") @patch("ambari_server.serverSetup.check_selinux") @patch("ambari_server.serverSetup.is_root") @patch.object(PGConfig, "_setup_db") @patch("ambari_server.serverSetup.get_is_secure") @patch("ambari_server.dbConfiguration_linux.store_password_file") @patch("ambari_server.serverSetup.extract_views") @patch("ambari_server.serverSetup.adjust_directory_permissions") @patch("sys.exit") @patch("__builtin__.raw_input") @patch("ambari_server.serverSetup.expand_jce_zip_file") def test_ambariServerSetupWithCustomDbName(self, expand_jce_zip_file_mock, raw_input, exit_mock, adjust_dirs_mock, extract_views_mock, store_password_file_mock, get_is_secure_mock, setup_db_mock, is_root_mock, #is_local_database_mock, check_selinux_mock, check_jdbc_drivers_mock, check_ambari_user_mock, check_postgre_up_mock, configure_postgres_mock, 
download_jdk_mock, configure_os_settings_mock, get_YN_input, verify_setup_allowed_method, is_jdbc_user_changed_mock, run_os_command_mock): args = MagicMock() raw_input.return_value = "" get_YN_input.return_value = False verify_setup_allowed_method.return_value = 0 is_root_mock.return_value = True check_selinux_mock.return_value = 0 check_ambari_user_mock.return_value = (0, False, 'user', None) check_jdbc_drivers_mock.return_value = 0 check_postgre_up_mock.return_value = "running", 0, "", "" configure_postgres_mock.return_value = 0, "", "" download_jdk_mock.return_value = 0 configure_os_settings_mock.return_value = 0 is_jdbc_user_changed_mock.return_value = False setup_db_mock.return_value = (0, None, None) get_is_secure_mock.return_value = False store_password_file_mock.return_value = "password" extract_views_mock.return_value = 0 run_os_command_mock.return_value = 3,"","" new_db = "newDBName" args.dbms = "postgres" args.database_name = new_db args.postgres_schema = new_db args.database_username = "user" args.database_password = "password" args.jdbc_driver= None args.jdbc_db = None args.must_set_database_options = True del args.database_index del args.persistence_type tempdir = tempfile.gettempdir() prop_file = os.path.join(tempdir, "ambari.properties") with open(prop_file, "w") as f: f.write("server.jdbc.database_name=oldDBName") f.close() os.environ[AMBARI_CONF_VAR] = tempdir try: result = setup(args) except FatalException as ex: self.fail("Setup should be successful") properties = get_ambari_properties() self.assertTrue(JDBC_DATABASE_NAME_PROPERTY in properties.keys()) value = properties[JDBC_DATABASE_NAME_PROPERTY] self.assertEqual(value, new_db) del os.environ[AMBARI_CONF_VAR] os.remove(prop_file) pass @only_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch("ambari_server.serverSetup.service_setup") @patch("ambari_server.dbConfiguration_windows.MSSQLConfig._execute_db_script") 
@patch("ambari_server.dbConfiguration_windows.store_password_file") @patch("ambari_server.dbConfiguration_windows.MSSQLConfig._is_jdbc_driver_installed") @patch("ambari_server.serverSetup.verify_setup_allowed") @patch("ambari_server.serverSetup.get_YN_input") @patch("ambari_server.serverSetup.configure_os_settings") @patch("ambari_server.serverSetup.download_and_install_jdk") @patch("ambari_server.serverSetup.check_firewall") @patch("ambari_server.serverSetup.check_ambari_user") @patch("ambari_server.serverSetup.check_jdbc_drivers") @patch("ambari_server.serverSetup.is_root") @patch("ambari_server.serverSetup.extract_views") @patch("ambari_server.serverSetup.adjust_directory_permissions") def test_ambariServerSetupWithCustomDbName(self, adjust_dirs_mock, extract_views_mock, is_root_mock, check_jdbc_drivers_mock, check_ambari_user_mock, check_firewall_mock, download_jdk_mock, configure_os_settings_mock, get_YN_input, verify_setup_allowed_method, is_jdbc_driver_installed_mock, store_password_file_mock, execute_db_script_mock, service_setup_mock): args = MagicMock() get_YN_input.return_value = False verify_setup_allowed_method.return_value = 0 is_root_mock.return_value = True check_ambari_user_mock.return_value = (0, False, 'user', None) check_jdbc_drivers_mock.return_value = 0 download_jdk_mock.return_value = 0 configure_os_settings_mock.return_value = 0 is_jdbc_driver_installed_mock.return_value = True store_password_file_mock.return_value = "password.dat" extract_views_mock.return_value = 0 new_db = "newDBName" del args.dbms del args.database_index del args.database_host del args.database_port args.database_name = new_db args.database_username = "user" args.database_password = "password" del args.database_windows_auth args.jdbc_driver= None args.jdbc_db = None args.must_set_database_options = True del args.default_database_host del args.persistence_type del args.init_db_script_file del args.cleanup_db_script_file tempdir = tempfile.gettempdir() prop_file = 
os.path.join(tempdir, "ambari.properties") with open(prop_file, "w") as f: f.write("server.jdbc.database_name=oldDBName") f.close() os.environ[AMBARI_CONF_VAR] = tempdir try: result = setup(args) except FatalException as ex: self.fail("Setup should be successful") properties = get_ambari_properties() self.assertTrue(JDBC_DATABASE_NAME_PROPERTY in properties.keys()) value = properties[JDBC_DATABASE_NAME_PROPERTY] self.assertEqual(value, new_db) self.assertEqual(store_password_file_mock.call_count, 2) self.assertEqual(execute_db_script_mock.call_count, 2) del os.environ[AMBARI_CONF_VAR] os.remove(prop_file) pass def test_is_valid_filepath(self): temp_dir = tempfile.gettempdir() temp_file = tempfile.NamedTemporaryFile(mode='r') # Correct path to an existing file self.assertTrue(temp_file) # Correct path to an existing directory self.assertFalse(is_valid_filepath(temp_dir), \ 'is_valid_filepath(path) should return False is path is a directory') # Incorrect path self.assertFalse(is_valid_filepath('')) pass @patch("ambari_server.setupSecurity.search_file") @patch("ambari_server.setupSecurity.get_validated_string_input") def test_setup_ambari_krb5_jaas_with_options(self, get_validated_string_input_mock, search_file_mock): options = self._create_empty_options_mock() options.jaas_keytab = '/kerberos/admin.keytab' temp_file = tempfile.NamedTemporaryFile(mode='r') search_file_mock.return_value = temp_file.name get_validated_string_input_mock.side_effect = ['adm@EXAMPLE.COM', temp_file] self.assertEqual(None, setup_ambari_krb5_jaas(options)) self.assertTrue(get_validated_string_input_mock.called) self.assertEqual(get_validated_string_input_mock.call_count, 2) get_validated_string_input_mock.assert_called_with("Enter keytab path for ambari server's kerberos principal: ", '/etc/security/keytabs/ambari.keytab', '.*', False, False, validatorFunction = is_valid_filepath, answer='/kerberos/admin.keytab') pass @patch("os.listdir") @patch("os.path.exists") 
@patch("ambari_server.serverUpgrade.load_stack_values") @patch("ambari_server.serverUpgrade.get_ambari_properties") @patch("ambari_server.serverUpgrade.run_metainfo_upgrade") def test_upgrade_local_repo(self, run_metainfo_upgrade_mock, get_ambari_properties_mock, load_stack_values_mock, os_path_exists_mock, os_listdir_mock): from mock.mock import call args = MagicMock() args.persistence_type = "local" def load_values_side_effect(*args, **kwargs): res = {} res['a'] = 'http://oldurl' if -1 != args[1].find("HDPLocal"): res['a'] = 'http://newurl' return res load_stack_values_mock.side_effect = load_values_side_effect properties = Properties() get_ambari_properties_mock.return_value = properties os_path_exists_mock.return_value = 1 os_listdir_mock.return_value = ['1.1'] upgrade_local_repo(args) self.assertTrue(get_ambari_properties_mock.called) self.assertTrue(load_stack_values_mock.called) self.assertTrue(run_metainfo_upgrade_mock.called) run_metainfo_upgrade_mock.assert_called_with(args, {'a': 'http://newurl'}) pass @patch("os.listdir") @patch("os.path.exists") @patch("ambari_server.serverUpgrade.load_stack_values") @patch("ambari_server.serverUpgrade.get_ambari_properties") @patch("ambari_server.serverUpgrade.run_metainfo_upgrade") def test_upgrade_local_repo_nochange(self, run_metainfo_upgrade_mock, get_ambari_properties_mock, load_stack_values_mock, os_path_exists_mock, os_listdir_mock): from mock.mock import call args = MagicMock() args.persistence_type = "local" def load_values_side_effect(*args, **kwargs): res = {} res['a'] = 'http://oldurl' return res load_stack_values_mock.side_effect = load_values_side_effect properties = Properties() get_ambari_properties_mock.return_value = properties os_path_exists_mock.return_value = 1 os_listdir_mock.return_value = ['1.1'] upgrade_local_repo(args) self.assertTrue(get_ambari_properties_mock.called) self.assertTrue(load_stack_values_mock.called) self.assertTrue(run_metainfo_upgrade_mock.called) 
run_metainfo_upgrade_mock.assert_called_with(args, {}) pass @patch("os.path.exists") @patch.object(ResourceFilesKeeper, "perform_housekeeping") def test_refresh_stack_hash(self, perform_housekeeping_mock, path_exists_mock): path_exists_mock.return_value = True properties = Properties() refresh_stack_hash(properties) self.assertTrue(perform_housekeeping_mock.called) pass @patch("ambari_server.dbConfiguration.decrypt_password_for_alias") @patch("ambari_server.dbConfiguration_linux.run_os_command") @patch("ambari_server.dbConfiguration_linux.print_error_msg") def test_change_tables_owner_no_tables(self, print_error_msg_mock, run_os_command_mock, decrypt_password_for_alias_mock): args = MagicMock() del args.database_index del args.dbms del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.init_script_file del args.drop_script_file properties = Properties() properties.process_pair(JDBC_PASSWORD_PROPERTY, get_alias_string("mypwdalias")) decrypt_password_for_alias_mock.return_value = "password" run_os_command_mock.return_value = 0, "", "" dbms = PGConfig(args, properties, "local") result = dbms._change_tables_owner() self.assertFalse(result) self.assertEquals(print_error_msg_mock.call_args_list[0][0][0], 'Failed to get list of ambari tables') @patch("ambari_server.dbConfiguration.decrypt_password_for_alias") @patch("ambari_server.dbConfiguration_linux.run_os_command") @patch("ambari_server.dbConfiguration_linux.print_error_msg") def test_change_tables_owner_fatal_psql(self, print_error_msg_mock, run_os_command_mock, decrypt_password_for_alias_mock): args = MagicMock() del args.database_index del args.dbms del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.init_script_file del args.drop_script_file properties = Properties() properties.process_pair(JDBC_PASSWORD_PROPERTY, get_alias_string("mypwdalias")) 
decrypt_password_for_alias_mock.return_value = "password" run_os_command_mock.return_value = 0, "", "psql: could not connect to server: No such file or directory" dbms = PGConfig(args, properties, "local") result = dbms._change_tables_owner() self.assertFalse(result) self.assertEquals(print_error_msg_mock.call_args_list[0][0][0], """Failed to get list of ambari tables. Message from psql: stdout: stderr:psql: could not connect to server: No such file or directory """) @patch("ambari_server.dbConfiguration.decrypt_password_for_alias") @patch("ambari_server.dbConfiguration_linux.run_os_command") @patch("ambari_server.dbConfiguration_linux.print_error_msg") def test_change_tables_owner(self, print_error_msg_mock, run_os_command_mock, decrypt_password_for_alias_mock): args = MagicMock() del args.database_index del args.dbms del args.database_host del args.database_port del args.database_name del args.database_username del args.database_password del args.init_script_file del args.drop_script_file properties = Properties() properties.process_pair(JDBC_PASSWORD_PROPERTY, get_alias_string("mypwdalias")) decrypt_password_for_alias_mock.return_value = "password" run_os_command_mock.side_effect = [(0, "tbl1\n,tbl2", ""), (0, "", ""), (0, "", ""), (0, "postgres", ""), (0, "ALTER TABLE", ""), (0, "postgres", ""), (0, "ALTER TABLE", "")] dbms = PGConfig(args, properties, "local") result = dbms._change_tables_owner() self.assertTrue(result) self.assertEquals(run_os_command_mock.call_count, 7) @patch("os.path.isdir", new = MagicMock(return_value=True)) @patch("os.access", new = MagicMock(return_value=True)) @patch.object(ServerClassPath, "get_full_ambari_classpath_escaped_for_shell", new = MagicMock(return_value = 'test' + os.pathsep + 'path12')) @patch("ambari_server.serverUtils.is_server_runing") @patch("ambari_commons.os_utils.run_os_command") @patch("ambari_server.setupSecurity.generate_env") @patch("ambari_server.setupSecurity.ensure_can_start_under_current_user") 
@patch("ambari_server.serverConfiguration.read_ambari_user") @patch("ambari_server.dbConfiguration.ensure_jdbc_driver_is_installed") @patch("ambari_server.serverConfiguration.parse_properties_file") @patch("ambari_server.serverConfiguration.get_ambari_properties") @patch("ambari_server.serverConfiguration.get_java_exe_path") @patch("os.path.isfile") @patch("sys.exit") @patch("ambari_server.userInput.get_YN_input") @patch("ambari_server.hostUpdate.logger") def test_update_host_names(self, logger_mock, getYNInput_mock, sysExitMock, isFileMock, getJavaExePathMock, getAmbariPropertiesMock, parsePropertiesFileMock, ensureDriverInstalledMock, readAmbariUserMock, ensureCanStartUnderCurrentUserMock, generateEnvMock, runOSCommandMock, isServerRunningMock): properties = Properties() properties.process_pair("server.jdbc.database", "embedded") getYNInput_mock.return_value = False isFileMock.return_value = True getJavaExePathMock.return_value = "/path/to/java" getAmbariPropertiesMock.return_value = properties readAmbariUserMock.return_value = "test_user" ensureCanStartUnderCurrentUserMock.return_value = "test_user" generateEnvMock.return_value = {} runOSCommandMock.return_value = (0, "", "") isServerRunningMock.return_value = (False, 1) update_host_names(["update-host-names", "/testFileWithChanges"], properties) self.assertEquals(len(sysExitMock.call_args_list), 3) self.assertTrue(isFileMock.called) self.assertTrue(getJavaExePathMock.called) self.assertTrue(readAmbariUserMock.called) self.assertTrue(ensureCanStartUnderCurrentUserMock.called) self.assertTrue(generateEnvMock.called) self.assertEquals(runOSCommandMock.call_args[0][0], '/path/to/java -cp test:path12 ' 'org.apache.ambari.server.update.HostUpdateHelper /testFileWithChanges > ' '/var/log/ambari-server/ambari-server.out 2>&1') pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch.object(_ambari_server_, "is_server_runing") 
@patch("optparse.OptionParser") @patch.object(_ambari_server_, "logger") @patch("ambari_server.serverConfiguration.get_ambari_properties") @patch.object(_ambari_server_, "setup_logging") @patch.object(_ambari_server_, "init_logging") def test_main_test_status_running(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock, logger_mock, optionParserMock, is_server_runing_method): opm = optionParserMock.return_value options = self._create_empty_options_mock() del options.exit_message args = ["status"] opm.parse_args.return_value = (options, args) is_server_runing_method.return_value = (True, 100) options.dbms = None options.sid_or_sname = "sid" try: _ambari_server_.mainBody() except SystemExit as e: self.assertTrue(e.code == 0) self.assertTrue(is_server_runing_method.called) pass @not_for_platform(PLATFORM_WINDOWS) @patch.object(OSCheck, "os_distribution", new = MagicMock(return_value = os_distro_value)) @patch.object(_ambari_server_, "is_server_runing") @patch("optparse.OptionParser") @patch.object(_ambari_server_, "logger") @patch("ambari_server.serverConfiguration.get_ambari_properties") @patch.object(_ambari_server_, "setup_logging") @patch.object(_ambari_server_, "init_logging") def test_main_test_status_not_running(self, init_logging_mock, setup_logging_mock, get_ambari_properties_mock, logger_mock, optionParserMock, is_server_runing_method): opm = optionParserMock.return_value options = self._create_empty_options_mock() del options.exit_message args = ["status"] opm.parse_args.return_value = (options, args) is_server_runing_method.return_value = (False, None) options.dbms = None options.sid_or_sname = "sid" try: _ambari_server_.mainBody() except SystemExit as e: self.assertTrue(e.code == 3) self.assertTrue(is_server_runing_method.called) pass def test_web_server_startup_timeout(self): from ambari_server.serverConfiguration import get_web_server_startup_timeout from ambari_server.serverConfiguration import WEB_SERVER_STARTUP_TIMEOUT properties = 
Properties() timeout = get_web_server_startup_timeout(properties) self.assertEquals(50, timeout) properties.process_pair(WEB_SERVER_STARTUP_TIMEOUT, "") timeout = get_web_server_startup_timeout(properties) self.assertEquals(50, timeout) properties.process_pair(WEB_SERVER_STARTUP_TIMEOUT, "120") timeout = get_web_server_startup_timeout(properties) self.assertEquals(120, timeout) properties.process_pair(WEB_SERVER_STARTUP_TIMEOUT, "120 ") timeout = get_web_server_startup_timeout(properties) self.assertEquals(120, timeout) def _create_empty_options_mock(self): options = MagicMock() options.ldap_url = None options.ldap_secondary_url = None options.ldap_ssl = None options.ldap_user_class = None options.ldap_user_attr = None options.ldap_group_class = None options.ldap_group_attr = None options.ldap_member_attr = None options.ldap_dn = None options.ldap_base_dn = None options.ldap_manager_dn = None options.ldap_manager_password = None options.ldap_save_settings = None options.ldap_referral = None options.ldap_bind_anonym = None options.ldap_sync_admin_name = None options.ldap_sync_username_collisions_behavior = None options.ldap_sync_admin_password = None options.custom_trust_store = None options.trust_store_type = None options.trust_store_path = None options.trust_store_password = None options.security_option = None options.api_ssl = None options.api_ssl_port = None options.import_cert_path = None options.import_cert_alias = None options.pem_password = None options.import_key_path = None options.master_key = None options.master_key_persist = None options.jaas_principal = None options.jaas_keytab = None return options
apache-2.0
gena/qgis-earthengine-plugin
ee_plugin.py
1
5587
# -*- coding: utf-8 -*- """ Main plugin file. """ from __future__ import absolute_import import configparser import requests import webbrowser from builtins import object import os.path import json from qgis.PyQt.QtCore import QSettings, QTranslator, qVersion, QCoreApplication from qgis.PyQt.QtWidgets import QAction from qgis.PyQt.QtGui import QIcon from qgis.core import QgsProject from ee_plugin import provider from ee_plugin.icons import resources # read the plugin version from metadata cfg = configparser.ConfigParser() cfg.read(os.path.join(os.path.dirname(__file__), 'metadata.txt')) VERSION = cfg.get('general', 'version') version_checked = False class GoogleEarthEnginePlugin(object): """QGIS Plugin Implementation.""" def __init__(self, iface): """Constructor. :param iface: An interface instance that will be passed to this class which provides the hook by which you can manipulate the QGIS application at run time. :type iface: QgsInterface """ # Save reference to the QGIS interface self.iface = iface # initialize plugin directory self.plugin_dir = os.path.dirname(__file__) # initialize locale locale = QSettings().value('locale/userLocale')[0:2] locale_path = os.path.join( self.plugin_dir, 'i18n', 'GoogleEarthEnginePlugin_{}.qm'.format(locale)) if os.path.exists(locale_path): self.translator = QTranslator() self.translator.load(locale_path) if qVersion() > '4.3.3': QCoreApplication.installTranslator(self.translator) self.menu_name_plugin = self.tr("Google Earth Engine Plugin") # Create and register the EE data providers provider.register_data_provider() # noinspection PyMethodMayBeStatic def tr(self, message): """Get the translation for a string using Qt translation API. We implement this ourselves since we do not inherit QObject. :param message: String for translation. :type message: str, QString :returns: Translated version of message. 
:rtype: QString """ # noinspection PyTypeChecker,PyArgumentList,PyCallByClass return QCoreApplication.translate('GoogleEarthEngine', message) def initGui(self): ### Main dockwidget menu # Create action that will start plugin configuration icon_path = ':/plugins/ee_plugin/icons/earth_engine.svg' self.dockable_action = QAction( QIcon(icon_path), "User Guide", self.iface.mainWindow()) # connect the action to the run method self.dockable_action.triggered.connect(self.run) # Add menu item self.iface.addPluginToMenu(self.menu_name_plugin, self.dockable_action) # Register signal to initialize EE layers on project load self.iface.projectRead.connect(self.updateLayers) def run(self): # open user guide in external web browser webbrowser.open_new( "http://qgis-ee-plugin.appspot.com/user-guide") def check_version(self): global version_checked if version_checked: return try: latest_version = requests.get('https://qgis-ee-plugin.appspot.com/get_latest_version').text if VERSION < latest_version: self.iface.messageBar().pushMessage('Earth Engine plugin:', 'There is a more recent version of the ee_plugin available {0} and you have {1}, please upgrade!'.format(latest_version, VERSION), duration=15) except: print('Error occurred when checking for recent plugin version, skipping ...') finally: version_checked = True def unload(self): # Remove the plugin menu item and icon self.iface.removePluginMenu( self.menu_name_plugin, self.dockable_action) def updateLayers(self): import ee from ee_plugin.utils import add_or_update_ee_layer layers = QgsProject.instance().mapLayers().values() for l in filter(lambda layer: layer.customProperty('ee-layer'), layers): ee_object = l.customProperty('ee-object') ee_object_vis = l.customProperty('ee-object-vis') # check for backward-compatibility, older file formats (before 0.0.3) store ee-objects in ee-script property an no ee-object-vis is stored # also, it seems that JSON representation of persistent object has been changed, making it difficult to read 
older EE JSON if ee_object is None: print('\nWARNING:\n Map layer saved with older version of EE plugin is detected, backward-compatibility for versions before 0.0.3 is not supported due to changes in EE library, please re-create EE layer by re-running the Python script\n') return ee_object = ee.deserializer.fromJSON(ee_object) if ee_object_vis is not None: ee_object_vis = json.loads(ee_object_vis) # update loaded EE layer # get existing values for name, visibility, and opacity # TODO: this should not be needed, refactor add_or_update_ee_layer to update_ee_layer name = l.name() shown = QgsProject.instance().layerTreeRoot().findLayer(l.id()).itemVisibilityChecked() opacity = l.renderer().opacity() add_or_update_ee_layer(ee_object, ee_object_vis, name, shown, opacity)
mit
ravenland/ycmWinRepo
third_party/ycmd/third_party/jedi/test/test_api/test_unicode.py
27
2147
# -*- coding: utf-8 -*- """ All character set and unicode related tests. """ from jedi import Script from jedi._compatibility import u, unicode def test_unicode_script(): """ normally no unicode objects are being used. (<=2.7) """ s = unicode("import datetime; datetime.timedelta") completions = Script(s).completions() assert len(completions) assert type(completions[0].description) is unicode s = u("author='öä'; author") completions = Script(s).completions() x = completions[0].description assert type(x) is unicode s = u("#-*- coding: iso-8859-1 -*-\nauthor='öä'; author") s = s.encode('latin-1') completions = Script(s).completions() assert type(completions[0].description) is unicode def test_unicode_attribute(): """ github jedi-vim issue #94 """ s1 = u('#-*- coding: utf-8 -*-\nclass Person():\n' ' name = "e"\n\nPerson().name.') completions1 = Script(s1).completions() assert 'strip' in [c.name for c in completions1] s2 = u('#-*- coding: utf-8 -*-\nclass Person():\n' ' name = "é"\n\nPerson().name.') completions2 = Script(s2).completions() assert 'strip' in [c.name for c in completions2] def test_multibyte_script(): """ `jedi.Script` must accept multi-byte string source. """ try: code = u("import datetime; datetime.d") comment = u("# multi-byte comment あいうえおä") s = (u('%s\n%s') % (code, comment)).encode('utf-8') except NameError: pass # python 3 has no unicode method else: assert len(Script(s, 1, len(code)).completions()) def test_goto_definition_at_zero(): """At zero usually sometimes raises unicode issues.""" assert Script("a", 1, 1).goto_definitions() == [] s = Script("str", 1, 1).goto_definitions() assert len(s) == 1 assert list(s)[0].description == 'class str' assert Script("", 1, 0).goto_definitions() == [] def test_complete_at_zero(): s = Script("str", 1, 3).completions() assert len(s) == 1 assert list(s)[0].name == 'str' s = Script("", 1, 0).completions() assert len(s) > 0
gpl-3.0
1905410/Misago
misago/users/management/commands/synchronizeusers.py
1
1305
import time from django.contrib.auth import get_user_model from django.core.management.base import BaseCommand from misago.core.management.progressbar import show_progress from misago.core.pgutils import batch_update class Command(BaseCommand): help = 'Synchronizes users' def handle(self, *args, **options): users_to_sync = get_user_model().objects.count() if not users_to_sync: self.stdout.write('\n\nNo users were found') else: self.sync_users(users_to_sync) def sync_users(self, users_to_sync): message = 'Synchronizing %s users...\n' self.stdout.write(message % users_to_sync) message = '\n\nSynchronized %s users' synchronized_count = 0 show_progress(self, synchronized_count, users_to_sync) start_time = time.time() for user in batch_update(get_user_model().objects.all()): user.threads = user.thread_set.count() user.posts = user.post_set.count() user.followers = user.followed_by.count() user.following = user.follows.count() user.save() synchronized_count += 1 show_progress(self, synchronized_count, users_to_sync, start_time) self.stdout.write(message % synchronized_count)
gpl-2.0
eyadsibai/rep
tests/test_pybrain.py
3
3872
# Copyright 2014-2015 Yandex LLC and contributors <https://yandex.com/> # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # <http://www.apache.org/licenses/LICENSE-2.0> # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import division, print_function, absolute_import from rep.test.test_estimators import check_classifier, check_regression, check_params, \ generate_classification_data, check_classification_reproducibility from rep.estimators.pybrain import PyBrainClassifier, PyBrainRegressor from sklearn.ensemble import BaggingClassifier from rep.estimators import SklearnClassifier __author__ = 'Artem Zhirokhov' classifier_params = { 'has_staged_pp': False, 'has_importances': False, 'supports_weight': False } regressor_params = { 'has_staged_predictions': False, 'has_importances': False, 'supports_weight': False } def test_pybrain_params(): check_params(PyBrainClassifier, layers=[1, 2], epochs=5, use_rprop=True, hiddenclass=['LinearLayer']) check_params(PyBrainRegressor, layers=[1, 2], epochs=5, etaplus=1.3, hiddenclass=['LinearLayer'], learningrate=0.1) def test_pybrain_classification(): clf = PyBrainClassifier(epochs=2) check_classifier(clf, **classifier_params) check_classifier(PyBrainClassifier(epochs=-1, continue_epochs=1, layers=[]), **classifier_params) check_classifier(PyBrainClassifier(epochs=2, layers=[5, 2]), **classifier_params) def test_pybrain_reproducibility(): try: import numpy X, y, _ = generate_classification_data() clf1 = PyBrainClassifier(layers=[4], epochs=2).fit(X, y) clf2 = PyBrainClassifier(layers=[4], epochs=2).fit(X, y) 
print(clf1.predict_proba(X)-clf2.predict_proba(X)) assert numpy.allclose(clf1.predict_proba(X), clf2.predict_proba(X)), 'different predicitons' check_classification_reproducibility(clf1, X, y) except: # This test fails. Because PyBrain can't reproduce training. pass def test_pybrain_Linear_MDLSTM(): check_classifier(PyBrainClassifier(epochs=2, layers=[10, 2], hiddenclass=['LinearLayer', 'MDLSTMLayer']), **classifier_params) check_regression(PyBrainRegressor(epochs=3, layers=[10, 2], hiddenclass=['LinearLayer', 'MDLSTMLayer']), **regressor_params) def test_pybrain_SoftMax_Tanh(): check_classifier(PyBrainClassifier(epochs=2, layers=[10, 5, 2], hiddenclass=['SoftmaxLayer', 'SoftmaxLayer', 'TanhLayer'], use_rprop=True), **classifier_params) check_regression(PyBrainRegressor(epochs=2, layers=[10, 5, 2], hiddenclass=['SoftmaxLayer', 'TanhLayer', 'TanhLayer']), **regressor_params) def pybrain_test_partial_fit(): clf = PyBrainClassifier(layers=[4], epochs=2) X, y, _ = generate_classification_data() clf.partial_fit(X, y) clf.partial_fit(X[:2], y[:2]) def test_pybrain_multi_classification(): check_classifier(PyBrainClassifier(), n_classes=4, **classifier_params) def test_pybrain_regression(): check_regression(PyBrainRegressor(), **regressor_params) def test_pybrain_multi_regression(): check_regression(PyBrainRegressor(), n_targets=4, **regressor_params) def test_simple_stacking_pybrain(): base_pybrain = PyBrainClassifier() base_bagging = BaggingClassifier(base_estimator=base_pybrain, n_estimators=3) check_classifier(SklearnClassifier(clf=base_bagging), **classifier_params)
apache-2.0
jamesrobertcarthew/notes
midiutil/MidiFile3.py
12
40732
#----------------------------------------------------------------------------- # Name: MidiFile.py # Purpose: MIDI file manipulation utilities # # Author: Mark Conway Wirt <emergentmusics) at (gmail . com> # # Created: 2008/04/17 # Copyright: (c) 2009 Mark Conway Wirt # License: Please see License.txt for the terms under which this # software is distributed. #----------------------------------------------------------------------------- import struct, sys, math # TICKSPERBEAT is the number of "ticks" (time measurement in the MIDI file) that # corresponds to one beat. This number is somewhat arbitrary, but should be chosen # to provide adequate temporal resolution. TICKSPERBEAT = 960 controllerEventTypes = { 'pan' : 0x0a } class MIDIEvent: ''' The class to contain the MIDI Event (placed on MIDIEventList. ''' def __init__(self): self.type='unknown' self.time=0 self.ord = 0 def __lt__(self, other): ''' Sorting function for events.''' if self.time < other.time: return True elif self.time > other.time: return False else: if self.ord < other.ord: return True elif self.ord > other.ord: return False else: return False def __cmp__(self, other): ''' Sorting function for events.''' if self.time < other.time: return -1 elif self.time > other.time: return 1 else: if self.ord < other.ord: return -1 elif self.ord > other.ord: return 1 else: return 0 class GenericEvent(): '''The event class from which specific events are derived ''' def __init__(self,time): self.time = time self.type = 'Unknown' def __eq__(self, other): ''' Equality operator for Generic Events and derived classes. In the processing of the event list, we have need to remove duplicates. To do this we rely on the fact that the classes are hashable, and must therefore have an equality operator (__hash__() and __eq__() must both be defined). This is the most embarrassing portion of the code, and anyone who knows about OO programming would find this almost unbelievable. 
Here we have a base class that knows specifics about derived classes, thus breaking the very spirit of OO programming. I suppose I should go back and restructure the code, perhaps removing the derived classes altogether. At some point perhaps I will. ''' if self.time != other.time or self.type != other.type: return False # What follows is code that encodes the concept of equality for each derived # class. Believe it f you dare. if self.type == 'note': if self.pitch != other.pitch or self.channel != other.channel: return False if self.type == 'tempo': if self.tempo != other.tempo: return False if self.type == 'programChange': if self.programNumber != other.programNumber or self.channel != other.channel: return False if self.type == 'trackName': if self.trackName != other.trackName: return False if self.type == 'controllerEvent': if self.parameter1 != other.parameter1 or \ self.channel != other.channel or \ self.eventType != other.eventType: return False if self.type == 'SysEx': if self.manID != other.manID: return False if self.type == 'UniversalSysEx': if self.code != other.code or\ self.subcode != other.subcode or \ self.sysExChannel != other.sysExChannel: return False return True def __hash__(self): ''' Return a hash code for the object. This is needed for the removal of duplicate objects from the event list. The only real requirement for the algorithm is that the hash of equal objects must be equal. There is probably great opportunity for improvements in the hashing function. ''' # Robert Jenkin's 32 bit hash. a = int(self.time) a = (a+0x7ed55d16) + (a<<12) a = (a^0xc761c23c) ^ (a>>19) a = (a+0x165667b1) + (a<<5) a = (a+0xd3a2646c) ^ (a<<9) a = (a+0xfd7046c5) + (a<<3) a = (a^0xb55a4f09) ^ (a>>16) return a class MIDITrack: '''A class that encapsulates a MIDI track ''' # Nested class definitions. 
class note(GenericEvent): '''A class that encapsulates a note ''' def __init__(self,channel, pitch,time,duration,volume): GenericEvent.__init__(self,time) self.pitch = pitch self.duration = duration self.volume = volume self.type = 'note' self.channel = channel def compare(self, other): '''Compare two notes for equality. ''' if self.pitch == other.pitch and \ self.time == other.time and \ self.duration == other.duration and \ self.volume == other.volume and \ self.type == other.type and \ self.channel == other.channel: return True else: return False class tempo(GenericEvent): '''A class that encapsulates a tempo meta-event ''' def __init__(self,time,tempo): GenericEvent.__init__(self,time) self.type = 'tempo' self.tempo = int(60000000 / tempo) class programChange(GenericEvent): '''A class that encapsulates a program change event. ''' def __init__(self, channel, time, programNumber): GenericEvent.__init__(self, time,) self.type = 'programChange' self.programNumber = programNumber self.channel = channel class SysExEvent(GenericEvent): '''A class that encapsulates a System Exclusive event. ''' def __init__(self, time, manID, payload): GenericEvent.__init__(self, time,) self.type = 'SysEx' self.manID = manID self.payload = payload class UniversalSysExEvent(GenericEvent): '''A class that encapsulates a Universal System Exclusive event. ''' def __init__(self, time, realTime, sysExChannel, code, subcode, payload): GenericEvent.__init__(self, time,) self.type = 'UniversalSysEx' self.realTime = realTime self.sysExChannel = sysExChannel self.code = code self.subcode = subcode self.payload = payload class ControllerEvent(GenericEvent): '''A class that encapsulates a program change event. ''' def __init__(self, channel, time, eventType, parameter1,): GenericEvent.__init__(self, time,) self.type = 'controllerEvent' self.parameter1 = parameter1 self.channel = channel self.eventType = eventType class trackName(GenericEvent): '''A class that encapsulates a program change event. 
''' def __init__(self, time, trackName): GenericEvent.__init__(self, time,) self.type = 'trackName' self.trackName = trackName def __init__(self, removeDuplicates, deinterleave): '''Initialize the MIDITrack object. ''' self.headerString = struct.pack('cccc',b'M',b'T',b'r',b'k') self.dataLength = 0 # Is calculated after the data is in place self.MIDIdata = b"" self.closed = False self.eventList = [] self.MIDIEventList = [] self.remdep = removeDuplicates self.deinterleave = deinterleave def addNoteByNumber(self,channel, pitch,time,duration,volume): '''Add a note by chromatic MIDI number ''' self.eventList.append(MIDITrack.note(channel, pitch,time,duration,volume)) def addControllerEvent(self,channel,time,eventType, paramerter1): ''' Add a controller event. ''' self.eventList.append(MIDITrack.ControllerEvent(channel,time,eventType, \ paramerter1)) def addTempo(self,time,tempo): ''' Add a tempo change (or set) event. ''' self.eventList.append(MIDITrack.tempo(time,tempo)) def addSysEx(self,time,manID, payload): ''' Add a SysEx event. ''' self.eventList.append(MIDITrack.SysExEvent(time, manID, payload)) def addUniversalSysEx(self,time,code, subcode, payload, sysExChannel=0x7F, \ realTime=False): ''' Add a Universal SysEx event. ''' self.eventList.append(MIDITrack.UniversalSysExEvent(time, realTime, \ sysExChannel, code, subcode, payload)) def addProgramChange(self,channel, time, program): ''' Add a program change event. ''' self.eventList.append(MIDITrack.programChange(channel, time, program)) def addTrackName(self,time,trackName): ''' Add a track name event. 
''' self.eventList.append(MIDITrack.trackName(time,trackName)) def changeNoteTuning(self, tunings, sysExChannel=0x7F, realTime=False, \ tuningProgam=0): '''Change the tuning of MIDI notes ''' payload = struct.pack('>B', tuningProgam) payload = payload + struct.pack('>B', len(tunings)) for (noteNumber, frequency) in tunings: payload = payload + struct.pack('>B', noteNumber) MIDIFreqency = frequencyTransform(frequency) for byte in MIDIFreqency: payload = payload + struct.pack('>B', byte) self.eventList.append(MIDITrack.UniversalSysExEvent(0, realTime, sysExChannel,\ 8, 2, payload)) def processEventList(self): ''' Process the event list, creating a MIDIEventList For each item in the event list, one or more events in the MIDIEvent list are created. ''' # Loop over all items in the eventList for thing in self.eventList: if thing.type == 'note': event = MIDIEvent() event.type = "NoteOn" event.time = thing.time * TICKSPERBEAT event.pitch = thing.pitch event.volume = thing.volume event.channel = thing.channel event.ord = 3 self.MIDIEventList.append(event) event = MIDIEvent() event.type = "NoteOff" event.time = (thing.time + thing.duration) * TICKSPERBEAT event.pitch = thing.pitch event.volume = thing.volume event.channel = thing.channel event.ord = 2 self.MIDIEventList.append(event) elif thing.type == 'tempo': event = MIDIEvent() event.type = "Tempo" event.time = thing.time * TICKSPERBEAT event.tempo = thing.tempo event.ord = 3 self.MIDIEventList.append(event) elif thing.type == 'programChange': event = MIDIEvent() event.type = "ProgramChange" event.time = thing.time * TICKSPERBEAT event.programNumber = thing.programNumber event.channel = thing.channel event.ord = 1 self.MIDIEventList.append(event) elif thing.type == 'trackName': event = MIDIEvent() event.type = "TrackName" event.time = thing.time * TICKSPERBEAT event.trackName = thing.trackName event.ord = 0 self.MIDIEventList.append(event) elif thing.type == 'controllerEvent': event = MIDIEvent() event.type = 
"ControllerEvent" event.time = thing.time * TICKSPERBEAT event.eventType = thing.eventType event.channel = thing.channel event.paramerter1 = thing.parameter1 event.ord = 1 self.MIDIEventList.append(event) elif thing.type == 'SysEx': event = MIDIEvent() event.type = "SysEx" event.time = thing.time * TICKSPERBEAT event.manID = thing.manID event.payload = thing.payload event.ord = 1 self.MIDIEventList.append(event) elif thing.type == 'UniversalSysEx': event = MIDIEvent() event.type = "UniversalSysEx" event.realTime = thing.realTime event.sysExChannel = thing.sysExChannel event.time = thing.time * TICKSPERBEAT event.code = thing.code event.subcode = thing.subcode event.payload = thing.payload event.ord = 1 self.MIDIEventList.append(event) else: print ("Error in MIDITrack: Unknown event type") sys.exit(2) # Assumptions in the code expect the list to be time-sorted. # self.MIDIEventList.sort(lambda x, y: x.time - y.time) self.MIDIEventList.sort(key=lambda x: (x.time)) if self.deinterleave: self.deInterleaveNotes() def removeDuplicates(self): ''' Remove duplicates from the eventList. This function will remove duplicates from the eventList. This is necessary because we the MIDI event stream can become confused otherwise. ''' # For this algorithm to work, the events in the eventList must be hashable # (that is, they must have a __hash__() and __eq__() function defined). tempDict = {} for item in self.eventList: tempDict[item] = 1 self.eventList = list(tempDict.keys()) # Sort on type, them on time. Necessary because keys() has no requirement to return # things in any order. self.eventList.sort(key=lambda x: (x.type)) self.eventList.sort(key=lambda x: (x.time)) #A bit of a hack. def closeTrack(self): '''Called to close a track before writing This function should be called to "close a track," that is to prepare the actual data stream for writing. Duplicate events are removed from the eventList, and the MIDIEventList is created. Called by the parent MIDIFile object. 
''' if self.closed == True: return self.closed = True if self.remdep: self.removeDuplicates() self.processEventList() def writeMIDIStream(self): ''' Write the meta data and note data to the packed MIDI stream. ''' #Process the events in the eventList self.writeEventsToStream() # Write MIDI close event. self.MIDIdata = self.MIDIdata + struct.pack('BBBB',0x00,0xFF, \ 0x2F,0x00) # Calculate the entire length of the data and write to the header self.dataLength = struct.pack('>L',len(self.MIDIdata)) def writeEventsToStream(self): ''' Write the events in MIDIEvents to the MIDI stream. ''' preciseTime = 0.0 # Actual time of event, ignoring round-off actualTime = 0.0 # Time as written to midi stream, include round-off for event in self.MIDIEventList: preciseTime = preciseTime + event.time # Convert the time to variable length and back, to see how much # error is introduced testBuffer = bytes() varTime = writeVarLength(event.time) for timeByte in varTime: testBuffer = testBuffer + struct.pack('>B',timeByte) (roundedVal,discard) = readVarLength(0,testBuffer) roundedTime = actualTime + roundedVal # print "Rounded, Precise: %15.10f %15.10f" % (roundedTime, preciseTime) # Calculate the delta between the two and apply it to the event time. delta = preciseTime - roundedTime event.time = event.time + delta # Now update the actualTime value, using the updated event time. 
testBuffer = bytes() varTime = writeVarLength(event.time) for timeByte in varTime: testBuffer = testBuffer + struct.pack('>B',timeByte) (roundedVal,discard) = readVarLength(0,testBuffer) actualTime = actualTime + roundedVal for event in self.MIDIEventList: if event.type == "NoteOn": code = 0x9 << 4 | event.channel varTime = writeVarLength(event.time) for timeByte in varTime: self.MIDIdata = self.MIDIdata + struct.pack('>B',timeByte) self.MIDIdata = self.MIDIdata + struct.pack('>B',code) self.MIDIdata = self.MIDIdata + struct.pack('>B',event.pitch) self.MIDIdata = self.MIDIdata + struct.pack('>B',event.volume) elif event.type == "NoteOff": code = 0x8 << 4 | event.channel varTime = writeVarLength(event.time) for timeByte in varTime: self.MIDIdata = self.MIDIdata + struct.pack('>B',timeByte) self.MIDIdata = self.MIDIdata + struct.pack('>B',code) self.MIDIdata = self.MIDIdata + struct.pack('>B',event.pitch) self.MIDIdata = self.MIDIdata + struct.pack('>B',event.volume) elif event.type == "Tempo": code = 0xFF subcode = 0x51 fourbite = struct.pack('>L', event.tempo) threebite = fourbite[1:4] # Just discard the MSB varTime = writeVarLength(event.time) for timeByte in varTime: self.MIDIdata = self.MIDIdata + struct.pack('>B',timeByte) self.MIDIdata = self.MIDIdata + struct.pack('>B',code) self.MIDIdata = self.MIDIdata + struct.pack('>B',subcode) self.MIDIdata = self.MIDIdata + struct.pack('>B', 0x03) # Data length: 3 self.MIDIdata = self.MIDIdata + threebite elif event.type == 'ProgramChange': code = 0xC << 4 | event.channel varTime = writeVarLength(event.time) for timeByte in varTime: self.MIDIdata = self.MIDIdata + struct.pack('>B',timeByte) self.MIDIdata = self.MIDIdata + struct.pack('>B',code) self.MIDIdata = self.MIDIdata + struct.pack('>B',event.programNumber) elif event.type == 'TrackName': varTime = writeVarLength(event.time) for timeByte in varTime: self.MIDIdata = self.MIDIdata + struct.pack('>B',timeByte) self.MIDIdata = self.MIDIdata + struct.pack('B',0xFF) # 
Meta-event self.MIDIdata = self.MIDIdata + struct.pack('B',0X03) # Event Type dataLength = len(event.trackName) dataLenghtVar = writeVarLength(dataLength) for i in range(0,len(dataLenghtVar)): self.MIDIdata = self.MIDIdata + struct.pack("b",dataLenghtVar[i]) self.MIDIdata = self.MIDIdata + event.trackName.encode() elif event.type == "ControllerEvent": code = 0xB << 4 | event.channel varTime = writeVarLength(event.time) for timeByte in varTime: self.MIDIdata = self.MIDIdata + struct.pack('>B',timeByte) self.MIDIdata = self.MIDIdata + struct.pack('>B',code) self.MIDIdata = self.MIDIdata + struct.pack('>B',event.eventType) self.MIDIdata = self.MIDIdata + struct.pack('>B',event.paramerter1) elif event.type == "SysEx": code = 0xF0 varTime = writeVarLength(event.time) for timeByte in varTime: self.MIDIdata = self.MIDIdata + struct.pack('>B',timeByte) self.MIDIdata = self.MIDIdata + struct.pack('>B', code) payloadLength = writeVarLength(len(event.payload)+2) for lenByte in payloadLength: self.MIDIdata = self.MIDIdata + struct.pack('>B',lenByte) self.MIDIdata = self.MIDIdata + struct.pack('>B', event.manID) self.MIDIdata = self.MIDIdata + event.payload self.MIDIdata = self.MIDIdata + struct.pack('>B',0xF7) elif event.type == "UniversalSysEx": code = 0xF0 varTime = writeVarLength(event.time) for timeByte in varTime: self.MIDIdata = self.MIDIdata + struct.pack('>B',timeByte) self.MIDIdata = self.MIDIdata + struct.pack('>B', code) # Do we need to add a length? 
payloadLength = writeVarLength(len(event.payload)+5) for lenByte in payloadLength: self.MIDIdata = self.MIDIdata + struct.pack('>B',lenByte) if event.realTime : self.MIDIdata = self.MIDIdata + struct.pack('>B', 0x7F) else: self.MIDIdata = self.MIDIdata + struct.pack('>B', 0x7E) self.MIDIdata = self.MIDIdata + struct.pack('>B', event.sysExChannel) self.MIDIdata = self.MIDIdata + struct.pack('>B', event.code) self.MIDIdata = self.MIDIdata + struct.pack('>B', event.subcode) self.MIDIdata = self.MIDIdata + event.payload self.MIDIdata = self.MIDIdata + struct.pack('>B',0xF7) def deInterleaveNotes(self): '''Correct Interleaved notes. Because we are writing multiple notes in no particular order, we can have notes which are interleaved with respect to their start and stop times. This method will correct that. It expects that the MIDIEventList has been time-ordered. ''' tempEventList = [] stack = {} for event in self.MIDIEventList: if event.type == 'NoteOn': if str(event.pitch)+str(event.channel) in stack: stack[str(event.pitch)+str(event.channel)].append(event.time) else: stack[str(event.pitch)+str(event.channel)] = [event.time] tempEventList.append(event) elif event.type == 'NoteOff': if len(stack[str(event.pitch)+str(event.channel)]) > 1: event.time = stack[str(event.pitch)+str(event.channel)].pop() tempEventList.append(event) else: stack[str(event.pitch)+str(event.channel)].pop() tempEventList.append(event) else: tempEventList.append(event) self.MIDIEventList = tempEventList # A little trickery here. We want to make sure that NoteOff events appear # before NoteOn events, so we'll do two sorts -- on on type, one on time. # This may have to be revisited, as it makes assumptions about how # the internal sort works, and is in essence creating a sort on a primary # and secondary key. 
self.MIDIEventList.sort(key=lambda x: (x.type)) self.MIDIEventList.sort(key=lambda x: (x.time)) def adjustTime(self,origin): ''' Adjust Times to be relative, and zero-origined ''' if len(self.MIDIEventList) == 0: return tempEventList = [] runningTime = 0 for event in self.MIDIEventList: adjustedTime = event.time - origin event.time = adjustedTime - runningTime runningTime = adjustedTime tempEventList.append(event) self.MIDIEventList = tempEventList def writeTrack(self,fileHandle): ''' Write track to disk. ''' if not self.closed: self.closeTrack() fileHandle.write(self.headerString) fileHandle.write(self.dataLength) fileHandle.write(self.MIDIdata) class MIDIHeader: ''' Class to encapsulate the MIDI header structure. This class encapsulates a MIDI header structure. It isn't used for much, but it will create the appropriately packed identifier string that all MIDI files should contain. It is used by the MIDIFile class to create a complete and well formed MIDI pattern. ''' def __init__(self,numTracks): ''' Initialize the data structures ''' self.headerString = struct.pack('cccc',b'M',b'T',b'h',b'd') self.headerSize = struct.pack('>L',6) # Format 1 = multi-track file self.format = struct.pack('>H',1) self.numTracks = struct.pack('>H',numTracks) self.ticksPerBeat = struct.pack('>H',TICKSPERBEAT) def writeFile(self,fileHandle): fileHandle.write(self.headerString) fileHandle.write(self.headerSize) fileHandle.write(self.format) fileHandle.write(self.numTracks) fileHandle.write(self.ticksPerBeat) class MIDIFile: '''Class that represents a full, well-formed MIDI pattern. This is a container object that contains a header, one or more tracks, and the data associated with a proper and well-formed MIDI pattern. 
Calling: MyMIDI = MidiFile(tracks, removeDuplicates=True, deinterleave=True) normally MyMIDI = MidiFile(tracks) Arguments: tracks: The number of tracks this object contains removeDuplicates: If true (the default), the software will remove duplicate events which have been added. For example, two notes at the same channel, time, pitch, and duration would be considered duplicate. deinterleave: If True (the default), overlapping notes (same pitch, same channel) will be modified so that they do not overlap. Otherwise the sequencing software will need to figure out how to interpret NoteOff events upon playback. ''' def __init__(self, numTracks, removeDuplicates=True, deinterleave=True): ''' Initialize the class ''' self.header = MIDIHeader(numTracks) self.tracks = list() self.numTracks = numTracks self.closed = False for i in range(0,numTracks): self.tracks.append(MIDITrack(removeDuplicates, deinterleave)) # Public Functions. These (for the most part) wrap the MIDITrack functions, where most # Processing takes place. def addNote(self,track, channel, pitch,time,duration,volume): """ Add notes to the MIDIFile object Use: MyMIDI.addNotes(track,channel,pitch,time, duration, volume) Arguments: track: The track to which the note is added. channel: the MIDI channel to assign to the note. [Integer, 0-15] pitch: the MIDI pitch number [Integer, 0-127]. time: the time (in beats) at which the note sounds [Float]. duration: the duration of the note (in beats) [Float]. volume: the volume (velocity) of the note. [Integer, 0-127]. """ self.tracks[track].addNoteByNumber(channel, pitch, time, duration, volume) def addTrackName(self,track, time,trackName): """ Add a track name to a MIDI track. Use: MyMIDI.addTrackName(track,time,trackName) Argument: track: The track to which the name is added. [Integer, 0-127]. time: The time at which the track name is added, in beats [Float]. trackName: The track name. [String]. 
""" self.tracks[track].addTrackName(time,trackName) def addTempo(self,track, time,tempo): """ Add a tempo event. Use: MyMIDI.addTempo(track, time, tempo) Arguments: track: The track to which the event is added. [Integer, 0-127]. time: The time at which the event is added, in beats. [Float]. tempo: The tempo, in Beats per Minute. [Integer] """ self.tracks[track].addTempo(time,tempo) def addProgramChange(self,track, channel, time, program): """ Add a MIDI program change event. Use: MyMIDI.addProgramChange(track,channel, time, program) Arguments: track: The track to which the event is added. [Integer, 0-127]. channel: The channel the event is assigned to. [Integer, 0-15]. time: The time at which the event is added, in beats. [Float]. program: the program number. [Integer, 0-127]. """ self.tracks[track].addProgramChange(channel, time, program) def addControllerEvent(self,track, channel,time,eventType, paramerter1): """ Add a MIDI controller event. Use: MyMIDI.addControllerEvent(track, channel, time, eventType, parameter1) Arguments: track: The track to which the event is added. [Integer, 0-127]. channel: The channel the event is assigned to. [Integer, 0-15]. time: The time at which the event is added, in beats. [Float]. eventType: the controller event type. parameter1: The event's parameter. The meaning of which varies by event type. """ self.tracks[track].addControllerEvent(channel,time,eventType, paramerter1) def changeNoteTuning(self, track, tunings, sysExChannel=0x7F, \ realTime=False, tuningProgam=0): """ Change a note's tuning using SysEx change tuning program. Use: MyMIDI.changeNoteTuning(track,[tunings],realTime=False, tuningProgram=0) Arguments: track: The track to which the event is added. [Integer, 0-127]. tunings: A list of tuples in the form (pitchNumber, frequency). [[(Integer,Float]] realTime: Boolean which sets the real-time flag. Defaults to false. sysExChannel: do note use (see below). tuningProgram: Tuning program to assign. Defaults to zero. 
[Integer, 0-127] In general the sysExChannel should not be changed (parameter will be depreciated). Also note that many software packages and hardware packages do not implement this standard! """ self.tracks[track].changeNoteTuning(tunings, sysExChannel, realTime,\ tuningProgam) def writeFile(self,fileHandle): ''' Write the MIDI File. Use: MyMIDI.writeFile(filehandle) Arguments: filehandle: a file handle that has been opened for binary writing. ''' self.header.writeFile(fileHandle) #Close the tracks and have them create the MIDI event data structures. self.close() #Write the MIDI Events to file. for i in range(0,self.numTracks): self.tracks[i].writeTrack(fileHandle) def addSysEx(self,track, time, manID, payload): """ Add a SysEx event Use: MyMIDI.addSysEx(track,time,ID,payload) Arguments: track: The track to which the event is added. [Integer, 0-127]. time: The time at which the event is added, in beats. [Float]. ID: The SysEx ID number payload: the event payload. Note: This is a low-level MIDI function, so care must be used in constructing the payload. It is recommended that higher-level helper functions be written to wrap this function and construct the payload if a developer finds him or herself using the function heavily. """ self.tracks[track].addSysEx(time,manID, payload) def addUniversalSysEx(self,track, time,code, subcode, payload, \ sysExChannel=0x7F, realTime=False): """ Add a Universal SysEx event. Use: MyMIDI.addUniversalSysEx(track, time, code, subcode, payload,\ sysExChannel=0x7f, realTime=False) Arguments: track: The track to which the event is added. [Integer, 0-127]. time: The time at which the event is added, in beats. [Float]. code: The even code. [Integer] subcode The event sub-code [Integer] payload: The event payload. [Binary string] sysExChannel: The SysEx channel. realTime: Sets the real-time flag. Defaults to zero. Note: This is a low-level MIDI function, so care must be used in constructing the payload. 
It is recommended that higher-level helper functions be written to wrap this function and construct the payload if a developer finds him or herself using the function heavily. As an example of such a helper function, see the changeNoteTuning function, both here and in MIDITrack. """ self.tracks[track].addUniversalSysEx(time,code, subcode, payload, sysExChannel,\ realTime) def shiftTracks(self, offset=0): """Shift tracks to be zero-origined, or origined at offset. Note that the shifting of the time in the tracks uses the MIDIEventList -- in other words it is assumed to be called in the stage where the MIDIEventList has been created. This function, however, it meant to operate on the eventList itself. """ origin = 1000000 # A little silly, but we'll assume big enough for track in self.tracks: if len(track.eventList) > 0: for event in track.eventList: if event.time < origin: origin = event.time for track in self.tracks: tempEventList = [] #runningTime = 0 for event in track.eventList: adjustedTime = event.time - origin #event.time = adjustedTime - runningTime + offset event.time = adjustedTime + offset #runningTime = adjustedTime tempEventList.append(event) track.eventList = tempEventList #End Public Functions ######################## def close(self): '''Close the MIDIFile for further writing. To close the File for events, we must close the tracks, adjust the time to be zero-origined, and have the tracks write to their MIDI Stream data structure. ''' if self.closed == True: return for i in range(0,self.numTracks): self.tracks[i].closeTrack() # We want things like program changes to come before notes when they are at the # same time, so we sort the MIDI events by their ordinality self.tracks[i].MIDIEventList.sort() origin = self.findOrigin() for i in range(0,self.numTracks): self.tracks[i].adjustTime(origin) self.tracks[i].writeMIDIStream() self.closed = True def findOrigin(self): '''Find the earliest time in the file's tracks.append. 
''' origin = 1000000 # A little silly, but we'll assume big enough # Note: This code assumes that the MIDIEventList has been sorted, so this should be insured # before it is called. It is probably a poor design to do this. # TODO: -- Consider making this less efficient but more robust by not assuming the list to be sorted. for track in self.tracks: if len(track.MIDIEventList) > 0: if track.MIDIEventList[0].time < origin: origin = track.MIDIEventList[0].time return origin def writeVarLength(i): '''Accept an input, and write a MIDI-compatible variable length stream The MIDI format is a little strange, and makes use of so-called variable length quantities. These quantities are a stream of bytes. If the most significant bit is 1, then more bytes follow. If it is zero, then the byte in question is the last in the stream ''' input = int(i+0.5) output = [0,0,0,0] reversed = [0,0,0,0] count = 0 result = input & 0x7F output[count] = result count = count + 1 input = input >> 7 while input > 0: result = input & 0x7F result = result | 0x80 output[count] = result count = count + 1 input = input >> 7 reversed[0] = output[3] reversed[1] = output[2] reversed[2] = output[1] reversed[3] = output[0] return reversed[4-count:4] def readVarLength(offset, buffer): '''A function to read a MIDI variable length variable. It returns a tuple of the value read and the number of bytes processed. The input is an offset into the buffer, and the buffer itself. 
''' toffset = offset output = 0 bytesRead = 0 while True: output = output << 7 byte = struct.unpack_from('>B',buffer,toffset)[0] toffset = toffset + 1 bytesRead = bytesRead + 1 output = output + (byte & 127) if (byte & 128) == 0: break return (output, bytesRead) def frequencyTransform(freq): '''Returns a three-byte transform of a frequencyTransform ''' resolution = 16384 freq = float(freq) dollars = 69 + 12 * math.log(freq/(float(440)), 2) firstByte = int(dollars) lowerFreq = 440 * pow(2.0, ((float(firstByte) - 69.0)/12.0)) if freq != lowerFreq: centDif = 1200 * math.log( (freq/lowerFreq), 2) else: centDif = 0 cents = round(centDif/100 * resolution) # round? secondByte = min([int(cents)>>7, 0x7F]) thirdByte = cents - (secondByte << 7) thirdByte = min([thirdByte, 0x7f]) if thirdByte == 0x7f and secondByte == 0x7F and firstByte == 0x7F: thirdByte = 0x7e thirdByte = int(thirdByte) return [firstByte, secondByte, thirdByte] def returnFrequency(freqBytes): '''The reverse of frequencyTransform. Given a byte stream, return a frequency. ''' resolution = 16384.0 baseFrequency = 440 * pow(2.0, (float(freqBytes[0]-69.0)/12.0)) frac = (float((int(freqBytes[1]) << 7) + int(freqBytes[2])) * 100.0) / resolution frequency = baseFrequency * pow(2.0, frac/1200.0) return frequency
mit
kustodian/ansible
test/units/modules/network/f5/test_bigip_traffic_selector.py
22
3034
# -*- coding: utf-8 -*- # # Copyright: (c) 2018, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import json import pytest import sys if sys.version_info < (2, 7): pytestmark = pytest.mark.skip("F5 Ansible modules require Python >= 2.7") from ansible.module_utils.basic import AnsibleModule try: from library.modules.bigip_traffic_selector import ApiParameters from library.modules.bigip_traffic_selector import ModuleParameters from library.modules.bigip_traffic_selector import ModuleManager from library.modules.bigip_traffic_selector import ArgumentSpec # In Ansible 2.8, Ansible changed import paths. from test.units.compat import unittest from test.units.compat.mock import Mock from test.units.modules.utils import set_module_args except ImportError: from ansible.modules.network.f5.bigip_traffic_selector import ApiParameters from ansible.modules.network.f5.bigip_traffic_selector import ModuleParameters from ansible.modules.network.f5.bigip_traffic_selector import ModuleManager from ansible.modules.network.f5.bigip_traffic_selector import ArgumentSpec # Ansible 2.8 imports from units.compat import unittest from units.compat.mock import Mock from units.modules.utils import set_module_args fixture_path = os.path.join(os.path.dirname(__file__), 'fixtures') fixture_data = {} def load_fixture(name): path = os.path.join(fixture_path, name) if path in fixture_data: return fixture_data[path] with open(path) as f: data = f.read() try: data = json.loads(data) except Exception: pass fixture_data[path] = data return data class TestParameters(unittest.TestCase): def test_module_parameters(self): args = dict( name='selector1', ) p = ModuleParameters(params=args) assert p.name == 'selector1' def test_api_parameters(self): args = dict( name='selector1', ) p = ApiParameters(params=args) assert p.name == 'selector1' class 
TestUntypedManager(unittest.TestCase): def setUp(self): self.spec = ArgumentSpec() def test_create(self, *args): set_module_args(dict( name='selector1', provider=dict( server='localhost', password='password', user='admin' ) )) module = AnsibleModule( argument_spec=self.spec.argument_spec, supports_check_mode=self.spec.supports_check_mode ) # Override methods to force specific logic in the module to happen mm = ModuleManager(module=module) mm.create_on_device = Mock(return_value=True) mm.exists = Mock(return_value=False) results = mm.exec_module() assert results['changed'] is True
gpl-3.0
ybalgir/Quantop
Lec7.py
1
1822
import numpy as np import pandas as pd from statsmodels import regression import statsmodels.api as sm import matplotlib.pyplot as plt import math import pandas_datareader.data as web from datetime import datetime def Starter_Lec7(): start = datetime(2014, 1, 1) end = datetime(2015, 1, 1) asset = web.DataReader("TSLA","yahoo",start,end) asset_closingPrice = asset['Close'] benchmark = web.DataReader("SPY","yahoo",start,end) benchmark_closingPrice = benchmark['Close'] r_a = asset_closingPrice.pct_change()[1:] r_b = benchmark_closingPrice.pct_change()[1:] modelSummary = linreg(r_a,r_b) print("{0} {1} \n\n".format(modelSummary,type(modelSummary))) def linreg(X,Y): #running linear regression X = sm.add_constant(X) model = regression.linear_model.OLS(Y,X).fit() a = model.params[0] b = model.params[1] X = pd.DataFrame(X, columns=['Close']) #Y_CMT Neat trick to extract columns from a pandas dataframe # Return summary of the regression and plot results X2 = np.linspace(float(X.min()), float(X.max()), 100) Y_hat = X2 * b + a plt.scatter(X, Y, alpha=0.3) # Plot the raw data plt.plot(X2, Y_hat, 'r', alpha=0.9) # Add the regression line, colored in red plt.xlabel('X Value') plt.ylabel('Y Value') plt.show() return model.summary() def TestPlotting(): N = 8 y = np.zeros(N) x1 = np.linspace(0, 10, N, endpoint=True) x2 = np.linspace(0, 10, N, endpoint=False) plt.plot(x1, y, 'o') plt.plot(x2, y + 0.5, 'o') plt.ylim([-0.5, 1]) plt.show() def NumpyMatrix(): array1 = np.matrix([[1,2,3],[4,5,6],[7,8,9]]) print("{0} {1} \n\n".format(array1[:,2],type(array1))) array1 = array1[:,2] print("{0} {1} \n\n".format(array1,type(array1)))
gpl-3.0
othelarian/arcns
mainscene/mainscene.py
1
75804
# -*- coding: utf-8 -*- from direct.showbase.DirectObject import DirectObject from direct.fsm.FSM import FSM from direct.gui.OnscreenText import OnscreenText from direct.gui.DirectGui import DirectFrame, DGG from direct.stdpy.file import * from direct.actor.Actor import Actor from panda3d.core import Point3, TextNode, CardMaker, BitMask32, Multifile, VirtualFileSystem, Filename, Patchfile from direct.interval.IntervalGlobal import Sequence, Parallel import Tkinter, tkFileDialog, json, sys, lang, os, urllib, shutil, time, scenebuilder class mainScene(FSM,DirectObject): """ **************************** Méthodes pour l'initialisation **************************** """ def __init__(self,app): FSM.__init__(self,"mainScene"); self.defaultTransitions = {"Init":["MainMenu"],"MainMenu":["SubMenu"],"SubMenu":["MainMenu"]} camera.setPos(0,-62,12); camera.setHpr(0,-10,0); self.accept("escape",sys.exit,[0]) self.app = app; self.version = "v0.0"; self.nomove = False if exists("arcns_config.json"): self.app.main_config = json.loads("".join([line.rstrip().lstrip() for line in file("arcns_config.json","rb")])) else: self.app.main_config = {"fullscreen": [False], "lang_chx": 1,"music":True,"sounds":True,"music_vol":1,"sounds_vol":1} try: mcf = open("arcns_config.json","w"); mcf.write(json.dumps(self.app.main_config)); mcf.close() except Exception,e: print e if self.app.main_config["lang_chx"] == 0: self.app.speak = lang.fr.fr_lang elif self.app.main_config["lang_chx"] == 1: self.app.speak = lang.en.en_lang self.states = {"main_chx":0,"main_lst":["campaign","mission","credits","options","quit"],"camp_sel":0,"saves_lst":[]}; self.options = {} for key in self.app.main_config: if key == "fullscreen": self.options[key] = [self.app.main_config[key][0]] else: self.options[key] = self.app.main_config[key] if exists("arcns_saves"): for fsav in os.listdir("arcns_saves"): self.states["saves_lst"].append( json.loads("".join([line.rstrip().lstrip() for line in file("arcns_saves/"+fsav,"rb")]))) 
self.actscene = scenebuilder.mainscene_builder self.dic_statics, self.dic_dynamics, self.dic_lights = self.app.arcstools.parse_scene(self.actscene) self.dic_sounds = {}; self.loadSfx(); guibuild = self.structureGUI(); self.dic_gui = self.app.arcstools.parse_gui(guibuild) self.dic_arrows= {}; self.loadmodels(); self.dic_anims = {}; self.activeAnim() self.vers_txt = OnscreenText(text=self.version,font=self.app.arcFont,pos=(1.15,-0.95),fg=(0,0,0,1),bg=(1,1,1,0.8)) self.dic_musics = {}; self.loadMusics(); self.dic_musics["mainscene_music"].setLoop(True) if self.app.main_config["music"]: self.dic_musics["mainscene_music"].play() self.mouse_task = taskMgr.add(self.mouseTask,"mainscene mouse task") def loadSfx(self): self.dic_sounds["main_menu_sel"] = base.loader.loadSfx("mainscene/sounds/son_main_menu_sel.wav") self.dic_sounds["main_menu_switch"] = base.loader.loadSfx("mainscene/sounds/son_main_menu_main.wav") self.dic_sounds["main_menu_escape"] = base.loader.loadSfx("mainscene/sounds/son_main_menu_aux.wav") for key in self.dic_sounds: self.dic_sounds[key].setVolume(self.app.main_config["sounds_vol"]) def loadMusics(self): self.dic_musics["mainscene_music"] = base.loader.loadMusic("mainscene/musics/main_music.wav") self.dic_musics["mainscene_music"].setVolume(self.app.main_config["music_vol"]) def structureGUI(self): opt_lang_txt = "" if self.options["lang_chx"] == 0: opt_lang_txt = "Français" elif self.options["lang_chx"] == 1: opt_lang_txt = "English" mainscene_gui = { "main_menu":{ "frame":{ "hide":True,"parent":None,"elts":{ "campaign":{"type":"button","pos":(-0.15,0,-0.2),"cmd":self.actionMainMenu,"scale":0.12,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False}, "mission":{"type":"button","pos":(-0.19,0,-0.34),"cmd":self.actionMainMenu,"scale":0.1,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False}, "credits":{"type":"button","pos":(-0.26,0,-0.47),"cmd":self.actionMainMenu,"scale":0.09,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False}, 
"options":{"type":"button","pos":(-0.35,0,-0.58),"cmd":self.actionMainMenu,"scale":0.07,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False}, "quit":{"type":"button","pos":(-0.41,0,-0.66),"cmd":self.actionMainMenu,"scale":0.05,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False} } } }, "camp_menu":{ # #"frame":{ # "hide":True,"parent":None,"elts":{ # #"stitre":{"type":"label"} # } #} # }, "mission_menu":{ "frame":{ "hide":True,"parent":None,"elts":{ "stitre":{"type":"label","pos":(-0.8,0,0.7),"scale":0.15,"algn":TextNode.ALeft,"hide":False}, # # TODO : élément pour le sous-menu "Mission à construire ici # # DEBUG : label temporaire "W.I.P." pour le sous menu "Missions" "wip":{"type":"label","pos":(0,0,0),"scale":0.2,"algn":TextNode.ALeft,"hide":False,"text":"W.I.P."} ### # } } # # TODO : s'il y a d'autres frames à générer pour le sous-menu "Missions", elles seront ici # }, "credits_menu":{ "frame":{ "hide":True,"parent":None,"elts":{ "stitre":{"type":"label","pos":(-0.8,0,0.7),"scale":0.14,"algn":TextNode.ACenter,"hide":False}, "graph_lab":{"type":"label","pos":(-0.5,0,0.4),"scale":0.1,"algn":TextNode.ACenter,"hide":False}, "graph_name":{"type":"label","pos":(-0.5,0,0.3),"scale":0.08,"algn":TextNode.ACenter,"hide":False}, "dev_lab":{"type":"label","pos":(0.5,0,0.4),"scale":0.1,"algn":TextNode.ACenter,"hide":False}, "dev_name":{"type":"label","pos":(0.5,0,0.3),"scale":0.08,"algn":TextNode.ACenter,"hide":False}, "trad_lab":{"type":"label","pos":(-0.5,0,-0.1),"scale":0.1,"algn":TextNode.ACenter,"hide":False}, "trad_name":{"type":"label","pos":(-0.5,0,-0.2),"scale":0.08,"algn":TextNode.ACenter,"hide":False}, "music_lab":{"type":"label","pos":(0.5,0,-0.1),"scale":0.1,"algn":TextNode.ACenter,"hide":False}, "music_name":{"type":"label","pos":(0.5,0,-0.2),"scale":0.08,"algn":TextNode.ACenter,"hide":False} } } }, "option_menu":{ "frame":{ "hide":True,"parent":None,"elts":{ 
"stitre":{"type":"label","pos":(-0.8,0,0.7),"scale":0.15,"algn":TextNode.ALeft,"hide":False}, "lst_radio":{"type":"radio","scale":0.08,"algn":TextNode.ALeft, "elts":[ ["windowed",self.options["fullscreen"],[False],self.actionSubMenu,["","change_opt","win"],(-1,0,0.4)], ["fullscreen",self.options["fullscreen"],[True],self.actionSubMenu,["","change_opt","win"],(-1,0,0.3)] ] }, "lang_chx":{"type":"label","pos":(-1.05,0,0.15),"scale":0.08,"algn":TextNode.ALeft,"hide":False}, "opt_optmenu":{"type":"optmenu","pos":(-0.45,0,0.15),"items":["Français","English"],"init":self.options["lang_chx"],"cmd":self.actionSubMenu, "scale":0.08,"change":1,"algn":TextNode.ALeft,"extra":["change_opt","lang"],"hide":False,"text":opt_lang_txt}, "music_vol":{"type":"label","pos":(-1.05,0,-0.2),"scale":0.08,"algn":TextNode.ALeft,"hide":False}, "music_mute":{"type":"checkbox","pos":(0.3,0,-0.2),"cmd":self.actionSubMenu,"val":(1 if self.options["music"] else 0),"scale":0.08, "box":"left","algn":TextNode.ALeft,"extra":["change_opt","music_mute"],"hide":False}, "music_slider":{"type":"slider","pos":(-0.3,0,-0.3),"scale":1,"inter":(0,1),"init":self.options["music_vol"],"pas":0.1, "cmd":self.actionSubMenu,"extra":[None,"change_opt","music_vol"],"orient":DGG.HORIZONTAL,"hide":False}, "sound_vol":{"type":"label","pos":(-1.05,0,-0.5),"scale":0.08,"algn":TextNode.ALeft,"hide":False}, "sound_mute":{"type":"checkbox","pos":(0.3,0,-0.5),"cmd":self.actionSubMenu,"val":(1 if self.options["sounds"] else 0),"scale":0.08, "box":"left","algn":TextNode.ALeft,"extra":["change_opt","sound_mute"],"hide":False}, "sound_slider":{"type":"slider","pos":(-0.3,0,-0.6),"scale":1,"inter":(0,1),"init":self.options["sounds_vol"],"pas":0.1, "cmd":self.actionSubMenu,"extra":[None,"change_opt","sounds_vol"],"orient":DGG.HORIZONTAL,"hide":False}, "maj_verify":{"type":"button","pos":(0.5,0,0.4),"cmd":self.checkMajStarter,"scale":0.08,"algn":TextNode.ALeft,"extra":[],"sound":None,"hide":False,"disabled":False}, 
"btn_valid":{"type":"button","pos":(-0.9,0,-0.8),"cmd":self.actionSubMenu,"scale":0.08,"algn":TextNode.ALeft,"extra":["valid_opt"],"sound":None,"hide":False}, "btn_reset":{"type":"button","pos":(-0.5,0,-0.8),"cmd":self.actionSubMenu,"scale":0.08,"algn":TextNode.ALeft,"extra":["cancel_opt"],"sound":None,"hide":False} } }, "maj_frame":{ "hide":True,"parent":self.app.voile,"elts":{ "maj_stitre":{"type":"label","pos":(0,0,0.4),"scale":0.15,"algn":TextNode.ACenter,"hide":False}, "maj_progress":{"type":"waitbar","pos":(0,0,0),"scale":0.8,"range":4,"val":0,"hide":False}, "maj_err0":{"type":"label","pos":(0,0,0.1),"scale":0.1,"algn":TextNode.ACenter,"hide":True}, "maj_retry":{"type":"button","pos":(-0.3,0,-0.1),"cmd":self.checkMajStarter,"scale":0.08,"algn":TextNode.ACenter,"extra":[],"sound":None,"hide":True}#, # #"maj_cancel":{"type":"button","pos":( # # } } }, "aux_menu":{ "frame":{ "hide":True,"parent":None,"elts":{ "return_btn":{"type":"button","pos":(0,0,-0.8),"cmd":self.actionSubMenu,"scale":0.08,"algn":TextNode.ALeft,"extra":["quit"],"sound":None,"hide":False} } } } } # """ #formulaire de mise à jour tmp_gui = self.app.arcButton(self.app.speak["option_menu"]["maj_cancel"],(0.3,0,-0.1),self.cancelMaj,txtalgn=TextNode.ACenter) tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_cancel"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["option_menu"]["maj_err1"],(0,0,0.1),0.1,TextNode.ACenter) tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_err1"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["option_menu"]["maj_nomaj"],(0,0,0.1),0.1,TextNode.ACenter) tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_nomaj"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["option_menu"]["maj_update"],(0,0,0.1),0.1,TextNode.ACenter) tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_update"] = tmp_gui tmp_gui = 
self.app.arcButton(self.app.speak["option_menu"]["maj_doit"],(-0.3,0,-0.1),self.doMajStarter,txtalgn=TextNode.ACenter) tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_doit"] = tmp_gui tmp_gui = self.app.arcWaitBar((0,0,0),0.8,4,0,self.app.speak["option_menu"]["maj_upgrade"]) tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_upgrade"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["option_menu"]["maj_success"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_success"] = tmp_gui tmp_gui = self.app.arcButton(self.app.speak["option_menu"]["maj_quit"],(0,0,-0.4),self.endingMaj,0.11,TextNode.ACenter) tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame2); self.dic_gui["option_menu"]["maj_quit"] = tmp_gui #camp_menu tmp_frame = DirectFrame(); tmp_frame.hide(); self.dic_gui["camp_menu"]["frame"] = tmp_frame tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["stitre"],(-0.8,0,0.7),0.15); tmp_gui.reparentTo(tmp_frame) self.dic_gui["camp_menu"]["stitre"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["sel_lab"],(-0.9,0,0.4)); tmp_gui.reparentTo(tmp_frame) self.dic_gui["camp_menu"]["sel_lab"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["new_unit"],(0.1,0,0.25)); tmp_gui.reparentTo(tmp_frame) self.dic_gui["camp_menu"]["new_unit"] = tmp_gui tmp_gui = self.app.arcEntry((0.2,0,0.1)); tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["entry_unit"] = tmp_gui tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["crea_unit"],(0.9,0,-0.05),self.actionSubMenu, txtalgn=TextNode.ACenter,extraArgs=["launch_game","crea_game"]) tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["crea_unit"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["used_name"],(0.15,0,-0.2)); tmp_gui.reparentTo(tmp_frame) self.dic_gui["camp_menu"]["used_name"] = tmp_gui; 
tmp_gui.hide() it = 1; lst_pos = [(0.4,0,-0.3),(0.6,0,-0.45),(0.8,0,-0.6)]; lst_scale = [0.07,0.06,0.04] for elt in self.states["saves_lst"]: pos = None; scale = None if it > 3: pos = lst_pos[2]; scale = lst_scale[2] else: pos = lst_pos[it-1]; scale = lst_scale[it-1] tmp_gui = self.app.arcLabel(elt["name"],pos,scale); tmp_gui.reparentTo(tmp_frame) self.dic_gui["camp_menu"]["sav_name_"+str(it)] = tmp_gui if it > 3: tmp_gui.hide() timed = "" if elt["time"] < 60: timed = str(elt["time"])+"s" elif elt["time"] < 3600: timed = str((elt["time"] - elt["time"]%60) / 60)+":"+("0" if elt["time"]%60 < 10 else "")+str(elt["time"]%60) elif elt["time"] < 86400: timed = str((elt["time"] - elt["time"]%3600) /3600)+":"+("0" if (elt["time"]%3600) < 600 else "")+str((elt["time"]%3600 - elt["time"]%60)/60) timed += ":"+("0" if elt["time"]%60 < 10 else "")+str(elt["time"]%60) else: days = ("days" if self.app.main_config["lang_chx"] == 1 else "jours") timed = str((elt["time"] - elt["time"]%86400)/86400)+" "+days+" "+str((elt["time"]%86400 - elt["time"]%3600)/3600)+":"+("0" if (elt["time"]%3600) < 600 else "") timed += str((elt["time"]%3600 - elt["time"]%60)/60)+":"+("0" if elt["time"]%60 < 10 else "")+str(elt["time"]%60) tmp_gui = self.app.arcLabel(timed,(0.9,0,0.1),txtalgn=TextNode.ARight); tmp_gui.reparentTo(tmp_frame); tmp_gui.hide() self.dic_gui["camp_menu"]["sav_time_"+str(it)] = tmp_gui it += 1 tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["save_import"],(-0.8,0,0.2),self.actionSubMenu,extraArgs=["import_game"]) tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["save_import"] = tmp_gui tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["save_export"],(0.9,0,-0.05),self.actionSubMenu, extraArgs=["export_game"],txtalgn=TextNode.ACenter); tmp_gui.hide(); tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["save_export"] = tmp_gui tmp_gui = 
self.app.arcButton(self.app.speak["camp_menu"]["supp_unit"],(0.3,0,-0.05),self.actionSubMenu, extraArgs=["supp_game"],txtalgn=TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["supp_unit"] = tmp_gui; tmp_gui.hide() tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["launch"],(-0.3,0,0.2),self.actionSubMenu,extraArgs=["launch_game"]) tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["launch"] = tmp_gui #frame d'export tmp_frame = DirectFrame(); self.dic_gui["camp_menu"]["export_frame"] = tmp_frame tmp_frame.reparentTo(self.app.voile); tmp_frame.hide() tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["export_titre"],(0,0,0.5),0.15,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["export_titre"] = tmp_gui tmp_gui = self.app.arcLabel("",(0,0,0.3),0.15,TextNode.ACenter); tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["export_name"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["export_progress"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["export_progress"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["export_dupli"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["export_dupli"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["export_nowrite"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["export_nowrite"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["export_success"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["export_success"] = tmp_gui tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["export_return"],(0,0,-.4),self.campaignVoile,txtalgn=TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; 
tmp_gui.hide(); self.dic_gui["camp_menu"]["export_return"] = tmp_gui #frame d'import tmp_frame = DirectFrame(); self.dic_gui["camp_menu"]["import_frame"] = tmp_frame tmp_frame.reparentTo(self.app.voile); tmp_frame.hide() tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["import_titre"],(0,0,0.5),0.15,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["import_titre"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["import_progress"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["import_progress"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["import_fail"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["import_fail"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["import_dupli"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["import_dupli"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["import_success"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["import_success"] = tmp_gui tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["import_return"],(0,0,-0.4),self.campaignVoile,txtalgn=TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; tmp_gui.hide(); self.dic_gui["camp_menu"]["import_return"] = tmp_gui #frame de suppression tmp_frame = DirectFrame(); self.dic_gui["camp_menu"]["supp_frame"] = tmp_frame tmp_frame.reparentTo(self.app.voile); tmp_frame.hide() tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["supp_titre"],(0,0,0.5),0.15,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["supp_titre"] = tmp_gui tmp_gui = self.app.arcLabel("test_unit",(0,0,0.3),0.15,TextNode.ACenter); tmp_gui.reparentTo(tmp_frame) self.dic_gui["camp_menu"]["supp_name"] = tmp_gui tmp_gui = 
self.app.arcLabel(self.app.speak["camp_menu"]["supp_question"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); self.dic_gui["camp_menu"]["supp_question"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["supp_progress"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["supp_progress"] = tmp_gui tmp_gui = self.app.arcLabel(self.app.speak["camp_menu"]["supp_finish"],(0,0,0),0.1,TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); self.dic_gui["camp_menu"]["supp_finish"] = tmp_gui tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["supp_cancel"],(-0.4,0,-0.4),self.campaignVoile,txtalgn=TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["supp_cancel"] = tmp_gui tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["supp_valid"],(0.4,0,-0.4),self.suppUnit,txtalgn=TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["supp_valid"] = tmp_gui tmp_gui = self.app.arcButton(self.app.speak["camp_menu"]["supp_return"],(0,0,-0.4),self.campaignVoile,txtalgn=TextNode.ACenter) tmp_gui.reparentTo(tmp_frame); tmp_gui.hide(); tmp_gui["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["supp_return"] = tmp_gui """ # return mainscene_gui def loadmodels(self): #arrows & cards arr_up = render.attachNewNode("main arrow up"); arr_up.setHpr(0,90,0); arr_up.setPos(6.2,1.5,7.3); arr_up.hide() self.app.arrow_mod.instanceTo(arr_up); arr_up.reparentTo(render) arr_up_crd = render.attachNewNode(self.app.card_arrow.generate()); arr_up_crd.node().setIntoCollideMask(BitMask32.bit(1)); arr_up_crd.hide() arr_up_crd.node().setTag("arrow","mainup"); arr_up_crd.reparentTo(self.app.pickly_node); arr_up_crd.setPos(6.2,1.7,7) self.dic_arrows["arrow_up"] = {"node":arr_up,"card":arr_up_crd,"status":0,"posn":[6.2,1.5,7.3],"posh":[6.2,1.7,7.5]} arr_dn = render.attachNewNode("main arrow down"); 
arr_dn.setHpr(180,-90,0); arr_dn.setPos(6.2,1.5,5); arr_dn.hide() self.app.arrow_mod.instanceTo(arr_dn); arr_dn.reparentTo(render) arr_dn_crd = render.attachNewNode(self.app.card_arrow.generate()); arr_dn_crd.node().setIntoCollideMask(BitMask32.bit(1)); arr_dn_crd.hide() arr_dn_crd.node().setTag("arrow","maindn"); arr_dn_crd.reparentTo(self.app.pickly_node); arr_dn_crd.setPos(6.2,1.7,5.2) self.dic_arrows["arrow_dn"] = {"node":arr_dn,"card":arr_dn_crd,"status":0,"posn":[6.2,1.5,5],"posh":[6.2,1.7,4.8]} arr_camp_up = render.attachNewNode("sub camp arrow up"); arr_camp_up.setHpr(0,90,-90); arr_camp_up.setPos(12,-2.1,7.5) self.app.arrow_mod.instanceTo(arr_camp_up); arr_camp_up.reparentTo(render); arr_camp_up.hide() arr_camp_up_crd = render.attachNewNode(self.app.card_arrow.generate()); arr_camp_up_crd.node().setIntoCollideMask(BitMask32.bit(1)) arr_camp_up_crd.node().setTag("arrow","campup"); arr_camp_up_crd.reparentTo(self.app.pickly_node) arr_camp_up_crd.setPos(12.2,-2.1,7.3); arr_camp_up_crd.setHpr(-90,0,0); arr_camp_up_crd.hide() self.dic_arrows["arrow_camp_up"] = {"node":arr_camp_up,"card":arr_camp_up_crd,"status":0,"posn":[12,-2.1,7.5],"posh":[12,-2.1,7.7]} arr_camp_dn = render.attachNewNode("sub camp arrow down"); arr_camp_dn.setHpr(0,-90,-90); arr_camp_dn.setPos(12,-2.2,1.5) self.app.arrow_mod.instanceTo(arr_camp_dn); arr_camp_dn.reparentTo(render); arr_camp_dn.hide() arr_camp_dn_crd = render.attachNewNode(self.app.card_arrow.generate()); arr_camp_dn_crd.node().setIntoCollideMask(BitMask32.bit(1)) arr_camp_dn_crd.node().setTag("arrow","campdn"); arr_camp_dn_crd.reparentTo(self.app.pickly_node) arr_camp_dn_crd.setPos(12.2,-2.2,1.6); arr_camp_dn_crd.setHpr(-90,0,0); arr_camp_dn_crd.hide() self.dic_arrows["arrow_camp_dn"] = {"node":arr_camp_dn,"card":arr_camp_dn_crd,"status":0,"posn":[12,-2.2,1.5],"posh":[12,-2.2,1.3]} # # TODO : flèches pour le sous-menu "missions" à construire ici # # NOTE : toutes la partie entre ### est temporaire, en attente que le module de 
parsing fonctionne ### #gates and moving arcs tmp_mod = Actor("mainscene/models/dynamics/main_gates.bam"); tmp_mod.reparentTo(render) tmp_mod.setPos(0,-48.2,9.5); tmp_mod.setHpr(0,80,0); self.dic_dynamics["gates"] = tmp_mod tmp_mod = Actor("mainscene/models/dynamics/main_m_menu.bam"); tmp_mod.reparentTo(render) tmp_mod.pose("load",1); self.dic_dynamics["arcs_main_menu"] = tmp_mod tmp_mod = Actor("mainscene/models/dynamics/main_a_menu.bam"); tmp_mod.reparentTo(render) tmp_mod.pose("load",1); self.dic_dynamics["arcs_aux_menu"] = tmp_mod # #décors additionnels (temporaire) # tmp_mod = base.loader.loadModel("mainscene/models/statics/main_roofs.bam"); tmp_mod.reparentTo(render) tmp_mod.setPos(0,0,0); self.dic_statics["roofs"] = tmp_mod tmp_mod = base.loader.loadModel("mainscene/models/statics/main_arcs_show.bam"); tmp_mod.reparentTo(render) tmp_mod.setPos(0,7.3,3); self.dic_statics["arcs_shower"] = tmp_mod # tmp_mod = base.loader.loadModel("mainscene/models/statics/main_title.bam"); tmp_mod.reparentTo(render) self.dic_statics["arc_title"] = tmp_mod ### # def activeAnim(self): tmp_anim = self.dic_statics["arcs_shower"].hprInterval(5,Point3(360,0,0),startHpr=Point3(0,0,0)) self.dic_anims["arcs_shower_pace"] = Sequence(tmp_anim,name="arcs_shower_pace") self.dic_anims["cam_move_init"] = camera.posInterval(4,Point3(0,-25,12)) self.dic_anims["move_texts"] = None; self.dic_anims["move_saves"] = None self.dic_anims["cam_move_maintosub"] = Parallel(name="main to sub") self.dic_anims["cam_move_maintosub"].append(camera.posInterval(2,Point3(-4,-1,7))) self.dic_anims["cam_move_maintosub"].append(camera.hprInterval(2,Point3(-90,-10,0))) self.dic_anims["cam_move_subtomain"] = Parallel(name="sub to main") self.dic_anims["cam_move_subtomain"].append(camera.posInterval(2,Point3(0,-25,12))) self.dic_anims["cam_move_subtomain"].append(camera.hprInterval(2,Point3(0,-10,0))) self.dic_anims["cam_move_launch"] = Parallel(name="launch the game") 
self.dic_anims["cam_move_launch"].append(camera.posInterval(4,Point3(0,-62,12))) self.dic_anims["cam_move_launch"].append(camera.hprInterval(2,Point3(0,-10,0))) def mouseTask(self,task): if base.mouseWatcherNode.hasMouse(): mpos = base.mouseWatcherNode.getMouse() self.app.pickerRay.setFromLens(base.camNode,mpos.getX(),mpos.getY()) self.app.mouse_trav.traverse(self.app.pickly_node) if self.app.mouse_hand.getNumEntries() > 0 and self.nomove: tag = self.app.mouse_hand.getEntry(0).getIntoNode().getTag("arrow"); nod = None if self.state == "MainMenu": if tag == "mainup": nod = self.dic_arrows["arrow_up"] elif tag == "maindn": nod = self.dic_arrows["arrow_dn"] elif self.state == "SubMenu": if self.states["main_chx"] == 0: if tag == "campup": nod = self.dic_arrows["arrow_camp_up"] elif tag == "campdn": nod = self.dic_arrows["arrow_camp_dn"] elif self.states["main_chx"] == 1: # # TODO : capture des flèches opur le sous-menu "Missions" # pass if not nod == None: nod["status"] = 2; nod["node"].setPos(nod["posh"][0],nod["posh"][1],nod["posh"][2]) elif self.nomove: for key in self.dic_arrows: if self.dic_arrows[key]["status"] == 2: self.dic_arrows[key]["status"] = 1 self.dic_arrows[key]["node"].setPos(self.dic_arrows[key]["posn"][0],self.dic_arrows[key]["posn"][1],self.dic_arrows[key]["posn"][2]) return task.cont """ **************************** Méthodes pour l'état "Init" **************************** """ def enterInit(self): self.dic_anims["arcs_shower_pace"].loop(); self.dic_dynamics["gates"].play("open_gates") self.task_chx = 0; taskMgr.doMethodLater(6.5,self.initTasks,"cam movement") taskMgr.doMethodLater(9,self.initTasks,"play arcs_main_menu load anim") taskMgr.doMethodLater(11,self.initTasks,"request for the next state") def exitInit(self): pass def initTasks(self,task): if self.task_chx == 0: #moving camera self.dic_anims["cam_move_init"].start(); self.task_chx += 1 elif self.task_chx == 1: #launch arcs_m_menu animation self.dic_dynamics["arcs_main_menu"].play("load"); 
self.task_chx += 1 elif self.task_chx == 2: self.request("MainMenu") return task.done """ **************************** Méthodes pour l'état "MainMenu" **************************** """ def enterMainMenu(self): self.app.change_cursor("main"); self.dic_gui["main_menu"]["frame"].show(); taskMgr.add(self.reactiveMainMenu,"reactive MainMenu") self.dic_arrows["arrow_up"]["status"] = 1; self.dic_arrows["arrow_dn"]["status"] = 1 self.dic_gui["main_menu"][self.states["main_lst"][self.states["main_chx"]]]["state"] = DGG.NORMAL def exitMainMenu(self): pass def actionMainMenu(self,value="valid"): if not self.nomove: return if value == "click": if self.dic_arrows["arrow_up"]["status"] == 2: value = "up" elif self.dic_arrows["arrow_dn"]["status"] == 2: value = "down" else: return self.dic_gui["main_menu"][self.states["main_lst"][self.states["main_chx"]]]["state"] = DGG.DISABLED if value == "down" or value == "up": sens = None if value == "down" and self.states["main_chx"] > 0: sens = True elif value == "up" and self.states["main_chx"] < 4: sens = False if sens != None: self.dic_arrows["arrow_up"]["status"] = 1 self.dic_arrows["arrow_up"]["node"].setPos(self.dic_arrows["arrow_up"]["posn"][0] ,self.dic_arrows["arrow_up"]["posn"][1],self.dic_arrows["arrow_up"]["posn"][2]) self.dic_arrows["arrow_dn"]["status"] = 1 self.dic_arrows["arrow_dn"]["node"].setPos(self.dic_arrows["arrow_dn"]["posn"][0] ,self.dic_arrows["arrow_dn"]["posn"][1],self.dic_arrows["arrow_dn"]["posn"][2]) if self.app.main_config["sounds"]: self.dic_sounds["main_menu_switch"].play() self.states["main_chx"] += (-1 if sens else 1) self.dic_arrows["arrow_up"]["node"].hide(); self.dic_arrows["arrow_dn"]["node"].hide() pos_texts = [(-0.41,0,0.31),(-0.35,0,0.22),(-0.26,0,0.1),(-0.19,0,-0.04),(-0.15,0,-0.2), (-0.19,0,-0.34),(-0.26,0,-0.47),(-0.35,0,-0.58),(-0.41,0,-0.66)] scale_texts = [0.05,0.07,0.09,0.1,0.12,0.1,0.09,0.07,0.05] try: self.dic_anims["move_texts"].finish() except: pass self.dic_anims["move_texts"] = None; 
self.dic_anims["move_texts"] = Parallel(name="MainMenu movement") for it in range(5): tmp_anim = self.dic_gui["main_menu"][self.states["main_lst"][it]].posInterval(0.4,Point3(pos_texts[4-self.states["main_chx"]+it])) self.dic_anims["move_texts"].append(tmp_anim) tmp_anim = self.dic_gui["main_menu"][self.states["main_lst"][it]].scaleInterval(0.4,scale_texts[4-self.states["main_chx"]+it]) self.dic_anims["move_texts"].append(tmp_anim) self.dic_dynamics["arcs_main_menu"].play("state_"+str(self.states["main_chx"]+(1 if sens else -1))+"_"+str(self.states["main_chx"])) self.nomove = False; self.dic_anims["move_texts"].start(); taskMgr.doMethodLater(0.4,self.reactiveMainMenu,"reactive MainMenu") else: self.dic_gui["main_menu"][self.states["main_lst"][self.states["main_chx"]]]["state"] = DGG.NORMAL elif value == "valid": if self.states["main_chx"] == 1 and len(self.states["saves_lst"]) == 0: return if self.app.main_config["sounds"]: self.dic_sounds["main_menu_sel"].play() self.ignoreAll(); self.accept("escape",sys.exit,[0]); self.nomove = False if self.states["main_chx"] == 4: sys.exit(0) else: self.launchSubMenu() def reactiveMainMenu(self,task): self.nomove = True if self.states["main_chx"] < 4: self.dic_arrows["arrow_up"]["node"].show() if self.states["main_chx"] > 0: self.dic_arrows["arrow_dn"]["node"].show() if self.states["main_chx"] == 1: if len(self.states["saves_lst"]) > 0: self.dic_gui["main_menu"]["mission"]["state"] = DGG.NORMAL else: self.dic_gui["main_menu"][self.states["main_lst"][self.states["main_chx"]]]["state"] = DGG.NORMAL self.accept("mouse1",self. 
actionMainMenu,["click"]); self.accept("wheel_up",self.actionMainMenu,["up"]) self.accept("wheel_down",self.actionMainMenu,["down"]) self.accept("arrow_up",self.actionMainMenu,["up"]); self.accept("arrow_down",self.actionMainMenu,["down"]) self.accept("enter",self.actionMainMenu,["valid"]) return task.done def launchSubMenu(self): self.app.change_cursor("blank"); self.dic_gui["main_menu"]["frame"].hide() self.dic_arrows["arrow_up"]["status"] = 0; self.dic_arrows["arrow_dn"]["status"] = 0 self.dic_arrows["arrow_up"]["node"].hide(); self.dic_arrows["arrow_dn"]["node"].hide() self.dic_anims["cam_move_maintosub"].start() taskMgr.doMethodLater(1,self.subArcsTask,"anim aux arcs task") taskMgr.doMethodLater(2.5,self.goSubMenuTask,"aff sub menu task") def goSubMenuTask(self,task): self.request("SubMenu"); return task.done """ **************************** Méthodes pour l'état "SubMenu" **************************** """ def enterSubMenu(self): self.app.change_cursor("main"); frame = None; self.accept("escape",self.actionSubMenu,["quit"]) if self.states["main_chx"] == 0: self.dic_arrows["arrow_camp_up"]["status"] = 1; self.dic_arrows["arrow_camp_dn"]["status"] = 1 if self.states["camp_sel"] > 0: self.dic_arrows["arrow_camp_dn"]["node"].show() self.accept("enter",self.actionSubMenu,["launch_game"]) self.accept("arrow_up",self.actionSubMenu,["camp_move","up"]); self.accept("arrow_down",self.actionSubMenu,["camp_move","down"]) self.accept("mouse1",self.actionSubMenu,["camp_move","click"]) self.accept("wheel_up",self.actionSubMenu,["camp_move","up"]); self.accept("wheel_down",self.actionSubMenu,["camp_move","down"]) if len(self.states["saves_lst"]) > 0 and self.states["camp_sel"] != len(self.states["saves_lst"]): self.dic_arrows["arrow_camp_up"]["node"].show() if self.states["camp_sel"] == 0: self.dic_gui["camp_menu"]["crea_unit"]["state"] = DGG.NORMAL else: self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.NORMAL self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.NORMAL 
self.dic_gui["camp_menu"]["save_import"]["state"] = DGG.NORMAL self.dic_gui["camp_menu"]["launch"]["state"] = DGG.NORMAL frame = "camp_menu"; self.nomove = True elif self.states["main_chx"] == 1: # # TODO : self.accept des touches fléchées et de la touche entrée pour le sous menu "Missions" # frame = "mission_menu"; self.nomove = True elif self.states["main_chx"] == 2: frame = "credits_menu" elif self.states["main_chx"] == 3: frame = "option_menu"; self.accept("enter",self.actionSubMenu,["valid_opt"]) self.dic_gui[frame]["frame"].show(); self.dic_gui["aux_menu"]["frame"].show() self.dic_gui["aux_menu"]["return_btn"]["state"] = DGG.NORMAL def exitSubMenu(self): pass def actionSubMenu(self,val1,val2=None,val3=None): if val1 == "quit": if self.app.main_config["sounds"]: self.dic_sounds["main_menu_escape"].play() self.app.change_cursor("blank"); frame = None if self.states["main_chx"] == 0: self.dic_arrows["arrow_camp_up"]["status"] = 0; self.dic_arrows["arrow_camp_dn"]["status"] = 0 self.dic_arrows["arrow_camp_up"]["node"].hide(); self.dic_arrows["arrow_camp_dn"]["node"].hide() self.dic_gui["camp_menu"]["crea_unit"]["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["save_import"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["launch"]["state"] = DGG.DISABLED frame = "camp_menu"; self.nomove = False elif self.states["main_chx"] == 1: # # TODO : vérifier qu'il n'y a rien à faire avant de quitter le sous menu "missions" # frame = "mission_menu"; self.nomove = False elif self.states["main_chx"] == 2: frame = "credits_menu" elif self.states["main_chx"] == 3: self.actionSubMenu("cancel_opt"); frame = "option_menu" self.dic_gui[frame]["frame"].hide(); self.dic_gui["aux_menu"]["frame"].hide() self.ignoreAll(); self.accept("escape",sys.exit,[0]) self.dic_anims["cam_move_subtomain"].start() taskMgr.doMethodLater(1,self.subArcsTask,"anim aux arcs 
task") taskMgr.doMethodLater(2.5,self.goMainMenuTask,"aff main menu task") self.dic_gui["aux_menu"]["return_btn"]["state"] = DGG.DISABLED elif val1 == "cancel_opt": if self.app.main_config["sounds"]: self.dic_sounds["main_menu_escape"].play() for key in self.options: if key == "fullscreen": if self.options[key][0] != self.app.main_config[key][0]: self.dic_gui["option_menu"]["windowed"]["indicatorValue"] = (0 if self.app.main_config["fullscreen"][0] else 1) self.dic_gui["option_menu"]["windowed"].setIndicatorValue() self.dic_gui["option_menu"]["fullscreen"]["indicatorValue"] = (1 if self.app.main_config["fullscreen"][0] else 0) self.dic_gui["option_menu"]["fullscreen"].setIndicatorValue() else: if self.options[key] != self.app.main_config[key]: if key == "lang_chx": self.dic_gui["option_menu"]["lang_opt"].set(self.app.main_config[key]) elif key == "music": self.dic_gui["option_menu"]["music_mute"]["indicatorValue"] = self.app.main_config[key] self.dic_gui["option_menu"]["music_mute"].setIndicatorValue() elif key == "sounds": self.dic_gui["option_menu"]["sound_mute"]["indicatorValue"] = self.app.main_config[key] self.dic_gui["option_menu"]["sound_mute"].setIndicatorValue() elif key == "music_vol": self.dic_gui["option_menu"]["music_slider"]["value"] = self.app.main_config[key] elif key == "sounds_vol": self.dic_gui["option_menu"]["sound_slider"]["value"] = self.app.main_config[key] for key in self.app.main_config: if key == "fullscreen": self.options[key][0] = self.app.main_config[key][0] else: self.options[key] = self.app.main_config[key] self.dic_gui["option_menu"]["btn_valid"]["state"] = DGG.DISABLED self.dic_gui["option_menu"]["btn_reset"]["state"] = DGG.DISABLED elif val1 == "valid_opt": if self.app.main_config["sounds"]: self.dic_sounds["main_menu_sel"].play() if self.options["lang_chx"] != self.app.main_config["lang_chx"]: old_text = ""; new_text = "" if self.options["lang_chx"] == 0: old_text = "days"; new_text = "jours"; self.app.speak = lang.fr.fr_lang elif 
self.options["lang_chx"] == 1: old_text = "jours"; new_text = "days"; self.app.speak = lang.en.en_lang for key1 in self.app.speak: for key2 in self.app.speak[key1]: self.dic_gui[key1][key2]["text"] = self.app.speak[key1][key2] for it in range(1,len(self.states["saves_lst"])+1): self.dic_gui["camp_menu"]["sav_time_"+str(it)]["text"] = self.dic_gui["camp_menu"]["sav_time_"+str(it)]["text"].replace(old_text,new_text) # # TODO : ajouter des éléments de traduction certainement présent dans le menu "Mission" # if self.options["music"] != self.app.main_config["music"]: if self.options["music"]: self.dic_music["mainscene_music"].play() else: self.dic_music["mainscene_music"].stop() if self.options["sounds_vol"] != self.app.main_config["sounds_vol"]: for key in self.dic_sounds: self.dic_sounds[key].setVolume(self.options["sounds_vol"]) if self.options["music_vol"] != self.app.main_config["music_vol"]: self.dic_music["mainscene_music"].setVolume(self.options["music_vol"]) for key in self.app.main_config: if key == "fullscreen": self.app.main_config[key][0] = self.options[key][0] else: self.app.main_config[key] = self.options[key] mcf = open("arcns_config.json","w"); mcf.write(json.dumps(self.options)); mcf.close() self.dic_gui["option_menu"]["btn_valid"]["state"] = DGG.DISABLED self.dic_gui["option_menu"]["btn_reset"]["state"] = DGG.DISABLED elif val2 == "change_opt": if val3 == "win": pass elif val3 == "lang": if val1 == "Français": self.options["lang_chx"] = 0 elif val1 == "English": self.options["lang_chx"] = 1 elif val3 == "music_mute": self.options["music"] = bool(val1) elif val3 == "sound_mute": self.options["sounds"] = bool(val1) elif val3 == "music_vol": self.options["music_vol"] = int(self.dic_gui["option_menu"]["music_slider"]["value"]*100)/100.0 elif val3 == "sounds_vol": self.options["sounds_vol"] = int(self.dic_gui["option_menu"]["sound_slider"]["value"]*100)/100.0 self.dic_gui["option_menu"]["btn_valid"]["state"] = DGG.DISABLED 
self.dic_gui["option_menu"]["btn_reset"]["state"] = DGG.DISABLED for key in self.options: if not self.options[key] == self.app.main_config[key]: self.dic_gui["option_menu"]["btn_valid"]["state"] = DGG.NORMAL self.dic_gui["option_menu"]["btn_reset"]["state"] = DGG.NORMAL break elif val1 == "launch_game": sav = None; self.dic_gui["camp_menu"]["used_name"].hide() if val2 == "crea_game" or self.states["camp_sel"] == 0: name = self.dic_gui["camp_menu"]["entry_unit"].get() if name == "": return if not exists("arcns_saves"): os.mkdir("arcns_saves") elif len(self.states["saves_lst"]) > 0: for elt in self.states["saves_lst"]: if elt["name"] == name: self.dic_gui["camp_menu"]["used_name"].show(); return dte = time.strftime("%d-%m-%Y_%H%M%S",time.localtime()) sav = {"name":name,"crea_date":dte,"time":0,"saved_place":"firstbase","init":0} fsav = open("arcns_saves/"+dte+".sav","w"); fsav.write(json.dumps(sav)); fsav.close() else: sav = self.states["saves_lst"][self.states["camp_sel"]-1] self.dic_arrows["arrow_camp_up"]["node"].hide(); self.dic_arrows["arrow_camp_dn"]["node"].hide() self.dic_gui["aux_menu"]["frame"].hide() self.ignoreAll(); self.nomove = False; self.dic_gui["camp_menu"]["frame"].hide(); self.launchGame(sav) elif val1 == "camp_move": if not self.nomove: return if val2 == "click": if self.dic_arrows["arrow_camp_up"]["status"] == 2: val2 = "up" elif self.dic_arrows["arrow_camp_dn"]["status"] == 2: val2 = "down" else: return if val2 == "up": if len(self.states["saves_lst"]) == self.states["camp_sel"]: return if self.states["camp_sel"] == 0: self.dic_gui["camp_menu"]["new_unit"].hide(); self.dic_gui["camp_menu"]["entry_unit"].hide() self.dic_gui["camp_menu"]["crea_unit"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["crea_unit"].hide(); self.dic_gui["camp_menu"]["used_name"].hide() else: self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"])].hide() self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].hide() 
self.dic_gui["camp_menu"]["save_export"].hide(); self.dic_gui["camp_menu"]["supp_unit"].hide() self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.DISABLED self.dic_anims["move_saves"] = None; self.dic_anims["move_saves"] = Parallel(name="saves movement") lst_pos = [(0.1,0,0.25),(0.4,0,-0.3),(0.6,0,-0.45)]; lst_scale = [0.08,0.07,0.06] it = 0; self.states["camp_sel"] += 1 while (self.states["camp_sel"]+it) <= len(self.states["saves_lst"]) and it < 3: tmp_anim = self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].posInterval(0.2,Point3(lst_pos[it])) self.dic_anims["move_saves"].append(tmp_anim) tmp_anim = self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].scaleInterval(0.2,lst_scale[it]) self.dic_anims["move_saves"].append(tmp_anim) it += 1 if self.states["camp_sel"]+3 <= len(self.states["saves_lst"]): self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+3)].show() elif val2 == "down": if self.states["camp_sel"] == 0: return self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].hide() self.dic_gui["camp_menu"]["save_export"].hide(); self.dic_gui["camp_menu"]["supp_unit"].hide() self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.DISABLED self.dic_anims["move_saves"] = None; self.dic_anims["move_saves"] = Parallel(name="saves movement") lst_pos = [(0.4,0,-0.3),(0.6,0,-0.45),(0.8,0,-0.6)]; lst_scale = [0.07,0.06,0.04]; it = 0 while (self.states["camp_sel"]+it) <= len(self.states["saves_lst"]) and it < 3: tmp_anim = self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].posInterval(0.2,Point3(lst_pos[it])) self.dic_anims["move_saves"].append(tmp_anim) tmp_anim = self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].scaleInterval(0.2,lst_scale[it]) self.dic_anims["move_saves"].append(tmp_anim) it += 1 self.states["camp_sel"] -= 1 if 
self.states["camp_sel"]+4 <= len(self.states["saves_lst"]): self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+4)].hide() self.dic_gui["camp_menu"]["save_import"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["launch"]["state"] = DGG.DISABLED self.ignore("enter"); self.dic_arrows["arrow_camp_up"]["node"].hide(); self.dic_arrows["arrow_camp_dn"]["node"].hide() self.dic_arrows["arrow_camp_up"]["status"] = 1; self.dic_arrows["arrow_camp_dn"]["status"] = 1 self.dic_arrows["arrow_camp_up"]["node"].setPos(self.dic_arrows["arrow_camp_up"]["posn"][0], self.dic_arrows["arrow_camp_up"]["posn"][1],self.dic_arrows["arrow_camp_up"]["posn"][2]) self.dic_arrows["arrow_camp_dn"]["node"].setPos(self.dic_arrows["arrow_camp_dn"]["posn"][0], self.dic_arrows["arrow_camp_dn"]["posn"][1],self.dic_arrows["arrow_camp_dn"]["posn"][2]) if self.app.main_config["sounds"]: self.dic_sounds["main_menu_switch"].play() self.nomove = False; self.dic_anims["move_saves"].start() # # TODO : animation du décors (valable uniquement avec le nouveau décors) # taskMgr.doMethodLater(0.2,self.reactiveCampaign,"reactive campaign interactions") elif val1 == "export_game": root = Tkinter.Tk(); root.withdraw(); path = tkFileDialog.askdirectory() if path != "": exp_save = self.states["saves_lst"][self.states["camp_sel"]-1] fln = "/arcns_export_("+exp_save["name"]+").sav" self.ignoreAll(); self.app.voile.show(); self.dic_gui["camp_menu"]["frame"].hide(); self.nomove = False self.dic_gui["aux_menu"]["frame"].hide(); self.dic_gui["camp_menu"]["export_frame"].show() self.dic_gui["camp_menu"]["export_name"]["text"] = exp_save["name"] if exists(path+fln): self.accept("enter",self.campaignVoile); self.dic_gui["camp_menu"]["export_progress"].hide() self.dic_gui["camp_menu"]["export_dupli"].show() self.dic_gui["camp_menu"]["export_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["export_return"].show() return try: fexp = open(path+fln,"w"); fexp.write(json.dumps(exp_save)); fexp.close() 
self.accept("enter",self.campaignVoile); self.dic_gui["camp_menu"]["export_progress"].hide() self.dic_gui["camp_menu"]["export_success"].show() self.dic_gui["camp_menu"]["export_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["export_return"].show() except Exception,e: print e; self.accept("enter",self.campaignVoile); self.dic_gui["camp_menu"]["export_progress"].hide() self.dic_gui["camp_menu"]["export_nowrite"].show() self.dic_gui["camp_menu"]["export_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["export_return"].show() elif val1 == "import_game": root = Tkinter.Tk(); root.withdraw(); path = tkFileDialog.askopenfilename(filetypes=[("Saves","*.sav"),("All","*")]) if path != "": self.ignoreAll(); self.app.voile.show(); self.dic_gui["camp_menu"]["frame"].hide(); self.nomove = False self.dic_gui["aux_menu"]["frame"].hide(); self.dic_gui["camp_menu"]["import_frame"].show() try: import_save = json.loads("".join([line.rstrip().lstrip() for line in file(path,"rb")])) for elt in self.states["saves_lst"]: if elt["name"] == import_save["name"]: self.dic_gui["camp_menu"]["import_progress"].hide(); self.dic_gui["camp_menu"]["import_dupli"].show() self.dic_gui["camp_menu"]["import_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["import_return"].show() self.accept("enter",self.campaignVoile); return lst_pos = [(0.4,0,-0.3),(0.6,0,-0.45),(0.8,0,-0.6)]; lst_scale = [0.07,0.06,0.04]; act_pos = None; act_scale = None if self.states["camp_sel"] == len(self.states["saves_lst"]): act_pos = lst_pos[0]; act_scale = lst_scale[0] elif self.states["camp_sel"]+1 == len(self.states["saves_lst"]): act_pos = lst_pos[1]; act_scale = lst_scale[1] else: act_pos = lst_pos[2]; act_scale = lst_scale[2] tmp_gui = self.app.arcLabel(import_save["name"],act_pos,act_scale); tmp_gui.reparentTo(self.dic_gui["camp_menu"]["frame"]) self.dic_gui["camp_menu"]["sav_name_"+str(len(self.states["saves_lst"])+1)] = tmp_gui if self.states["camp_sel"]+2 < len(self.states["saves_lst"]): 
tmp_gui.hide() timed = "" if import_save["time"] < 60: timed = str(import_save["time"])+"s" elif import_save["time"] < 3600: timed = str((import_save["time"] - import_save["time"]%60) / 60)+":" timed += ("0" if import_save["time"]%60 < 10 else "")+str(import_save["time"]%60) elif import_save["time"] < 86400: timed = str((import_save["time"] - import_save["time"]%3600) /3600)+":"+("0" if (import_save["time"]%3600) < 600 else "") timed += str((import_save["time"]%3600 - import_save["time"]%60)/60) timed +":"+("0" if import_save["time"]%60 < 10 else "")+str(import_save["time"]%60) else: days = ("days" if self.app.main_config["lang_chx"] == 1 else "jours") timed = str((import_save["time"] - import_save["time"]%86400)/86400)+" "+days+" " timed += str((import_save["time"]%86400 - import_save["time"]%3600)/3600)+":"+("0" if (import_save["time"]%3600) < 600 else "") timed += str((import_save["time"]%3600 - import_save["time"]%60)/60)+":" timed += ("0" if import_save["time"]%60 < 10 else "")+str(import_save["time"]%60) tmp_gui = self.app.arcLabel(timed,(0.9,0,0.1),txtalgn=TextNode.ARight); tmp_gui.reparentTo(self.dic_gui["camp_menu"]["frame"]) tmp_gui.hide(); self.dic_gui["camp_menu"]["sav_time_"+str(len(self.states["saves_lst"])+1)] = tmp_gui if not import_save.has_key("ori_date"): import_save["ori_date"] = import_save["crea_date"] import_save["crea_date"] = time.strftime("%d-%m-%Y_%H%M%S",time.localtime()) fli = open("arcns_saves/"+import_save["crea_date"]+".sav","w"); fli.write(json.dumps(import_save)); fli.close() self.states["saves_lst"].append(import_save); self.accept("enter",self.campaignVoile) self.dic_gui["camp_menu"]["import_progress"].hide(); self.dic_gui["camp_menu"]["import_success"].show() self.dic_gui["camp_menu"]["import_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["import_return"].show() except Exception,e: print e; self.accept("enter",self.campaignVoile) self.dic_gui["camp_menu"]["import_progress"].hide(); 
self.dic_gui["camp_menu"]["import_fail"].show() self.dic_gui["camp_menu"]["import_return"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["import_return"].show() elif val1 == "supp_game": self.ignoreAll(); self.app.voile.show(); self.dic_gui["camp_menu"]["supp_frame"].show() self.dic_gui["camp_menu"]["frame"].hide(); self.dic_gui["aux_menu"]["frame"].hide() self.dic_gui["camp_menu"]["supp_cancel"]["state"] = DGG.NORMAL self.dic_gui["camp_menu"]["supp_valid"]["state"] = DGG.NORMAL self.accept("enter",self.suppUnit); self.accept("escape",self.campaignVoile) self.nomove = False # # TODO : reste des actions pour le sous-menu "Missions" à définir ici # print "val1 : "+str(val1) print "val2 : "+str(val2) print "val3 : "+str(val3) # def reactiveCampaign(self,task): if self.states["camp_sel"] > 0: self.dic_arrows["arrow_camp_dn"]["node"].show() if self.states["camp_sel"] < len(self.states["saves_lst"]): self.dic_arrows["arrow_camp_up"]["node"].show() self.accept("enter",self.actionSubMenu,["launch_game"]) if self.states["camp_sel"] == 0: self.dic_gui["camp_menu"]["new_unit"].show(); self.dic_gui["camp_menu"]["entry_unit"].show() self.dic_gui["camp_menu"]["crea_unit"].show(); self.dic_gui["camp_menu"]["crea_unit"]["state"] = DGG.NORMAL else: self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"])].show() self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].show() self.dic_gui["camp_menu"]["save_export"].show(); self.dic_gui["camp_menu"]["supp_unit"].show() self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.NORMAL self.dic_gui["camp_menu"]["save_import"].show(); self.dic_gui["camp_menu"]["launch"].show() self.dic_gui["camp_menu"]["save_import"]["state"] = DGG.NORMAL; self.dic_gui["camp_menu"]["launch"]["state"] = DGG.NORMAL self.nomove = True; return task.done def campaignVoile(self): self.app.voile.hide() #nettoyage de la frame d'export 
self.dic_gui["camp_menu"]["export_frame"].hide(); self.dic_gui["camp_menu"]["export_name"]["text"] = "" self.dic_gui["camp_menu"]["export_progress"].show(); self.dic_gui["camp_menu"]["export_dupli"].hide() self.dic_gui["camp_menu"]["export_nowrite"].hide(); self.dic_gui["camp_menu"]["export_success"].hide() self.dic_gui["camp_menu"]["export_return"]["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["export_return"].hide() #nettoyage de la frame d'import self.dic_gui["camp_menu"]["import_frame"].hide(); self.dic_gui["camp_menu"]["import_progress"].show() self.dic_gui["camp_menu"]["import_fail"].hide(); self.dic_gui["camp_menu"]["import_dupli"].hide() self.dic_gui["camp_menu"]["import_success"].hide() self.dic_gui["camp_menu"]["import_return"]["state"] = DGG.DISABLED; self.dic_gui["camp_menu"]["import_return"].hide() #nettoyage de la frame de suppression self.dic_gui["camp_menu"]["supp_frame"].hide(); self.dic_gui["camp_menu"]["supp_name"]["text"] = "" self.dic_gui["camp_menu"]["supp_question"].show() self.dic_gui["camp_menu"]["supp_progress"].hide(); self.dic_gui["camp_menu"]["supp_finish"].hide() self.dic_gui["camp_menu"]["supp_return"].hide(); self.dic_gui["camp_menu"]["supp_return"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["supp_cancel"].show(); self.dic_gui["camp_menu"]["supp_cancel"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["supp_valid"].show(); self.dic_gui["camp_menu"]["supp_valid"]["state"] = DGG.DISABLED #remise en place du menu "Campagne" self.dic_gui["camp_menu"]["frame"].show(); self.dic_gui["aux_menu"]["frame"].show() self.accept("escape",self.actionSubMenu,["quit"]); self.accept("enter",self.actionSubMenu,["launch_game"]) self.accept("arrow_up",self.actionSubMenu,["camp_move","up"]); self.accept("arrow_down",self.actionSubMenu,["camp_move","down"]) self.accept("mouse1",self.actionSubMenu,["camp_move","click"]) self.accept("wheel_up",self.actionSubMenu,["camp_move","up"]); 
self.accept("wheel_down",self.actionSubMenu,["camp_move","down"]) self.nomove = True def suppUnit(self): self.ignoreAll() self.dic_gui["camp_menu"]["supp_cancel"].hide(); self.dic_gui["camp_menu"]["supp_cancel"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["supp_valid"].hide(); self.dic_gui["camp_menu"]["supp_valid"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["supp_question"].hide(); self.dic_gui["camp_menu"]["supp_progress"].show() os.unlink("arcns_saves/"+self.states["saves_lst"][self.states["camp_sel"]-1]["crea_date"]+".sav") self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"])].removeNode() self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].removeNode() if len(self.states["saves_lst"]) == 1: self.dic_gui["camp_menu"]["new_unit"].show(); self.dic_gui["camp_menu"]["entry_unit"].show() self.dic_gui["camp_menu"]["save_export"].hide(); self.dic_gui["camp_menu"]["save_export"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["supp_unit"].hide(); self.dic_gui["camp_menu"]["supp_unit"]["state"] = DGG.DISABLED self.dic_gui["camp_menu"]["crea_unit"].show(); self.dic_gui["camp_menu"]["crea_unit"]["state"] = DGG.NORMAL self.states["camp_sel"] = 0 elif self.states["camp_sel"] == len(self.states["saves_lst"]): self.states["camp_sel"] -= 1 self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"])].show() self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].show() else: it = 0; lst_pos = [(0.1,0,0.25),(0.4,0,-0.3),(0.6,0,-0.45)]; lst_scale = [0.08,0.07,0.06] while (self.states["camp_sel"]+it) < len(self.states["saves_lst"]): self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)] = self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it+1)] if it < 3: self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].setPos(lst_pos[it][0],lst_pos[it][1],lst_pos[it][2]) self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].setScale(lst_scale[it]) elif it == 3: 
self.dic_gui["camp_menu"]["sav_name_"+str(self.states["camp_sel"]+it)].show() self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"]+it)] = self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"]+it+1)] it += 1 self.dic_gui["camp_menu"]["sav_time_"+str(self.states["camp_sel"])].show() del self.dic_gui["camp_menu"]["sav_name_"+str(len(self.states["saves_lst"]))] del self.dic_gui["camp_menu"]["sav_time_"+str(len(self.states["saves_lst"]))] del self.states["saves_lst"][self.states["camp_sel"]-1] self.dic_gui["camp_menu"]["supp_progress"].hide(); self.dic_gui["camp_menu"]["supp_finish"].show() self.dic_gui["camp_menu"]["supp_return"].show(); self.dic_gui["camp_menu"]["supp_return"]["state"] = DGG.NORMAL self.accept("enter",self.campaignVoile) def checkMajStarter(self): self.dic_gui["option_menu"]["maj_success"].hide(); self.dic_gui["option_menu"]["maj_quit"].hide() self.dic_gui["option_menu"]["maj_retry"].hide(); self.dic_gui["option_menu"]["maj_retry"]["command"] = self.checkMajStarter self.dic_gui["option_menu"]["maj_err0"].hide(); self.dic_gui["option_menu"]["maj_err1"].hide() self.dic_gui["option_menu"]["maj_nomaj"].hide(); self.dic_gui["option_menu"]["maj_update"].hide() self.dic_gui["option_menu"]["maj_doit"].hide(); self.dic_gui["option_menu"]["maj_upgrade"].hide() self.dic_gui["option_menu"]["maj_cancel"].hide(); self.dic_gui["option_menu"]["maj_progress"].show() self.dic_gui["option_menu"]["frame"].hide(); self.dic_gui["aux_menu"]["frame"].hide() self.app.voile.show(); self.ignoreAll(); self.dic_gui["option_menu"]["maj_frame"].show() self.app.change_cursor("blank"); self.dic_gui["option_menu"]["maj_progress"]["value"] = 0 taskMgr.doMethodLater(0.1,self.majTask,"check maj task") def doMajStarter(self): self.dic_gui["option_menu"]["maj_cancel"].hide() self.dic_gui["option_menu"]["maj_retry"].hide(); self.dic_gui["option_menu"]["maj_retry"]["command"] = self.doMajStarter self.dic_gui["option_menu"]["maj_update"].hide(); 
self.dic_gui["option_menu"]["maj_doit"].hide() self.dic_gui["option_menu"]["maj_err0"].hide(); self.dic_gui["option_menu"]["maj_err1"].hide() lst = listdir("arcns_tmp") for elt in lst: if not elt == "arcns_multifiles.json": os.unlink("arcns_tmp/"+elt) self.dic_gui["option_menu"]["maj_progress"]["value"] = 5; self.dic_gui["option_menu"]["maj_upgrade"].show() self.app.change_cursor("blank"); self.ignoreAll(); self.dic_gui["option_menu"]["maj_upgrade"]["value"] = 0 taskMgr.doMethodLater(0.1,self.majTask,"do maj task") def majTask(self,task): if self.dic_gui["option_menu"]["maj_progress"]["value"] == 0: if not exists("arcns_tmp"): try: os.mkdir("arcns_tmp") except Exception,e: print e; self.labelMaj(); return task.done self.dic_gui["option_menu"]["maj_progress"]["value"] = 1; return task.again elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 1: try: urllib.urlretrieve("http://www.arcns.net/arcns_multifiles.json","arcns_tmp/arcns_multifiles.json") except Exception,e: print e; self.labelMaj(); return task.done self.dic_gui["option_menu"]["maj_progress"]["value"] = 2; return task.again elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 2: try: self.tmp_multifiles = json.loads("".join([line.rstrip().lstrip() for line in file("arcns_tmp/arcns_multifiles.json","rb")])) except Exception,e: print e; self.labelMaj(); return task.done self.dic_gui["option_menu"]["maj_progress"]["value"] = 3; return task.again elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 3: for key in self.app.arcns_multifiles: if self.app.arcns_multifiles[key] < self.tmp_multifiles[key]: self.dic_gui["option_menu"]["maj_progress"]["value"] = 4 else: del self.tmp_multifiles[key] self.labelMaj(); return task.done elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 0: try: for key in self.tmp_multifiles: fln = "patch_"+key+"_r"+str(self.app.arcns_multifiles[key])+"_r"+str(self.tmp_multifiles[key])+".mf" 
urllib.urlretrieve("http://www.arcns.net/patchs/"+fln,"arcns_tmp/"+fln) self.dic_gui["option_menu"]["maj_upgrade"]["value"] = 1; return task.again except Exception,e: print e; self.labelMaj(); return task.done elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 1: try: for key in self.tmp_multifiles: shutil.copy("arcns_mfs/"+key+"_r"+str(self.app.arcns_multifiles[key])+".mf","arcns_tmp") self.dic_gui["option_menu"]["maj_upgrade"]["value"] = 2; return task.again except Exception,e: print e; self.labelMaj(); return task.done elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 2: try: p = Patchfile(); m = Multifile() for key in self.tmp_multifiles: fln = "patch_"+key+"_r"+str(self.app.arcns_multifiles[key])+"_r"+str(self.tmp_multifiles[key]) m.openRead("arcns_tmp/"+fln+".mf"); m.extractSubfile(0,"arcns_tmp/"+fln+".patch"); m.close() rtn = p.apply(Filename("arcns_tmp/"+fln+".patch"),Filename("arcns_tmp/"+key+"_r"+str(self.app.arcns_multifiles[key])+".mf")) if not rtn: self.labelMaj(); return task.done else: os.rename("arcns_tmp/"+key+"_r"+str(self.app.arcns_multifiles[key])+".mf","arcns_tmp/"+key+"_r"+str(self.tmp_multifiles[key])+".mf") self.dic_gui["option_menu"]["maj_upgrade"]["value"] = 3; return task.again except Exception,e: print e; self.labelMaj(); return task.done elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 3: try: for key in self.tmp_multifiles: shutil.copy("arcns_tmp/"+key+"_r"+str(self.tmp_multifiles[key])+".mf","arcns_mfs") self.dic_gui["option_menu"]["maj_upgrade"]["value"] = 4; return task.again except Exception,e: print e; self.labelMaj(); return task.done elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 4: for key in self.tmp_multifiles: oldnb = self.app.arcns_multifiles[key]; self.app.arcns_multifiles[key] = self.tmp_multifiles[key] mcm = open("arcns_multifiles.json","w"); mcm.write(json.dumps(self.app.arcns_multifiles)); mcm.close() self.tmp_multifiles[key] = oldnb; 
os.unlink("arcns_mfs/"+key+"_r"+str(oldnb)+".mf") self.dic_gui["option_menu"]["maj_success"].show(); self.dic_gui["option_menu"]["maj_quit"].show() self.dic_gui["option_menu"]["maj_upgrade"].hide(); self.accept("enter",self.endingMaj) self.app.change_cursor("main"); return task.done def labelMaj(self): self.app.change_cursor("main"); self.accept("escape",self.cancelMaj); val_btn = "retry" self.accept("enter",(self.checkMajStarter if self.dic_gui["option_menu"]["maj_progress"] == 5 else self.doMajStarter)) self.dic_gui["option_menu"]["maj_progress"].hide(); self.dic_gui["option_menu"]["maj_upgrade"].hide() if self.dic_gui["option_menu"]["maj_progress"]["value"] == 0: self.dic_gui["option_menu"]["maj_err0"].show() elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 1: self.dic_gui["option_menu"]["maj_err1"].show() elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 2: self.dic_gui["option_menu"]["maj_err1"].show() elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 3: self.dic_gui["option_menu"]["maj_nomaj"].show() elif self.dic_gui["option_menu"]["maj_progress"]["value"] == 4: val_btn = "doit"; self.dic_gui["option_menu"]["maj_update"].show() elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 0: self.dic_gui["option_menu"]["maj_err1"].show() elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 1: self.dic_gui["option_menu"]["maj_err0"].show() elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 2: self.dic_gui["option_menu"]["maj_err0"].show() elif self.dic_gui["option_menu"]["maj_upgrade"]["value"] == 3: self.dic_gui["option_menu"]["maj_err0"].show() self.dic_gui["option_menu"]["maj_cancel"].show(); self.dic_gui["option_menu"]["maj_"+val_btn].show() def cancelMaj(self): if exists("arcns_tmp"): lst = listdir("arcns_tmp") for elt in lst: os.unlink("arcns_tmp/"+elt) os.rmdir("arcns_tmp") self.app.voile.hide(); self.ignoreAll(); self.dic_gui["option_menu"]["maj_frame"].hide() self.dic_gui["option_menu"]["frame"].show(); 
self.dic_gui["aux_menu"]["frame"].show() self.accept("enter",self.actionSubMenu,["valid_opt"]); self.accept("escape",self.actionSubMenu,["quit"]) def endingMaj(self): executable = sys.executable; args = sys.argv[:]; args.insert(0, sys.executable); os.execvp(executable, args) def goMainMenuTask(self,task): self.request("MainMenu"); return task.done def subArcsTask(self,task): if self.state == "MainMenu": self.dic_dynamics["arcs_aux_menu"].play("load") elif self.state == "SubMenu": self.dic_dynamics["arcs_aux_menu"].play("unload") return task.done def initGameTask(self,task): self.dic_dynamics["gates"].play("close_gates"); return task.done """ **************************** Méthodes pour la sortie du menu principal **************************** """ def launchGame(self,sav,options=None): self.app.change_cursor("blank"); self.ignoreAll(); self.accept("escape",sys.exit,[0]) self.app.transit = {}; self.app.transit["save"] = sav; self.app.transit["place"] = sav["saved_place"] # # TODO : cas du sous-menu "Missions" # self.dic_dynamics["arcs_aux_menu"].play("unload"); self.dic_anims["cam_move_launch"].start() taskMgr.doMethodLater(3.5,self.initGameTask,"close gates"); taskMgr.doMethodLater(10,self.app.game_screen,"launching the game") def close(self): self.ignoreAll(); taskMgr.remove(self.mouse_task); self.mouse_task = None self.states = None for key in self.dic_anims: try: self.dic_anims[key].finish() except: pass self.dic_anims[key] = None for key in self.dic_lights: render.clearLight(self.dic_lights[key]); self.dic_lights[key].removeNode() for key1 in self.dic_gui: for key2 in self.dic_gui[key1]: for t in self.dic_gui[key1][key2].options(): if t[0] == "command": self.dic_gui[key1][key2]["command"] = None; break self.dic_gui[key1][key2].removeNode() for key in self.dic_arrows: self.dic_arrows[key]["node"].removeNode(); self.dic_arrows[key]["card"].removeNode() for key in self.dic_statics: self.dic_statics[key].removeNode() for key in self.dic_dynamics: 
self.dic_dynamics[key].delete() for key in self.dic_sounds: self.dic_sounds[key].stop(); self.dic_sounds[key] = None for key in self.dic_musics: self.dic_musics[key].stop(); self.dic_musics[key] = None self.dic_statics = None; self.dic_dynamics = None; self.dic_anims = None self.dic_sounds = None; self.dic_musics = None self.vers_txt.removeNode() # DEBUG : cette fonction n'aura plus d'utilité une fois le code de la scène terminé def __del__(self): print "delete mainscene" ###
lgpl-3.0
nextgis/NextGIS_QGIS_open
python/plugins/processing/algs/gdal/ogr2ogr.py
5
4067
# -*- coding: utf-8 -*- """ *************************************************************************** ogr2ogr.py --------------------- Date : November 2012 Copyright : (C) 2012 by Victor Olaya Email : volayaf at gmail dot com *************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * *************************************************************************** """ __author__ = 'Victor Olaya' __date__ = 'November 2012' __copyright__ = '(C) 2012, Victor Olaya' # This will get replaced with a git SHA1 when you do a git archive __revision__ = '$Format:%H$' import os from processing.core.parameters import ParameterVector from processing.core.parameters import ParameterString from processing.core.parameters import ParameterSelection from processing.core.outputs import OutputVector from processing.tools.system import isWindows from processing.algs.gdal.OgrAlgorithm import OgrAlgorithm from processing.algs.gdal.GdalUtils import GdalUtils FORMATS = [ 'ESRI Shapefile', 'GeoJSON', 'GeoRSS', 'SQLite', 'GMT', 'MapInfo File', 'INTERLIS 1', 'INTERLIS 2', 'GML', 'Geoconcept', 'DXF', 'DGN', 'CSV', 'BNA', 'S57', 'KML', 'GPX', 'PGDump', 'GPSTrackMaker', 'ODS', 'XLSX', 'PDF', ] EXTS = [ '.shp', '.geojson', '.xml', '.sqlite', '.gmt', '.tab', '.ili', '.ili', '.gml', '.txt', '.dxf', '.dgn', '.csv', '.bna', '.000', '.kml', '.gpx', '.pgdump', '.gtm', '.ods', '.xlsx', '.pdf', ] class Ogr2Ogr(OgrAlgorithm): OUTPUT_LAYER = 'OUTPUT_LAYER' INPUT_LAYER = 'INPUT_LAYER' FORMAT = 'FORMAT' OPTIONS = 'OPTIONS' def defineCharacteristics(self): self.name = 'Convert format' self.group = '[OGR] Conversion' self.addParameter(ParameterVector(self.INPUT_LAYER, self.tr('Input layer'), [ParameterVector.VECTOR_TYPE_ANY], False)) 
self.addParameter(ParameterSelection(self.FORMAT, self.tr('Destination Format'), FORMATS)) self.addParameter(ParameterString(self.OPTIONS, self.tr('Creation options'), '', optional=True)) self.addOutput(OutputVector(self.OUTPUT_LAYER, self.tr('Output layer'))) def processAlgorithm(self, progress): inLayer = self.getParameterValue(self.INPUT_LAYER) ogrLayer = self.ogrConnectionString(inLayer)[1:-1] output = self.getOutputFromName(self.OUTPUT_LAYER) outFile = output.value formatIdx = self.getParameterValue(self.FORMAT) outFormat = FORMATS[formatIdx] ext = EXTS[formatIdx] if not outFile.endswith(ext): outFile += ext output.value = outFile output = self.ogrConnectionString(outFile) options = unicode(self.getParameterValue(self.OPTIONS)) if outFormat == 'SQLite' and os.path.isfile(output): os.remove(output) arguments = [] arguments.append('-f') arguments.append(outFormat) if len(options) > 0: arguments.append(options) arguments.append(output) arguments.append(ogrLayer) arguments.append(self.ogrLayerName(inLayer)) commands = [] if isWindows(): commands = ['cmd.exe', '/C ', 'ogr2ogr.exe', GdalUtils.escapeAndJoin(arguments)] else: commands = ['ogr2ogr', GdalUtils.escapeAndJoin(arguments)] GdalUtils.runGdal(commands, progress)
gpl-2.0
studywolf/pydmps
pydmps/dmp_rhythmic.py
1
5004
""" Copyright (C) 2013 Travis DeWolf This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ from pydmps.dmp import DMPs import numpy as np class DMPs_rhythmic(DMPs): """An implementation of discrete DMPs""" def __init__(self, **kwargs): """ """ # call super class constructor super(DMPs_rhythmic, self).__init__(pattern="rhythmic", **kwargs) self.gen_centers() # set variance of Gaussian basis functions # trial and error to find this spacing self.h = np.ones(self.n_bfs) * self.n_bfs # 1.75 self.check_offset() def gen_centers(self): """Set the centre of the Gaussian basis functions be spaced evenly throughout run time""" c = np.linspace(0, 2 * np.pi, self.n_bfs + 1) c = c[0:-1] self.c = c def gen_front_term(self, x, dmp_num): """Generates the front term on the forcing term. For rhythmic DMPs it's non-diminishing, so this function is just a placeholder to return 1. x float: the current value of the canonical system dmp_num int: the index of the current dmp """ if isinstance(x, np.ndarray): return np.ones(x.shape) return 1 def gen_goal(self, y_des): """Generate the goal for path imitation. For rhythmic DMPs the goal is the average of the desired trajectory. 
y_des np.array: the desired trajectory to follow """ goal = np.zeros(self.n_dmps) for n in range(self.n_dmps): num_idx = ~np.isnan(y_des[n]) # ignore nan's when calculating goal goal[n] = 0.5 * (y_des[n, num_idx].min() + y_des[n, num_idx].max()) return goal def gen_psi(self, x): """Generates the activity of the basis functions for a given canonical system state or path. x float, array: the canonical system state or path """ if isinstance(x, np.ndarray): x = x[:, None] return np.exp(self.h * (np.cos(x - self.c) - 1)) def gen_weights(self, f_target): """Generate a set of weights over the basis functions such that the target forcing term trajectory is matched. f_target np.array: the desired forcing term trajectory """ # calculate x and psi x_track = self.cs.rollout() psi_track = self.gen_psi(x_track) # efficiently calculate BF weights using weighted linear regression for d in range(self.n_dmps): for b in range(self.n_bfs): self.w[d, b] = np.dot(psi_track[:, b], f_target[:, d]) / ( np.sum(psi_track[:, b]) + 1e-10 ) # ============================== # Test code # ============================== if __name__ == "__main__": import matplotlib.pyplot as plt # test normal run dmp = DMPs_rhythmic(n_dmps=1, n_bfs=10, w=np.zeros((1, 10))) y_track, dy_track, ddy_track = dmp.rollout() plt.figure(1, figsize=(6, 3)) plt.plot(np.ones(len(y_track)) * dmp.goal, "r--", lw=2) plt.plot(y_track, lw=2) plt.title("DMP system - no forcing term") plt.xlabel("time (ms)") plt.ylabel("system trajectory") plt.legend(["goal", "system state"], loc="lower right") plt.tight_layout() # test imitation of path run plt.figure(2, figsize=(6, 4)) n_bfs = [10, 30, 50, 100, 10000] # a straight line to target path1 = np.sin(np.arange(0, 2 * np.pi, 0.01) * 5) # a strange path to target path2 = np.zeros(path1.shape) path2[int(len(path2) / 2.0) :] = 0.5 for ii, bfs in enumerate(n_bfs): dmp = DMPs_rhythmic(n_dmps=2, n_bfs=bfs) dmp.imitate_path(y_des=np.array([path1, path2])) y_track, dy_track, ddy_track = 
dmp.rollout() plt.figure(2) plt.subplot(211) plt.plot(y_track[:, 0], lw=2) plt.subplot(212) plt.plot(y_track[:, 1], lw=2) plt.subplot(211) a = plt.plot(path1, "r--", lw=2) plt.title("DMP imitate path") plt.xlabel("time (ms)") plt.ylabel("system trajectory") plt.legend([a[0]], ["desired path"], loc="lower right") plt.subplot(212) b = plt.plot(path2, "r--", lw=2) plt.title("DMP imitate path") plt.xlabel("time (ms)") plt.ylabel("system trajectory") plt.legend(["%i BFs" % i for i in n_bfs], loc="lower right") plt.tight_layout() plt.show()
gpl-3.0
idrogeno/enigma2
lib/python/Components/NimManager.py
1
85000
from boxbranding import getBoxType from time import localtime, mktime from datetime import datetime import xml.etree.cElementTree from os import path from enigma import eDVBSatelliteEquipmentControl as secClass, \ eDVBSatelliteLNBParameters as lnbParam, \ eDVBSatelliteDiseqcParameters as diseqcParam, \ eDVBSatelliteSwitchParameters as switchParam, \ eDVBSatelliteRotorParameters as rotorParam, \ eDVBResourceManager, eDVBDB, eEnv, iDVBFrontend from Tools.HardwareInfo import HardwareInfo from Tools.BoundFunction import boundFunction from Components.About import about from config import config, ConfigSubsection, ConfigSelection, ConfigFloat, ConfigSatlist, ConfigYesNo, ConfigInteger, ConfigSubList, ConfigNothing, ConfigSubDict, ConfigOnOff, ConfigDateTime, ConfigText maxFixedLnbPositions = 0 # LNB65 3601 All satellites 1 (USALS) # LNB66 3602 All satellites 2 (USALS) # LNB67 3603 All satellites 3 (USALS) # LNB68 3604 All satellites 4 (USALS) # LNB69 3605 Selecting satellites 1 (USALS) # LNB70 3606 Selecting satellites 2 (USALS) MAX_LNB_WILDCARDS = 6 MAX_ORBITPOSITION_WILDCARDS = 6 #magic numbers ORBITPOSITION_LIMIT = 3600 def getConfigSatlist(orbpos, satlist): default_orbpos = None for x in satlist: if x[0] == orbpos: default_orbpos = orbpos break return ConfigSatlist(satlist, default_orbpos) class SecConfigure: def getConfiguredSats(self): return self.configuredSatellites def addSatellite(self, sec, orbpos): sec.addSatellite(orbpos) self.configuredSatellites.add(orbpos) def addLNBSimple(self, sec, slotid, diseqcmode, toneburstmode = diseqcParam.NO, diseqcpos = diseqcParam.SENDNO, orbpos = 0, longitude = 0, latitude = 0, loDirection = 0, laDirection = 0, turningSpeed = rotorParam.FAST, useInputPower=True, inputPowerDelta=50, fastDiSEqC = False, setVoltageTone = True, diseqc13V = False, CircularLNB = False): if orbpos is None or orbpos == 3600 or orbpos == 3601: return #simple defaults if sec.addLNB(): print "No space left on m_lnbs (mac No. 
144 LNBs exceeded)" return tunermask = 1 << slotid if self.equal.has_key(slotid): for slot in self.equal[slotid]: tunermask |= (1 << slot) if self.linked.has_key(slotid): for slot in self.linked[slotid]: tunermask |= (1 << slot) sec.setLNBSatCR(-1) sec.setLNBSatCRTuningAlgo(0) sec.setLNBSatCRpositionnumber(1) sec.setLNBLOFL(CircularLNB and 10750000 or 9750000) sec.setLNBLOFH(CircularLNB and 10750000 or 10600000) sec.setLNBThreshold(CircularLNB and 10750000 or 11700000) sec.setLNBIncreasedVoltage(False) sec.setRepeats(0) sec.setFastDiSEqC(fastDiSEqC) sec.setSeqRepeat(False) sec.setCommandOrder(0) #user values sec.setDiSEqCMode(3 if diseqcmode == 4 else diseqcmode) sec.setToneburst(toneburstmode) sec.setCommittedCommand(diseqcpos) sec.setUncommittedCommand(0) # SENDNO if 0 <= diseqcmode < 3: self.addSatellite(sec, orbpos) if setVoltageTone: if diseqc13V: sec.setVoltageMode(switchParam.HV_13) else: sec.setVoltageMode(switchParam.HV) sec.setToneMode(switchParam.HILO) else: # noinspection PyProtectedMember sec.setVoltageMode(switchParam._14V) sec.setToneMode(switchParam.OFF) elif 3 <= diseqcmode < 5: # diseqc 1.2 if self.satposdepends.has_key(slotid): for slot in self.satposdepends[slotid]: tunermask |= (1 << slot) sec.setLatitude(latitude) sec.setLaDirection(laDirection) sec.setLongitude(longitude) sec.setLoDirection(loDirection) sec.setUseInputpower(useInputPower) sec.setInputpowerDelta(inputPowerDelta) sec.setRotorTurningSpeed(turningSpeed) user_satList = self.NimManager.satList if diseqcmode == 4: user_satList = [] if orbpos and isinstance(orbpos, str): for user_sat in self.NimManager.satList: if str(user_sat[0]) in orbpos: user_satList.append(user_sat) for x in user_satList: print "Add sat " + str(x[0]) self.addSatellite(sec, int(x[0])) if diseqc13V: sec.setVoltageMode(switchParam.HV_13) else: sec.setVoltageMode(switchParam.HV) sec.setToneMode(switchParam.HILO) sec.setRotorPosNum(0) # USALS sec.setLNBSlotMask(tunermask) def setSatposDepends(self, sec, nim1, nim2): 
print "tuner", nim1, "depends on satpos of", nim2 sec.setTunerDepends(nim1, nim2) def linkInternally(self, slotid): nim = self.NimManager.getNim(slotid) if nim.internallyConnectableTo is not None: nim.setInternalLink() def linkNIMs(self, sec, nim1, nim2): print "link tuner", nim1, "to tuner", nim2 # for internally connect tuner A to B if getBoxType() == 'vusolo2' or nim2 == (nim1 - 1): self.linkInternally(nim1) sec.setTunerLinked(nim1, nim2) def getRoot(self, slotid, connto): visited = [] while self.NimManager.getNimConfig(connto).configMode.value in ("satposdepends", "equal", "loopthrough"): connto = int(self.NimManager.getNimConfig(connto).connectedTo.value) if connto in visited: # prevent endless loop return slotid visited.append(connto) return connto def update(self): sec = secClass.getInstance() self.configuredSatellites = set() for slotid in self.NimManager.getNimListOfType("DVB-S"): if self.NimManager.nimInternallyConnectableTo(slotid) is not None: self.NimManager.nimRemoveInternalLink(slotid) sec.clear() ## this do unlinking NIMs too !! 
print "sec config cleared" self.linked = { } self.satposdepends = { } self.equal = { } nim_slots = self.NimManager.nim_slots used_nim_slots = [ ] for slot in nim_slots: if slot.type is not None: used_nim_slots.append((slot.slot, slot.description, slot.config.configMode.value != "nothing" and True or False, slot.isCompatible("DVB-S2"), slot.frontend_id is None and -1 or slot.frontend_id)) eDVBResourceManager.getInstance().setFrontendSlotInformations(used_nim_slots) try: for slot in nim_slots: if slot.frontend_id is not None: types = [type for type in ["DVB-C", "DVB-T", "DVB-T2", "DVB-S", "DVB-S2", "ATSC"] if eDVBResourceManager.getInstance().frontendIsCompatible(slot.frontend_id, type)] if "DVB-T2" in types: # DVB-T2 implies DVB-T support types.remove("DVB-T") if "DVB-S2" in types: # DVB-S2 implies DVB-S support types.remove("DVB-S") if len(types) > 1: slot.multi_type = {} for type in types: slot.multi_type[str(types.index(type))] = type except: pass for slot in nim_slots: x = slot.slot nim = slot.config if slot.isCompatible("DVB-S"): # save what nim we link to/are equal to/satposdepends to. # this is stored in the *value* (not index!) 
of the config list if nim.configMode.value == "equal": connto = self.getRoot(x, int(nim.connectedTo.value)) if not self.equal.has_key(connto): self.equal[connto] = [] self.equal[connto].append(x) elif nim.configMode.value == "loopthrough": self.linkNIMs(sec, x, int(nim.connectedTo.value)) connto = self.getRoot(x, int(nim.connectedTo.value)) if not self.linked.has_key(connto): self.linked[connto] = [] self.linked[connto].append(x) elif nim.configMode.value == "satposdepends": self.setSatposDepends(sec, x, int(nim.connectedTo.value)) connto = self.getRoot(x, int(nim.connectedTo.value)) if not self.satposdepends.has_key(connto): self.satposdepends[connto] = [] self.satposdepends[connto].append(x) for slot in nim_slots: x = slot.slot nim = slot.config hw = HardwareInfo() if slot.isCompatible("DVB-S"): print "slot: " + str(x) + " configmode: " + str(nim.configMode.value) if nim.configMode.value in ( "loopthrough", "satposdepends", "nothing" ): pass else: sec.setSlotNotLinked(x) if nim.configMode.value == "equal": pass elif nim.configMode.value == "simple": #simple config print "diseqcmode: ", nim.diseqcMode.value if nim.diseqcMode.value == "single": #single currentCircular = False if nim.diseqcA.value in ("360", "560"): currentCircular = nim.simpleDiSEqCSetCircularLNB.value if nim.simpleSingleSendDiSEqC.value: self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, diseqc13V = nim.diseqc13V.value, CircularLNB = currentCircular) else: self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.NONE, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value, CircularLNB = currentCircular) elif nim.diseqcMode.value == "toneburst_a_b": #Toneburst A/B self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.A, diseqcmode = diseqcParam.V1_0, 
diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value) self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.B, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.SENDNO, diseqc13V = nim.diseqc13V.value) elif nim.diseqcMode.value == "diseqc_a_b": #DiSEqC A/B fastDiSEqC = nim.simpleDiSEqCOnlyOnSatChange.value setVoltageTone = nim.simpleDiSEqCSetVoltageTone.value self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value) self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value) elif nim.diseqcMode.value == "diseqc_a_b_c_d": #DiSEqC A/B/C/D fastDiSEqC = nim.simpleDiSEqCOnlyOnSatChange.value setVoltageTone = nim.simpleDiSEqCSetVoltageTone.value self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcA.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value) self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcB.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.AB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value) self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcC.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = diseqcParam.V1_0, diseqcpos = diseqcParam.BA, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value) self.addLNBSimple(sec, slotid = x, orbpos = nim.diseqcD.orbital_position, toneburstmode = diseqcParam.NO, diseqcmode = 
diseqcParam.V1_0, diseqcpos = diseqcParam.BB, fastDiSEqC = fastDiSEqC, setVoltageTone = setVoltageTone, diseqc13V = nim.diseqc13V.value) elif nim.diseqcMode.value in ("positioner", "positioner_select"): #Positioner current_mode = 3 sat = 0 if nim.diseqcMode.value == "positioner_select": current_mode = 4 sat = nim.userSatellitesList.value if nim.latitudeOrientation.value == "north": laValue = rotorParam.NORTH else: laValue = rotorParam.SOUTH if nim.longitudeOrientation.value == "east": loValue = rotorParam.EAST else: loValue = rotorParam.WEST inputPowerDelta=nim.powerThreshold.value useInputPower=False turning_speed=0 if nim.powerMeasurement.value: useInputPower=True turn_speed_dict = { "fast": rotorParam.FAST, "slow": rotorParam.SLOW } if turn_speed_dict.has_key(nim.turningSpeed.value): turning_speed = turn_speed_dict[nim.turningSpeed.value] else: beg_time = localtime(nim.fastTurningBegin.value) end_time = localtime(nim.fastTurningEnd.value) turning_speed = ((beg_time.tm_hour+1) * 60 + beg_time.tm_min + 1) << 16 turning_speed |= (end_time.tm_hour+1) * 60 + end_time.tm_min + 1 self.addLNBSimple(sec, slotid = x, diseqcmode = current_mode, orbpos = sat, longitude = nim.longitude.float, loDirection = loValue, latitude = nim.latitude.float, laDirection = laValue, turningSpeed = turning_speed, useInputPower = useInputPower, inputPowerDelta = inputPowerDelta, diseqc13V = nim.diseqc13V.value) elif nim.configMode.value == "advanced": #advanced config self.updateAdvanced(sec, x) print "sec config completed" def updateAdvanced(self, sec, slotid): try: if config.Nims[slotid].advanced.unicableconnected is not None: if config.Nims[slotid].advanced.unicableconnected.value: config.Nims[slotid].advanced.unicableconnectedTo.save_forced = True self.linkNIMs(sec, slotid, int(config.Nims[slotid].advanced.unicableconnectedTo.value)) connto = self.getRoot(slotid, int(config.Nims[slotid].advanced.unicableconnectedTo.value)) if not self.linked.has_key(connto): self.linked[connto] = [] 
self.linked[connto].append(slotid) else: config.Nims[slotid].advanced.unicableconnectedTo.save_forced = False except: pass lnbSat = {} for x in range(1, 71): lnbSat[x] = [] #wildcard for all satellites ( for rotor ) for x in range(3601, 3605): lnb = int(config.Nims[slotid].advanced.sat[x].lnb.value) if lnb != 0: for x in self.NimManager.satList: print "add", x[0], "to", lnb lnbSat[lnb].append(x[0]) #wildcard for user satellites ( for rotor ) for x in range(3605, 3607): lnb = int(config.Nims[slotid].advanced.sat[x].lnb.value) if lnb != 0: for user_sat in self.NimManager.satList: if str(user_sat[0]) in config.Nims[slotid].advanced.sat[x].userSatellitesList.value: print "add", user_sat[0], "to", lnb lnbSat[lnb].append(user_sat[0]) for x in self.NimManager.satList: lnb = int(config.Nims[slotid].advanced.sat[x[0]].lnb.value) if lnb != 0: print "add", x[0], "to", lnb lnbSat[lnb].append(x[0]) for x in range(1, 71): if len(lnbSat[x]) > 0: currLnb = config.Nims[slotid].advanced.lnb[x] if sec.addLNB(): print "No space left on m_lnbs (max No. 
144 LNBs exceeded)" return posnum = 1; #default if LNB movable if x <= maxFixedLnbPositions: posnum = x; sec.setLNBSatCRpositionnumber(x) # LNB has fixed Position else: sec.setLNBSatCRpositionnumber(0) # or not (movable LNB) tunermask = 1 << slotid if self.equal.has_key(slotid): for slot in self.equal[slotid]: tunermask |= (1 << slot) if self.linked.has_key(slotid): for slot in self.linked[slotid]: tunermask |= (1 << slot) if currLnb.lof.value != "unicable": sec.setLNBSatCR(-1) sec.setLNBSatCRTuningAlgo(0) if currLnb.lof.value == "universal_lnb": sec.setLNBLOFL(9750000) sec.setLNBLOFH(10600000) sec.setLNBThreshold(11700000) elif currLnb.lof.value == "unicable": def setupUnicable(configManufacturer, ProductDict): manufacturer_name = configManufacturer.value manufacturer = ProductDict[manufacturer_name] product_name = manufacturer.product.value if product_name == "None" and manufacturer.product.saved_value != "None": product_name = manufacturer.product.value = manufacturer.product.saved_value manufacturer_scr = manufacturer.scr manufacturer_positions_value = manufacturer.positions[product_name][0].value position_idx = (posnum - 1) % manufacturer_positions_value if product_name in manufacturer_scr: diction = manufacturer.diction[product_name].value positionsoffset = manufacturer.positionsoffset[product_name][0].value if diction !="EN50607" or ((posnum <= (positionsoffset + manufacturer_positions_value) and (posnum > positionsoffset) and x <= maxFixedLnbPositions)): #for every allowed position sec.setLNBSatCRformat(diction =="EN50607" and 1 or 0) sec.setLNBSatCR(manufacturer_scr[product_name].index) sec.setLNBSatCRvco(manufacturer.vco[product_name][manufacturer_scr[product_name].index].value*1000) sec.setLNBSatCRpositions(manufacturer_positions_value) sec.setLNBLOFL(manufacturer.lofl[product_name][position_idx].value * 1000) sec.setLNBLOFH(manufacturer.lofh[product_name][position_idx].value * 1000) sec.setLNBThreshold(manufacturer.loft[product_name][position_idx].value 
* 1000) sec.setLNBSatCRTuningAlgo(currLnb.unicableTuningAlgo.value == "reliable" and 1 or 0) configManufacturer.save_forced = True manufacturer.product.save_forced = True manufacturer.vco[product_name][manufacturer_scr[product_name].index].save_forced = True else: #positionnumber out of range print "positionnumber out of range" else: print "no product in list" if currLnb.unicable.value == "unicable_user": #TODO satpositions for satcruser if currLnb.dictionuser.value == "EN50607": sec.setLNBSatCRformat(1) sec.setLNBSatCR(currLnb.satcruserEN50607.index) sec.setLNBSatCRvco(currLnb.satcrvcouserEN50607[currLnb.satcruserEN50607.index].value*1000) else: sec.setLNBSatCRformat(0) sec.setLNBSatCR(currLnb.satcruserEN50494.index) sec.setLNBSatCRvco(currLnb.satcrvcouserEN50494[currLnb.satcruserEN50494.index].value*1000) sec.setLNBLOFL(currLnb.lofl.value * 1000) sec.setLNBLOFH(currLnb.lofh.value * 1000) sec.setLNBThreshold(currLnb.threshold.value * 1000) sec.setLNBSatCRpositions(64) elif currLnb.unicable.value == "unicable_matrix": self.reconstructUnicableDate(currLnb.unicableMatrixManufacturer, currLnb.unicableMatrix, currLnb) setupUnicable(currLnb.unicableMatrixManufacturer, currLnb.unicableMatrix) elif currLnb.unicable.value == "unicable_lnb": self.reconstructUnicableDate(currLnb.unicableLnbManufacturer, currLnb.unicableLnb, currLnb) setupUnicable(currLnb.unicableLnbManufacturer, currLnb.unicableLnb) elif currLnb.lof.value == "c_band": sec.setLNBLOFL(5150000) sec.setLNBLOFH(5150000) sec.setLNBThreshold(5150000) elif currLnb.lof.value == "user_defined": sec.setLNBLOFL(currLnb.lofl.value * 1000) sec.setLNBLOFH(currLnb.lofh.value * 1000) sec.setLNBThreshold(currLnb.threshold.value * 1000) elif currLnb.lof.value == "circular_lnb": sec.setLNBLOFL(10750000) sec.setLNBLOFH(10750000) sec.setLNBThreshold(10750000) if currLnb.increased_voltage.value: sec.setLNBIncreasedVoltage(True) else: sec.setLNBIncreasedVoltage(False) dm = currLnb.diseqcMode.value if dm == "none": 
sec.setDiSEqCMode(diseqcParam.NONE) elif dm == "1_0": sec.setDiSEqCMode(diseqcParam.V1_0) elif dm == "1_1": sec.setDiSEqCMode(diseqcParam.V1_1) elif dm == "1_2": sec.setDiSEqCMode(diseqcParam.V1_2) if self.satposdepends.has_key(slotid): for slot in self.satposdepends[slotid]: tunermask |= (1 << slot) if dm != "none": if currLnb.toneburst.value == "none": sec.setToneburst(diseqcParam.NO) elif currLnb.toneburst.value == "A": sec.setToneburst(diseqcParam.A) elif currLnb.toneburst.value == "B": sec.setToneburst(diseqcParam.B) # Committed Diseqc Command cdc = currLnb.commitedDiseqcCommand.value c = { "none": diseqcParam.SENDNO, "AA": diseqcParam.AA, "AB": diseqcParam.AB, "BA": diseqcParam.BA, "BB": diseqcParam.BB } if c.has_key(cdc): sec.setCommittedCommand(c[cdc]) else: sec.setCommittedCommand(long(cdc)) sec.setFastDiSEqC(currLnb.fastDiseqc.value) sec.setSeqRepeat(currLnb.sequenceRepeat.value) if currLnb.diseqcMode.value == "1_0": currCO = currLnb.commandOrder1_0.value sec.setRepeats(0) else: currCO = currLnb.commandOrder.value udc = int(currLnb.uncommittedDiseqcCommand.value) if udc > 0: sec.setUncommittedCommand(0xF0|(udc-1)) else: sec.setUncommittedCommand(0) # SENDNO sec.setRepeats({"none": 0, "one": 1, "two": 2, "three": 3}[currLnb.diseqcRepeats.value]) setCommandOrder = False # 0 "committed, toneburst", # 1 "toneburst, committed", # 2 "committed, uncommitted, toneburst", # 3 "toneburst, committed, uncommitted", # 4 "uncommitted, committed, toneburst" # 5 "toneburst, uncommitted, commmitted" order_map = {"ct": 0, "tc": 1, "cut": 2, "tcu": 3, "uct": 4, "tuc": 5} sec.setCommandOrder(order_map[currCO]) if dm == "1_2": latitude = currLnb.latitude.float sec.setLatitude(latitude) longitude = currLnb.longitude.float sec.setLongitude(longitude) if currLnb.latitudeOrientation.value == "north": sec.setLaDirection(rotorParam.NORTH) else: sec.setLaDirection(rotorParam.SOUTH) if currLnb.longitudeOrientation.value == "east": sec.setLoDirection(rotorParam.EAST) else: 
sec.setLoDirection(rotorParam.WEST) if currLnb.powerMeasurement.value: sec.setUseInputpower(True) sec.setInputpowerDelta(currLnb.powerThreshold.value) turn_speed_dict = { "fast": rotorParam.FAST, "slow": rotorParam.SLOW } if turn_speed_dict.has_key(currLnb.turningSpeed.value): turning_speed = turn_speed_dict[currLnb.turningSpeed.value] else: beg_time = localtime(currLnb.fastTurningBegin.value) end_time = localtime(currLnb.fastTurningEnd.value) turning_speed = ((beg_time.tm_hour + 1) * 60 + beg_time.tm_min + 1) << 16 turning_speed |= (end_time.tm_hour + 1) * 60 + end_time.tm_min + 1 sec.setRotorTurningSpeed(turning_speed) else: sec.setUseInputpower(False) sec.setLNBSlotMask(tunermask) sec.setLNBPrio(int(currLnb.prio.value)) # finally add the orbital positions for y in lnbSat[x]: self.addSatellite(sec, y) if x > maxFixedLnbPositions: satpos = x > maxFixedLnbPositions and (3606-(70 - x)) or y else: satpos = y currSat = config.Nims[slotid].advanced.sat[satpos] if currSat.voltage.value == "polarization": if config.Nims[slotid].diseqc13V.value: sec.setVoltageMode(switchParam.HV_13) else: sec.setVoltageMode(switchParam.HV) elif currSat.voltage.value == "13V": # noinspection PyProtectedMember sec.setVoltageMode(switchParam._14V) elif currSat.voltage.value == "18V": # noinspection PyProtectedMember sec.setVoltageMode(switchParam._18V) if currSat.tonemode.value == "band": sec.setToneMode(switchParam.HILO) elif currSat.tonemode.value == "on": sec.setToneMode(switchParam.ON) elif currSat.tonemode.value == "off": sec.setToneMode(switchParam.OFF) if not currSat.usals.value and x <= maxFixedLnbPositions: sec.setRotorPosNum(currSat.rotorposition.value) else: sec.setRotorPosNum(0) #USALS def reconstructUnicableDate(self, configManufacturer, ProductDict, currLnb): val = currLnb.content.stored_values if currLnb.unicable.value == "unicable_lnb": ManufacturerName = val.get('unicableLnbManufacturer', 'none') SDict = val.get('unicableLnb', None) elif currLnb.unicable.value == 
"unicable_matrix": ManufacturerName = val.get('unicableMatrixManufacturer', 'none') SDict = val.get('unicableMatrix', None) else: return # print "[reconstructUnicableDate] SDict %s" % SDict if SDict is None: return print "ManufacturerName %s" % ManufacturerName PDict = SDict.get(ManufacturerName, None) #dict contained last stored device data if PDict is None: return PN = PDict.get('product', None) #product name if PN is None: return if ManufacturerName in ProductDict.keys(): # manufacture are listed, use its ConfigSubsection tmp = ProductDict[ManufacturerName] if PN in tmp.product.choices.choices: return else: #if manufacture not in list, then generate new ConfigSubsection print "[reconstructUnicableDate] Manufacturer %s not in unicable.xml" % ManufacturerName tmp = ConfigSubsection() tmp.scr = ConfigSubDict() tmp.vco = ConfigSubDict() tmp.lofl = ConfigSubDict() tmp.lofh = ConfigSubDict() tmp.loft = ConfigSubDict() tmp.diction = ConfigSubDict() tmp.product = ConfigSelection(choices = [], default = None) if PN not in tmp.product.choices.choices: print "[reconstructUnicableDate] Product %s not in unicable.xml" % PN scrlist = [] SatCR = int(PDict.get('scr', {PN,1}).get(PN,1)) - 1 vco = int(PDict.get('vco', {PN,0}).get(PN,0).get(str(SatCR),1)) positionslist=[1,(9750, 10600, 11700)] ##adenin_todo positions = int(positionslist[0]) tmp.positions = ConfigSubDict() tmp.positions[PN] = ConfigSubList() tmp.positions[PN].append(ConfigInteger(default=positions, limits = (positions, positions))) tmp.vco[PN] = ConfigSubList() for cnt in range(0,SatCR + 1): vcofreq = (cnt == SatCR) and vco or 0 # equivalent to vcofreq = (cnt == SatCR) ? 
1432 : 0 if vcofreq == 0 : scrlist.append(("%d" %(cnt+1),"SCR %d " %(cnt+1) +_("not used"))) else: scrlist.append(("%d" %(cnt+1),"SCR %d" %(cnt+1))) print "vcofreq %d" % vcofreq tmp.vco[PN].append(ConfigInteger(default=vcofreq, limits = (vcofreq, vcofreq))) tmp.scr[PN] = ConfigSelection(choices = scrlist, default = scrlist[SatCR][0]) tmp.lofl[PN] = ConfigSubList() tmp.lofh[PN] = ConfigSubList() tmp.loft[PN] = ConfigSubList() for cnt in range(1,positions+1): lofl = int(positionslist[cnt][0]) lofh = int(positionslist[cnt][1]) loft = int(positionslist[cnt][2]) tmp.lofl[PN].append(ConfigInteger(default=lofl, limits = (lofl, lofl))) tmp.lofh[PN].append(ConfigInteger(default=lofh, limits = (lofh, lofh))) tmp.loft[PN].append(ConfigInteger(default=loft, limits = (loft, loft))) dictionlist = [("EN50494", "Unicable(EN50494)")] ##adenin_todo tmp.diction[PN] = ConfigSelection(choices = dictionlist, default = dictionlist[0][0]) tmp.product.choices.choices.append(PN) tmp.product.choices.default = PN tmp.scr[PN].save_forced = True tmp.scr.save_forced = True tmp.vco.save_forced = True tmp.product.save_forced = True ProductDict[ManufacturerName] = tmp if ManufacturerName not in configManufacturer.choices.choices: #check if name in choices list configManufacturer.choices.choices.append(ManufacturerName) #add name to choises list def __init__(self, nimmgr): self.NimManager = nimmgr self.configuredSatellites = set() self.update() class NIM(object): def __init__(self, slot, type, description, has_outputs=True, internally_connectable=None, multi_type=None, frontend_id=None, i2c=None, is_empty=False, input_name = None): if not multi_type: multi_type = {} self.slot = slot if type not in ("DVB-S", "DVB-C", "DVB-T", "DVB-S2", "DVB-T2", "DVB-C2", "ATSC", None): print "warning: unknown NIM type %s, not using." 
% type type = None self.type = type self.description = description self.has_outputs = has_outputs self.internally_connectable = internally_connectable self.multi_type = multi_type self.i2c = i2c self.frontend_id = frontend_id self.__is_empty = is_empty self.input_name = input_name self.compatible = { None: (None,), "DVB-S": ("DVB-S", None), "DVB-C": ("DVB-C", None), "DVB-T": ("DVB-T", None), "DVB-S2": ("DVB-S", "DVB-S2", None), "DVB-C2": ("DVB-C", "DVB-C2", None), "DVB-T2": ("DVB-T", "DVB-T2", None), "ATSC": ("ATSC", None), } def isCompatible(self, what): if not self.isSupported(): return False return what in self.compatible[self.getType()] def canBeCompatible(self, what): if not self.isSupported(): return False if self.isCompatible(what): return True for type in self.multi_type.values(): if what in self.compatible[type]: return True return False def getType(self): try: if self.isMultiType(): return self.multi_type[self.config.multiType.value] except: pass return self.type def connectableTo(self): connectable = { "DVB-S": ("DVB-S", "DVB-S2"), "DVB-C": ("DVB-C", "DVB-C2"), "DVB-T": ("DVB-T","DVB-T2"), "DVB-S2": ("DVB-S", "DVB-S2"), "DVB-C2": ("DVB-C", "DVB-C2"), "DVB-T2": ("DVB-T", "DVB-T2"), "ATSC": "ATSC", } return connectable[self.getType()] def getSlotInputName(self): name = self.input_name if name is None: name = chr(ord('A') + self.slot) return name slot_input_name = property(getSlotInputName) def getSlotName(self): # get a friendly description for a slot name. # we name them "Tuner A/B/C/...", because that's what's usually written on the back # of the device. # for DM7080HD "Tuner A1/A2/B/C/..." 
descr = _("Tuner ") return descr + self.getSlotInputName() slot_name = property(getSlotName) def getSlotID(self): return chr(ord('A') + self.slot) def getI2C(self): return self.i2c def hasOutputs(self): return self.has_outputs def internallyConnectableTo(self): return self.internally_connectable def setInternalLink(self): if self.internally_connectable is not None: print "setting internal link on frontend id", self.frontend_id f = open("/proc/stb/frontend/%d/rf_switch" % self.frontend_id, "w") f.write("internal") f.close() def removeInternalLink(self): if self.internally_connectable is not None: print "removing internal link on frontend id", self.frontend_id f = open("/proc/stb/frontend/%d/rf_switch" % self.frontend_id, "w") f.write("external") f.close() def isMultiType(self): return len(self.multi_type) > 0 def isEmpty(self): return self.__is_empty # empty tuners are supported! def isSupported(self): return (self.frontend_id is not None) or self.__is_empty def isMultistream(self): multistream = self.frontend_id and eDVBResourceManager.getInstance().frontendIsMultistream(self.frontend_id) or False # HACK due to poor support for VTUNER_SET_FE_INFO # When vtuner does not accept fe_info we have to fallback to detection using tuner name # More tuner names will be added when confirmed as multistream (FE_CAN_MULTISTREAM) if not multistream and "TBS" in self.description: multistream = True return multistream # returns dict {<slotid>: <type>} def getMultiTypeList(self): return self.multi_type slot_id = property(getSlotID) def getFriendlyType(self): return { "DVB-S": "DVB-S", "DVB-T": "DVB-T", "DVB-C": "DVB-C", "DVB-S2": "DVB-S2", "DVB-T2": "DVB-T2", "DVB-C2": "DVB-C2", "ATSC": "ATSC", None: _("empty") }[self.getType()] friendly_type = property(getFriendlyType) def getFriendlyFullDescription(self): nim_text = self.slot_name + ": " if self.empty: nim_text += _("(empty)") elif not self.isSupported(): nim_text += self.description + " (" + _("not supported") + ")" else: 
nim_text += self.description + " (" + self.friendly_type + ")" return nim_text friendly_full_description = property(getFriendlyFullDescription) config_mode = property(lambda self: config.Nims[self.slot].configMode.value) config = property(lambda self: config.Nims[self.slot]) empty = property(lambda self: self.getType is None) class NimManager: def getConfiguredSats(self): return self.sec.getConfiguredSats() def getTransponders(self, pos): if self.transponders.has_key(pos): return self.transponders[pos] else: return [] def getTranspondersCable(self, nim): nimConfig = config.Nims[nim] if nimConfig.configMode.value != "nothing" and nimConfig.cable.scan_type.value == "provider": return self.transponderscable[self.cablesList[nimConfig.cable.scan_provider.index][0]] return [ ] def getTranspondersTerrestrial(self, region): return self.transpondersterrestrial[region] def getCableDescription(self, nim): return self.cablesList[config.Nims[nim].scan_provider.index][0] def getCableFlags(self, nim): return self.cablesList[config.Nims[nim].scan_provider.index][1] def getTerrestrialDescription(self, nim): return self.terrestrialsList[config.Nims[nim].terrestrial.index][0] def getTerrestrialFlags(self, nim): return self.terrestrialsList[config.Nims[nim].terrestrial.index][1] def getSatDescription(self, pos): return self.satellites[pos] def sortFunc(self, x): orbpos = x[0] if orbpos > 1800: return orbpos - 3600 else: return orbpos + 1800 def readTransponders(self): self.satellites = { } self.transponders = { } self.transponderscable = { } self.transpondersterrestrial = { } self.transpondersatsc = { } db = eDVBDB.getInstance() if self.hasNimType("DVB-S"): print "Reading satellites.xml" if db.readSatellites(self.satList, self.satellites, self.transponders): self.satList.sort() # sort by orbpos else: #satellites.xml not found or corrupted from Tools.Notifications import AddPopup from Screens.MessageBox import MessageBox def emergencyAid(): if not path.exists("/etc/enigma2/lamedb"): 
print "/etc/enigma2/lamedb not found" return None f = file("/etc/enigma2/lamedb","r") lamedb = f.readlines() f.close() if lamedb[0].find("/3/") != -1: version = 3 elif lamedb[0].find("/4/") != -1: version = 4 else: print "unknown lamedb version: ",lamedb[0] return False print "import version %d" % version collect = False transponders = [] tp = [] for line in lamedb: if line == "transponders\n": collect = True continue if line == "end\n": break if collect: data = line.strip().split(":") if data[0] == "/": transponders.append(tp) tp = [] else: tp.append(data) t1 = ("namespace","tsid","onid") t2_sv3 = ("frequency", "symbol_rate", "polarization", "fec_inner", "position", "inversion", "system", "modulation", "rolloff", "pilot", ) t2_sv4 = ("frequency", "symbol_rate", "polarization", "fec_inner", "position", "inversion", "flags", "system", "modulation", "rolloff", "pilot" ) tplist = [] for x in transponders: tp = {} if len(x[0]) > len(t1): continue freq = x[1][0].split() if len(freq) != 2: continue x[1][0] = freq[1] if freq[0] == "s" or freq[0] == "S": if ((version == 3) and len(x[1]) > len(t2_sv3)) or ((version == 4) and len(x[1]) > len(t2_sv4)): continue for y in range(0, len(x[0])): tp.update({t1[y]:x[0][y]}) for y in range(0, len(x[1])): if version == 3: tp.update({t2_sv3[y]:x[1][y]}) elif version == 4: tp.update({t2_sv4[y]:x[1][y]}) if ((int(tp.get("namespace"),16) >> 16) & 0xFFF) != int(tp.get("position")): print "Namespace %s and Position %s are not identical"% (tp.get("namespace"), tp.get("position")) continue if version >= 4: tp.update({"supposition":((int(tp.get("namespace","0"),16) >> 24) & 0x0F)}) elif freq[0] == "c" or freq[0] == "C": print "DVB-C" continue elif freq[0] == "t" or freq[0] == "T": print "DVB-T" continue tplist.append(tp) satDict = {} for tp in tplist: freq = int(tp.get("frequency",0)) if freq: tmp_sat = satDict.get(int(tp.get("position")),{}) tmp_tp = self.transponders.get(int(tp.get("position")),[]) sat_pos = int(tp.get("position")) 
fake_sat_pos = int(tp.get("position")) if sat_pos > 1800: sat_pos -= 1800 dir = 'W' else: dir = 'E' if freq >= 10000000 and freq <= 13000000: fake_sat_pos = sat_pos tmp_sat.update({'name':'%3.1f%c Ku-band satellite' %(sat_pos/10.0, dir)}) #tmp_sat.update({"band":"Ku"}) if freq >= 3000000 and freq <= 4000000: fake_sat_pos = sat_pos + 1 tmp_sat.update({'name':'%3.1f%c C-band satellite' %(sat_pos/10.0, dir)}) #tmp_sat.update({"band":"C"}) if freq >= 17000000 and freq <= 23000000: fake_sat_pos = sat_pos + 2 tmp_sat.update({'name':'%3.1f%c Ka-band satellite' %(sat_pos/10.0, dir)}) #tmp_sat.update({"band":"Ka"}) tmp_tp.append(( 0, #??? int(tp.get("frequency",0)), int(tp.get("symbol_rate",0)), int(tp.get("polarization",0)), int(tp.get("fec_inner",0)), int(tp.get("system",0)), int(tp.get("modulation",0)), int(tp.get("inversion",0)), int(tp.get("rolloff",0)), int(tp.get("pilot",0)), -1, #tsid -1 -> any tsid are valid -1 #onid -1 -> any tsid are valid )) tmp_sat.update({'flags':int(tp.get("flags"))}) satDict.update({fake_sat_pos:tmp_sat}) self.transponders.update({fake_sat_pos:tmp_tp}) for sat_pos in satDict: self.satellites.update({sat_pos: satDict.get(sat_pos).get('name')}) self.satList.append((sat_pos, satDict.get(sat_pos).get('name'), satDict.get(sat_pos).get('flags'))) return True AddPopup(_("satellites.xml not found or corrupted!\nIt is possible to watch TV,\nbut it's not possible to search for new TV channels\nor to configure tuner settings"), type = MessageBox.TYPE_ERROR, timeout = 0, id = "SatellitesLoadFailed") if not emergencyAid(): AddPopup(_("resoring satellites.xml not posibel!"), type = MessageBox.TYPE_ERROR, timeout = 0, id = "SatellitesLoadFailed") return if self.hasNimType("DVB-C") or self.hasNimType("DVB-T") or self.hasNimType("DVB-T2"): print "Reading cables.xml" db.readCables(self.cablesList, self.transponderscable) print "Reading terrestrial.xml" db.readTerrestrials(self.terrestrialsList, self.transpondersterrestrial) def enumerateNIMs(self): # enum 
available NIMs. This is currently very dreambox-centric and uses the /proc/bus/nim_sockets interface. # the result will be stored into nim_slots. # the content of /proc/bus/nim_sockets looks like: # NIM Socket 0: # Type: DVB-S # Name: BCM4501 DVB-S2 NIM (internal) # NIM Socket 1: # Type: DVB-S # Name: BCM4501 DVB-S2 NIM (internal) # NIM Socket 2: # Type: DVB-T # Name: Philips TU1216 # NIM Socket 3: # Type: DVB-S # Name: Alps BSBE1 702A # # Type will be either "DVB-S", "DVB-S2", "DVB-T", "DVB-C" or None. # nim_slots is an array which has exactly one entry for each slot, even for empty ones. self.nim_slots = [ ] try: nimfile = open("/proc/bus/nim_sockets") except IOError: return current_slot = None entries = {} for line in nimfile: if not line: break line = line.strip() if line.startswith("NIM Socket"): parts = line.split(" ") current_slot = int(parts[2][:-1]) entries[current_slot] = {} elif line.startswith("Type:"): entries[current_slot]["type"] = str(line[6:]) entries[current_slot]["isempty"] = False elif line.strip().startswith("Input_Name:"): entries[current_slot]["input_name"] = str(line.strip()[12:]) elif line.startswith("Name:"): entries[current_slot]["name"] = str(line[6:]) entries[current_slot]["isempty"] = False elif line.startswith("Has_Outputs:"): input = str(line[len("Has_Outputs:") + 1:]) entries[current_slot]["has_outputs"] = (input == "yes") elif line.startswith("Internally_Connectable:"): input = int(line[len("Internally_Connectable:") + 1:]) entries[current_slot]["internally_connectable"] = input elif line.startswith("Frontend_Device:"): input = int(line[len("Frontend_Device:") + 1:]) entries[current_slot]["frontend_device"] = input elif line.startswith("Mode"): # Mode 0: DVB-C # Mode 1: DVB-T # "Mode 1: DVB-T" -> ["Mode 1", "DVB-T"] split = line.split(":") split[1] = split[1].replace(' ','') split2 = split[0].split(" ") modes = entries[current_slot].get("multi_type", {}) modes[split2[1]] = split[1] entries[current_slot]["multi_type"] = modes elif 
line.startswith("I2C_Device:"): input = int(line[len("I2C_Device:") + 1:]) entries[current_slot]["i2c"] = input elif line.startswith("empty"): entries[current_slot]["type"] = None entries[current_slot]["name"] = _("N/A") entries[current_slot]["isempty"] = True nimfile.close() for id, entry in entries.items(): if not (entry.has_key("name") and entry.has_key("type")): entry["name"] = _("N/A") entry["type"] = None if not (entry.has_key("i2c")): entry["i2c"] = None if not (entry.has_key("has_outputs")): entry["has_outputs"] = True if entry.has_key("frontend_device"): # check if internally connectable if path.exists("/proc/stb/frontend/%d/rf_switch" % entry["frontend_device"]) and ((id > 0) or (getBoxType() == 'vusolo2')): entry["internally_connectable"] = entry["frontend_device"] - 1 else: entry["internally_connectable"] = None else: entry["frontend_device"] = entry["internally_connectable"] = None if not (entry.has_key("multi_type")): if entry["name"] == "DVB-T2/C USB-Stick": # workaround dvbsky hybrit usb stick entry["multi_type"] = {'0': 'DVB-T'} entry["multi_type"] = {'1': 'DVB-C'} else: entry["multi_type"] = {} if not (entry.has_key("input_name")): entry["input_name"] = chr(ord('A') + id) self.nim_slots.append(NIM(slot = id, description = entry["name"], type = entry["type"], has_outputs = entry["has_outputs"], internally_connectable = entry["internally_connectable"], multi_type = entry["multi_type"], frontend_id = entry["frontend_device"], i2c = entry["i2c"], is_empty = entry["isempty"], input_name = entry.get("input_name", None))) def hasNimType(self, chktype): for slot in self.nim_slots: if slot.isCompatible(chktype): return True for type in slot.getMultiTypeList().values(): if chktype == type: return True return False def getNimType(self, slotid): return self.nim_slots[slotid].type def getNimDescription(self, slotid): return self.nim_slots[slotid].friendly_full_description def getNimName(self, slotid): return self.nim_slots[slotid].description def 
getNimSlotInputName(self, slotid): # returns just "A", "B", ... return self.nim_slots[slotid].slot_input_name def getNim(self, slotid): return self.nim_slots[slotid] def getI2CDevice(self, slotid): return self.nim_slots[slotid].getI2C() def getNimListOfType(self, type, exception = -1): # returns a list of indexes for NIMs compatible to the given type, except for 'exception' list = [] for x in self.nim_slots: if x.isCompatible(type) and x.slot != exception: list.append(x.slot) return list def __init__(self): sec = secClass.getInstance() global maxFixedLnbPositions maxFixedLnbPositions = sec.getMaxFixedLnbPositions() self.satList = [ ] self.cablesList = [] self.terrestrialsList = [] self.atscList = [] self.enumerateNIMs() self.readTransponders() InitNimManager(self) #init config stuff # get a list with the friendly full description def nimList(self): list = [ ] for slot in self.nim_slots: list.append(slot.friendly_full_description) return list def getSlotCount(self): return len(self.nim_slots) def hasOutputs(self, slotid): return self.nim_slots[slotid].hasOutputs() def nimInternallyConnectableTo(self, slotid): return self.nim_slots[slotid].internallyConnectableTo() def nimRemoveInternalLink(self, slotid): self.nim_slots[slotid].removeInternalLink() def canConnectTo(self, slotid): slots = [] if self.nim_slots[slotid].internallyConnectableTo() is not None: slots.append(self.nim_slots[slotid].internallyConnectableTo()) for type in self.nim_slots[slotid].connectableTo(): for slot in self.getNimListOfType(type, exception = slotid): if self.hasOutputs(slot) and slot not in slots: slots.append(slot) # remove nims, that have a conntectedTo reference on for testnim in slots[:]: for nim in self.getNimListOfType("DVB-S", slotid): nimConfig = self.getNimConfig(nim) if nimConfig.content.items.has_key("configMode") and nimConfig.configMode.value == "loopthrough" and int(nimConfig.connectedTo.value) == testnim: slots.remove(testnim) break slots.sort() return slots def 
canEqualTo(self, slotid): type = self.getNimType(slotid) type = type[:5] # DVB-S2 --> DVB-S, DVB-T2 --> DVB-T, DVB-C2 --> DVB-C nimList = self.getNimListOfType(type, slotid) for nim in nimList[:]: mode = self.getNimConfig(nim) if mode.configMode.value == "loopthrough" or mode.configMode.value == "satposdepends": nimList.remove(nim) return nimList def canDependOn(self, slotid): type = self.getNimType(slotid) type = type[:5] # DVB-S2 --> DVB-S, DVB-T2 --> DVB-T, DVB-C2 --> DVB-C nimList = self.getNimListOfType(type, slotid) positionerList = [] for nim in nimList[:]: mode = self.getNimConfig(nim) nimHaveRotor = mode.configMode.value == "simple" and mode.diseqcMode.value in ("positioner", "positioner_select") if not nimHaveRotor and mode.configMode.value == "advanced": for x in range(3601, 3607): lnb = int(mode.advanced.sat[x].lnb.value) if lnb != 0: nimHaveRotor = True break if not nimHaveRotor: for sat in mode.advanced.sat.values(): lnb_num = int(sat.lnb.value) diseqcmode = lnb_num and mode.advanced.lnb[lnb_num].diseqcMode.value or "" if diseqcmode == "1_2": nimHaveRotor = True break if nimHaveRotor: alreadyConnected = False for testnim in nimList: testmode = self.getNimConfig(testnim) if testmode.configMode.value == "satposdepends" and int(testmode.connectedTo.value) == int(nim): alreadyConnected = True break if not alreadyConnected: positionerList.append(nim) return positionerList def getNimConfig(self, slotid): return config.Nims[slotid] def getSatName(self, pos): for sat in self.satList: if sat[0] == pos: return sat[1] return _("N/A") def getSatList(self): return self.satList # returns True if something is configured to be connected to this nim # if slotid == -1, returns if something is connected to ANY nim def somethingConnected(self, slotid = -1): if slotid == -1: connected = False for id in range(self.getSlotCount()): if self.somethingConnected(id): connected = True return connected else: nim = config.Nims[slotid] configMode = nim.configMode.value if 
self.nim_slots[slotid].isCompatible("DVB-S") or self.nim_slots[slotid].isCompatible("DVB-T") or self.nim_slots[slotid].isCompatible("DVB-C"): return not (configMode == "nothing") def getSatListForNim(self, slotid): list = [] if self.nim_slots[slotid].isCompatible("DVB-S"): nim = config.Nims[slotid] #print "slotid:", slotid #print "self.satellites:", self.satList[config.Nims[slotid].diseqcA.index] #print "diseqcA:", config.Nims[slotid].diseqcA.value configMode = nim.configMode.value if configMode == "equal": slotid = int(nim.connectedTo.value) nim = config.Nims[slotid] configMode = nim.configMode.value elif configMode == "loopthrough": slotid = self.sec.getRoot(slotid, int(nim.connectedTo.value)) nim = config.Nims[slotid] configMode = nim.configMode.value if configMode == "simple": dm = nim.diseqcMode.value if dm in ("single", "toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"): if nim.diseqcA.orbital_position < 3600: list.append(self.satList[nim.diseqcA.index - 2]) if dm in ("toneburst_a_b", "diseqc_a_b", "diseqc_a_b_c_d"): if nim.diseqcB.orbital_position < 3600: list.append(self.satList[nim.diseqcB.index - 2]) if dm == "diseqc_a_b_c_d": if nim.diseqcC.orbital_position < 3600: list.append(self.satList[nim.diseqcC.index - 2]) if nim.diseqcD.orbital_position < 3600: list.append(self.satList[nim.diseqcD.index - 2]) if dm == "positioner": for x in self.satList: list.append(x) if dm == "positioner_select": for x in self.satList: if str(x[0]) in nim.userSatellitesList.value: list.append(x) elif configMode == "advanced": for x in range(3601, 3605): if int(nim.advanced.sat[x].lnb.value) != 0: for x in self.satList: list.append(x) if not list: for x in self.satList: if int(nim.advanced.sat[x[0]].lnb.value) != 0: list.append(x) for x in range(3605, 3607): if int(nim.advanced.sat[x].lnb.value) != 0: for user_sat in self.satList: if str(user_sat[0]) in nim.advanced.sat[x].userSatellitesList.value and user_sat not in list: list.append(user_sat) return list def 
getRotorSatListForNim(self, slotid): list = [] if self.nim_slots[slotid].isCompatible("DVB-S"): nim = config.Nims[slotid] configMode = nim.configMode.value if configMode == "simple": if nim.diseqcMode.value == "positioner": for x in self.satList: list.append(x) elif nim.diseqcMode.value == "positioner_select": for x in self.satList: if str(x[0]) in nim.userSatellitesList.value: list.append(x) elif configMode == "advanced": for x in range(3601, 3605): if int(nim.advanced.sat[x].lnb.value) != 0: for x in self.satList: list.append(x) if not list: for x in self.satList: lnbnum = int(nim.advanced.sat[x[0]].lnb.value) if lnbnum != 0: lnb = nim.advanced.lnb[lnbnum] if lnb.diseqcMode.value == "1_2": list.append(x) for x in range(3605, 3607): if int(nim.advanced.sat[x].lnb.value) != 0: for user_sat in self.satList: if str(user_sat[0]) in nim.advanced.sat[x].userSatellitesList.value and user_sat not in list: list.append(user_sat) return list def InitSecParams(): config.sec = ConfigSubsection() x = ConfigInteger(default=25, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_CONT_TONE_DISABLE_BEFORE_DISEQC, configElement.value)) config.sec.delay_after_continuous_tone_disable_before_diseqc = x x = ConfigInteger(default=10, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_FINAL_CONT_TONE_CHANGE, configElement.value)) config.sec.delay_after_final_continuous_tone_change = x x = ConfigInteger(default=10, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_FINAL_VOLTAGE_CHANGE, configElement.value)) config.sec.delay_after_final_voltage_change = x x = ConfigInteger(default=120, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BETWEEN_DISEQC_REPEATS, configElement.value)) config.sec.delay_between_diseqc_repeats = x x = ConfigInteger(default=100, limits = (0, 9999)) x.addNotifier(lambda configElement: 
secClass.setParam(secClass.DELAY_AFTER_LAST_DISEQC_CMD, configElement.value)) config.sec.delay_after_last_diseqc_command = x x = ConfigInteger(default=50, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_TONEBURST, configElement.value)) config.sec.delay_after_toneburst = x x = ConfigInteger(default=75, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_SWITCH_CMDS, configElement.value)) config.sec.delay_after_change_voltage_before_switch_command = x x = ConfigInteger(default=200, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_ENABLE_VOLTAGE_BEFORE_SWITCH_CMDS, configElement.value)) config.sec.delay_after_enable_voltage_before_switch_command = x x = ConfigInteger(default=700, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BETWEEN_SWITCH_AND_MOTOR_CMD, configElement.value)) config.sec.delay_between_switch_and_motor_command = x x = ConfigInteger(default=500, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_MEASURE_IDLE_INPUTPOWER, configElement.value)) config.sec.delay_after_voltage_change_before_measure_idle_inputpower = x x = ConfigInteger(default=900, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_ENABLE_VOLTAGE_BEFORE_MOTOR_CMD, configElement.value)) config.sec.delay_after_enable_voltage_before_motor_command = x x = ConfigInteger(default=500, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_MOTOR_STOP_CMD, configElement.value)) config.sec.delay_after_motor_stop_command = x x = ConfigInteger(default=500, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_VOLTAGE_CHANGE_BEFORE_MOTOR_CMD, configElement.value)) 
config.sec.delay_after_voltage_change_before_motor_command = x x = ConfigInteger(default=70, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_BEFORE_SEQUENCE_REPEAT, configElement.value)) config.sec.delay_before_sequence_repeat = x x = ConfigInteger(default=360, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.MOTOR_RUNNING_TIMEOUT, configElement.value)) config.sec.motor_running_timeout = x x = ConfigInteger(default=1, limits = (0, 5)) x.addNotifier(lambda configElement: secClass.setParam(secClass.MOTOR_COMMAND_RETRIES, configElement.value)) config.sec.motor_command_retries = x x = ConfigInteger(default=50, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_DISEQC_RESET_CMD, configElement.value)) config.sec.delay_after_diseqc_reset_cmd = x x = ConfigInteger(default=150, limits = (0, 9999)) x.addNotifier(lambda configElement: secClass.setParam(secClass.DELAY_AFTER_DISEQC_PERIPHERIAL_POWERON_CMD, configElement.value)) config.sec.delay_after_diseqc_peripherial_poweron_cmd = x # TODO add support for satpos depending nims to advanced nim configuration # so a second/third/fourth cable from a motorized lnb can used behind a # diseqc 1.0 / diseqc 1.1 / toneburst switch # the C(++) part should can handle this # the configElement should be only visible when diseqc 1.2 is disabled jess_alias = ("JESS","UNICABLE2","SCD2","EN50607","EN 50607") lscr = [("scr%d" % i) for i in range(1,33)] def InitNimManager(nimmgr, update_slots = []): hw = HardwareInfo() addNimConfig = False try: config.Nims except: addNimConfig = True if addNimConfig: InitSecParams() config.Nims = ConfigSubList() for x in range(len(nimmgr.nim_slots)): config.Nims.append(ConfigSubsection()) lnb_choices = { "universal_lnb": _("Universal LNB"), "unicable": _("Unicable / JESS"), "c_band": _("C-Band"), "circular_lnb": _("Circular LNB"), "user_defined": _("User defined")} lnb_choices_default = 
"universal_lnb" unicablelnbproducts = {} unicablematrixproducts = {} file = open(eEnv.resolve("${datadir}/enigma2/unicable.xml"), 'r') doc = xml.etree.cElementTree.parse(file) file.close() root = doc.getroot() entry = root.find("lnb") for manufacturer in entry.getchildren(): m={} m_update = m.update for product in manufacturer.getchildren(): p={} #new dict empty for new product p_update = p.update scr=[] scr_append = scr.append scr_pop = scr.pop for i in range(len(lscr)): scr_append(product.get(lscr[i],"0")) for i in range(len(lscr)): if scr[len(lscr)-i-1] == "0": scr_pop() else: break; p_update({"frequencies":tuple(scr)}) #add scr frequencies to dict product diction = product.get("format","EN50494").upper() if diction in jess_alias: diction = "EN50607" else: diction = "EN50494" p_update({"diction":tuple([diction])}) #add diction to dict product positionsoffset = product.get("positionsoffset",0) p_update({"positionsoffset":tuple([positionsoffset])}) #add positionsoffset to dict product positions=[] positions_append = positions.append positions_append(int(product.get("positions",1))) for cnt in range(positions[0]): lof=[] lof.append(int(product.get("lofl",9750))) lof.append(int(product.get("lofh",10600))) lof.append(int(product.get("threshold",11700))) positions_append(tuple(lof)) p_update({"positions":tuple(positions)}) #add positons to dict product m_update({product.get("name"):p}) #add dict product to dict manufacturer unicablelnbproducts.update({manufacturer.get("name"):m}) entry = root.find("matrix") for manufacturer in entry.getchildren(): m={} m_update = m.update for product in manufacturer.getchildren(): p={} #new dict empty for new product p_update = p.update scr=[] scr_append = scr.append scr_pop = scr.pop for i in range(len(lscr)): scr_append(product.get(lscr[i],"0")) for i in range(len(lscr)): if scr[len(lscr)-i-1] == "0": scr_pop() else: break; p_update({"frequencies":tuple(scr)}) #add scr frequencies to dict product diction = 
product.get("format","EN50494").upper() if diction in jess_alias: diction = "EN50607" else: diction = "EN50494" p_update({"diction":tuple([diction])}) #add diction to dict product positionsoffset = product.get("positionsoffset",0) p_update({"positionsoffset":tuple([positionsoffset])}) #add positionsoffset to dict product positions=[] positions_append = positions.append positions_append(int(product.get("positions",1))) for cnt in range(positions[0]): lof=[] lof.append(int(product.get("lofl",9750))) lof.append(int(product.get("lofh",10600))) lof.append(int(product.get("threshold",11700))) positions_append(tuple(lof)) p_update({"positions":tuple(positions)}) #add positons to dict product m_update({product.get("name"):p}) #add dict product to dict manufacturer unicablematrixproducts.update({manufacturer.get("name"):m}) #add dict manufacturer to dict unicablematrixproducts UnicableLnbManufacturers = unicablelnbproducts.keys() UnicableLnbManufacturers.sort() UnicableMatrixManufacturers = unicablematrixproducts.keys() UnicableMatrixManufacturers.sort() unicable_choices = { "unicable_lnb": _("Unicable LNB"), "unicable_matrix": _("Unicable Matrix"), "unicable_user": "Unicable "+_("User defined")} unicable_choices_default = "unicable_lnb" advanced_lnb_satcr_user_choicesEN50494 = [("%d" % i, "SatCR %d" % i) for i in range(1,9)] advanced_lnb_satcr_user_choicesEN50607 = [("%d" % i, "SatCR %d" % i) for i in range(1,33)] advanced_lnb_diction_user_choices = [("EN50494", "Unicable(EN50494)"), ("EN50607", "JESS(EN50607)")] prio_list = [ ("-1", _("Auto")) ] for prio in range(65)+range(14000,14065)+range(19000,19065): description = "" if prio == 0: description = _(" (disabled)") elif 0 < prio < 65: description = _(" (lower than any auto)") elif 13999 < prio < 14066: description = _(" (higher than rotor any auto)") elif 18999 < prio < 19066: description = _(" (higher than any auto)") prio_list.append((str(prio), str(prio) + description)) advanced_lnb_csw_choices = [("none", _("None")), 
("AA", _("Port A")), ("AB", _("Port B")), ("BA", _("Port C")), ("BB", _("Port D"))] advanced_lnb_ucsw_choices = [("0", _("None"))] + [(str(y), "Input " + str(y)) for y in range(1, 17)] diseqc_mode_choices = [ ("single", _("Single")), ("toneburst_a_b", _("Toneburst A/B")), ("diseqc_a_b", "DiSEqC A/B"), ("diseqc_a_b_c_d", "DiSEqC A/B/C/D"), ("positioner", _("Positioner")), ("positioner_select", _("Positioner (selecting satellites)"))] positioner_mode_choices = [("usals", _("USALS")), ("manual", _("manual"))] diseqc_satlist_choices = [(3600, _('automatic'), 1), (3601, _('nothing connected'), 1)] + nimmgr.satList longitude_orientation_choices = [("east", _("East")), ("west", _("West"))] latitude_orientation_choices = [("north", _("North")), ("south", _("South"))] turning_speed_choices = [("fast", _("Fast")), ("slow", _("Slow")), ("fast epoch", _("Fast epoch"))] advanced_satlist_choices = nimmgr.satList + [ (3601, _('All satellites 1 (USALS)'), 1), (3602, _('All satellites 2 (USALS)'), 1), (3603, _('All satellites 3 (USALS)'), 1), (3604, _('All satellites 4 (USALS)'), 1), (3605, _('Selecting satellites 1 (USALS)'), 1), (3606, _('Selecting satellites 2 (USALS)'), 1)] advanced_lnb_choices = [("0", _("not configured"))] + [(str(y), "LNB " + str(y)) for y in range(1, (maxFixedLnbPositions+1))] advanced_voltage_choices = [("polarization", _("Polarization")), ("13V", _("13 V")), ("18V", _("18 V"))] advanced_tonemode_choices = [("band", _("Band")), ("on", _("On")), ("off", _("Off"))] advanced_lnb_toneburst_choices = [("none", _("None")), ("A", _("A")), ("B", _("B"))] advanced_lnb_allsat_diseqcmode_choices = [("1_2", _("1.2"))] advanced_lnb_diseqcmode_choices = [("none", _("None")), ("1_0", _("1.0")), ("1_1", _("1.1")), ("1_2", _("1.2"))] advanced_lnb_commandOrder1_0_choices = [("ct", "DiSEqC 1.0, toneburst"), ("tc", "toneburst, DiSEqC 1.0")] advanced_lnb_commandOrder_choices = [ ("ct", "DiSEqC 1.0, toneburst"), ("tc", "toneburst, DiSEqC 1.0"), ("cut", "DiSEqC 1.0, DiSEqC 1.1, 
toneburst"), ("tcu", "toneburst, DiSEqC 1.0, DiSEqC 1.1"), ("uct", "DiSEqC 1.1, DiSEqC 1.0, toneburst"), ("tuc", "toneburst, DiSEqC 1.1, DiSEqC 1.0")] advanced_lnb_diseqc_repeat_choices = [("none", _("None")), ("one", _("One")), ("two", _("Two")), ("three", _("Three"))] advanced_lnb_fast_turning_btime = mktime(datetime(1970, 1, 1, 7, 0).timetuple()) advanced_lnb_fast_turning_etime = mktime(datetime(1970, 1, 1, 19, 0).timetuple()) def configLOFChanged(configElement): if configElement.value == "unicable": x = configElement.slot_id lnb = configElement.lnb_id nim = config.Nims[x] lnbs = nim.advanced.lnb section = lnbs[lnb] if isinstance(section.unicable, ConfigNothing): if lnb == 1 or lnb > maxFixedLnbPositions: section.unicable = ConfigSelection(unicable_choices, unicable_choices_default) else: section.unicable = ConfigSelection(choices = {"unicable_matrix": _("Unicable Matrix"),"unicable_user": "Unicable "+_("User defined")}, default = "unicable_matrix") def fillUnicableConf(sectionDict, unicableproducts, vco_null_check): for manufacturer in unicableproducts: products = unicableproducts[manufacturer].keys() products.sort() products_valide = [] products_valide_append = products_valide.append tmp = ConfigSubsection() tmp.scr = ConfigSubDict() tmp.vco = ConfigSubDict() tmp.lofl = ConfigSubDict() tmp.lofh = ConfigSubDict() tmp.loft = ConfigSubDict() tmp.positionsoffset = ConfigSubDict() tmp.positions = ConfigSubDict() tmp.diction = ConfigSubDict() for article in products: positionslist = unicableproducts[manufacturer][article].get("positions") positionsoffsetlist = unicableproducts[manufacturer][article].get("positionsoffset") positionsoffset = int(positionsoffsetlist[0]) positions = int(positionslist[0]) dictionlist = [unicableproducts[manufacturer][article].get("diction")] if dictionlist[0][0] !="EN50607" or ((lnb > positionsoffset) and (lnb <= (positions + positionsoffset))): tmp.positionsoffset[article] = ConfigSubList() 
tmp.positionsoffset[article].append(ConfigInteger(default=positionsoffset, limits = (positionsoffset, positionsoffset))) tmp.positions[article] = ConfigSubList() tmp.positions[article].append(ConfigInteger(default=positions, limits = (positions, positions))) tmp.diction[article] = ConfigSelection(choices = dictionlist, default = dictionlist[0][0]) scrlist = [] scrlist_append = scrlist.append vcolist=unicableproducts[manufacturer][article].get("frequencies") tmp.vco[article] = ConfigSubList() for cnt in range(1,len(vcolist)+1): vcofreq = int(vcolist[cnt-1]) if vcofreq == 0 and vco_null_check: scrlist_append(("%d" %cnt,"SCR %d " %cnt +_("not used"))) else: scrlist_append(("%d" %cnt,"SCR %d" %cnt)) tmp.vco[article].append(ConfigInteger(default=vcofreq, limits = (vcofreq, vcofreq))) tmp.scr[article] = ConfigSelection(choices = scrlist, default = scrlist[0][0]) tmp.lofl[article] = ConfigSubList() tmp.lofh[article] = ConfigSubList() tmp.loft[article] = ConfigSubList() tmp_lofl_article_append = tmp.lofl[article].append tmp_lofh_article_append = tmp.lofh[article].append tmp_loft_article_append = tmp.loft[article].append for cnt in range(1,positions+1): lofl = int(positionslist[cnt][0]) lofh = int(positionslist[cnt][1]) loft = int(positionslist[cnt][2]) tmp_lofl_article_append(ConfigInteger(default=lofl, limits = (lofl, lofl))) tmp_lofh_article_append(ConfigInteger(default=lofh, limits = (lofh, lofh))) tmp_loft_article_append(ConfigInteger(default=loft, limits = (loft, loft))) products_valide_append(article) if len(products_valide)==0: products_valide_append("None") tmp.product = ConfigSelection(choices = products_valide, default = products_valide[0]) sectionDict[manufacturer] = tmp print "MATRIX" section.unicableMatrix = ConfigSubDict() section.unicableMatrixManufacturer = ConfigSelection(UnicableMatrixManufacturers, UnicableMatrixManufacturers[0]) fillUnicableConf(section.unicableMatrix, unicablematrixproducts, True) print "LNB" section.unicableLnb = ConfigSubDict() 
section.unicableLnbManufacturer = ConfigSelection(UnicableLnbManufacturers, UnicableLnbManufacturers[0]) fillUnicableConf(section.unicableLnb, unicablelnbproducts, False) #TODO satpositions for satcruser section.dictionuser = ConfigSelection(advanced_lnb_diction_user_choices, default="EN50494") section.satcruserEN50494 = ConfigSelection(advanced_lnb_satcr_user_choicesEN50494, default="1") section.satcruserEN50607 = ConfigSelection(advanced_lnb_satcr_user_choicesEN50607, default="1") tmpEN50494 = ConfigSubList() for i in (1284, 1400, 1516, 1632, 1748, 1864, 1980, 2096): tmpEN50494.append(ConfigInteger(default=i, limits = (950, 2150))) section.satcrvcouserEN50494 = tmpEN50494 tmpEN50607 = ConfigSubList() for i in (1210, 1420, 1680, 2040, 984, 1020, 1056, 1092, 1128, 1164, 1256, 1292, 1328, 1364, 1458, 1494, 1530, 1566, 1602, 1638, 1716, 1752, 1788, 1824, 1860, 1896, 1932, 1968, 2004, 2076, 2112, 2148): tmpEN50607.append(ConfigInteger(default=i, limits = (950, 2150))) section.satcrvcouserEN50607 = tmpEN50607 nim.advanced.unicableconnected = ConfigYesNo(default=False) nim.advanced.unicableconnectedTo = ConfigSelection([(str(id), nimmgr.getNimDescription(id)) for id in nimmgr.getNimListOfType("DVB-S") if id != x]) if nim.advanced.unicableconnected.value == True and nim.advanced.unicableconnectedTo.value != nim.advanced.unicableconnectedTo.saved_value: from Tools.Notifications import AddPopup from Screens.MessageBox import MessageBox nim.advanced.unicableconnected.value = False nim.advanced.unicableconnected.save() txt = _("Misconfigured unicable connection from tuner %s to tuner %s!\nTuner %s option \"connected to\" are disabled now") % (chr(int(x) + ord('A')), chr(int(nim.advanced.unicableconnectedTo.saved_value) + ord('A')), chr(int(x) + ord('A')),) AddPopup(txt, type = MessageBox.TYPE_ERROR, timeout = 0, id = "UnicableConnectionFailed") section.unicableTuningAlgo = ConfigSelection([("reliable", _("reliable")),("traditional", _("traditional (fast)"))], 
default="reliable") def configDiSEqCModeChanged(configElement): section = configElement.section if configElement.value == "1_2" and isinstance(section.longitude, ConfigNothing): section.longitude = ConfigFloat(default = [5,100], limits = [(0,359),(0,999)]) section.longitudeOrientation = ConfigSelection(longitude_orientation_choices, "east") section.latitude = ConfigFloat(default = [50,767], limits = [(0,359),(0,999)]) section.latitudeOrientation = ConfigSelection(latitude_orientation_choices, "north") section.tuningstepsize = ConfigFloat(default = [0,360], limits = [(0,9),(0,999)]) section.rotorPositions = ConfigInteger(default = 99, limits = [1,999]) section.turningspeedH = ConfigFloat(default = [2,3], limits = [(0,9),(0,9)]) section.turningspeedV = ConfigFloat(default = [1,7], limits = [(0,9),(0,9)]) section.powerMeasurement = ConfigYesNo(default=True) section.powerThreshold = ConfigInteger(default=hw.get_device_name() == "dm7025" and 50 or 15, limits=(0, 100)) section.turningSpeed = ConfigSelection(turning_speed_choices, "fast") section.fastTurningBegin = ConfigDateTime(default=advanced_lnb_fast_turning_btime, formatstring = _("%H:%M"), increment = 600) section.fastTurningEnd = ConfigDateTime(default=advanced_lnb_fast_turning_etime, formatstring = _("%H:%M"), increment = 600) def configLNBChanged(configElement): x = configElement.slot_id nim = config.Nims[x] if isinstance(configElement.value, tuple): lnb = int(configElement.value[0]) else: lnb = int(configElement.value) lnbs = nim.advanced.lnb if lnb and lnb not in lnbs: section = lnbs[lnb] = ConfigSubsection() section.lofl = ConfigInteger(default=9750, limits = (0, 99999)) section.lofh = ConfigInteger(default=10600, limits = (0, 99999)) section.threshold = ConfigInteger(default=11700, limits = (0, 99999)) section.increased_voltage = ConfigYesNo(False) section.toneburst = ConfigSelection(advanced_lnb_toneburst_choices, "none") section.longitude = ConfigNothing() if lnb > maxFixedLnbPositions: tmp = 
ConfigSelection(advanced_lnb_allsat_diseqcmode_choices, "1_2") tmp.section = section configDiSEqCModeChanged(tmp) else: tmp = ConfigSelection(advanced_lnb_diseqcmode_choices, "none") tmp.section = section tmp.addNotifier(configDiSEqCModeChanged) section.diseqcMode = tmp section.commitedDiseqcCommand = ConfigSelection(advanced_lnb_csw_choices) section.fastDiseqc = ConfigYesNo(False) section.sequenceRepeat = ConfigYesNo(False) section.commandOrder1_0 = ConfigSelection(advanced_lnb_commandOrder1_0_choices, "ct") section.commandOrder = ConfigSelection(advanced_lnb_commandOrder_choices, "ct") section.uncommittedDiseqcCommand = ConfigSelection(advanced_lnb_ucsw_choices) section.diseqcRepeats = ConfigSelection(advanced_lnb_diseqc_repeat_choices, "none") section.prio = ConfigSelection(prio_list, "-1") section.unicable = ConfigNothing() tmp = ConfigSelection(lnb_choices, lnb_choices_default) tmp.slot_id = x tmp.lnb_id = lnb tmp.addNotifier(configLOFChanged, initial_call = False) section.lof = tmp def configModeChanged(configMode): slot_id = configMode.slot_id nim = config.Nims[slot_id] if configMode.value == "advanced" and isinstance(nim.advanced, ConfigNothing): # advanced config: nim.advanced = ConfigSubsection() nim.advanced.sat = ConfigSubDict() nim.advanced.sats = getConfigSatlist(192, advanced_satlist_choices) nim.advanced.lnb = ConfigSubDict() nim.advanced.lnb[0] = ConfigNothing() for x in nimmgr.satList: tmp = ConfigSubsection() tmp.voltage = ConfigSelection(advanced_voltage_choices, "polarization") tmp.tonemode = ConfigSelection(advanced_tonemode_choices, "band") tmp.usals = ConfigYesNo(True) tmp.rotorposition = ConfigInteger(default=1, limits=(1, 255)) lnb = ConfigSelection(advanced_lnb_choices, "0") lnb.slot_id = slot_id lnb.addNotifier(configLNBChanged, initial_call = False) tmp.lnb = lnb nim.advanced.sat[x[0]] = tmp for x in range(3601, 3607): tmp = ConfigSubsection() tmp.voltage = ConfigSelection(advanced_voltage_choices, "polarization") tmp.tonemode = 
ConfigSelection(advanced_tonemode_choices, "band") tmp.usals = ConfigYesNo(default=True) tmp.userSatellitesList = ConfigText('[]') tmp.rotorposition = ConfigInteger(default=1, limits=(1, 255)) lnbnum = maxFixedLnbPositions + x - 3600 lnb = ConfigSelection([("0", _("not configured")), (str(lnbnum), "LNB %d"%(lnbnum))], "0") lnb.slot_id = slot_id lnb.addNotifier(configLNBChanged, initial_call = False) tmp.lnb = lnb nim.advanced.sat[x] = tmp def scpcSearchRangeChanged(configElement): fe_id = configElement.fe_id slot_id = configElement.slot_id name = nimmgr.nim_slots[slot_id].description if path.exists("/proc/stb/frontend/%d/use_scpc_optimized_search_range" % fe_id): f = open("/proc/stb/frontend/%d/use_scpc_optimized_search_range" % fe_id, "w") f.write(configElement.value) f.close() def ForceLNBPowerChanged(configElement): if path.exists("/proc/stb/frontend/fbc/force_lnbon"): f = open("/proc/stb/frontend/fbc/force_lnbon", "w") f.write(configElement.value) f.close() def ForceToneBurstChanged(configElement): if path.exists("/proc/stb/frontend/fbc/force_toneburst"): f = open("/proc/stb/frontend/fbc/force_toneburst", "w") f.write(configElement.value) f.close() def toneAmplitudeChanged(configElement): fe_id = configElement.fe_id slot_id = configElement.slot_id if path.exists("/proc/stb/frontend/%d/tone_amplitude" % fe_id): f = open("/proc/stb/frontend/%d/tone_amplitude" % fe_id, "w") f.write(configElement.value) f.close() def connectedToChanged(slot_id, nimmgr, configElement): configMode = nimmgr.getNimConfig(slot_id).configMode if configMode.value == 'loopthrough': internally_connectable = nimmgr.nimInternallyConnectableTo(slot_id) dest_slot = configElement.value if internally_connectable is not None and int(internally_connectable) == int(dest_slot): configMode.choices.updateItemDescription(configMode.index, _("internally loopthrough to")) else: configMode.choices.updateItemDescription(configMode.index, _("externally loopthrough to")) def createSatConfig(nim, x, 
empty_slots): try: nim.toneAmplitude except: nim.toneAmplitude = ConfigSelection([("11", "340mV"), ("10", "360mV"), ("9", "600mV"), ("8", "700mV"), ("7", "800mV"), ("6", "900mV"), ("5", "1100mV")], "7") nim.toneAmplitude.fe_id = x - empty_slots nim.toneAmplitude.slot_id = x nim.toneAmplitude.addNotifier(toneAmplitudeChanged) nim.scpcSearchRange = ConfigSelection([("0", _("no")), ("1", _("yes"))], "0") nim.scpcSearchRange.fe_id = x - empty_slots nim.scpcSearchRange.slot_id = x nim.scpcSearchRange.addNotifier(scpcSearchRangeChanged) nim.forceLnbPower = ConfigSelection(default = "off", choices = [ ("on", _("Yes")), ("off", _("No"))] ) nim.forceLnbPower.addNotifier(ForceLNBPowerChanged) nim.forceToneBurst = ConfigSelection(default = "disable", choices = [ ("enable", _("Yes")), ("disable", _("No"))] ) nim.forceToneBurst.addNotifier(ForceToneBurstChanged) nim.diseqc13V = ConfigYesNo(False) nim.diseqcMode = ConfigSelection(diseqc_mode_choices, "single") nim.connectedTo = ConfigSelection([(str(id), nimmgr.getNimDescription(id)) for id in nimmgr.getNimListOfType("DVB-S") if id != x]) nim.simpleSingleSendDiSEqC = ConfigYesNo(False) nim.simpleDiSEqCSetVoltageTone = ConfigYesNo(True) nim.simpleDiSEqCOnlyOnSatChange = ConfigYesNo(False) nim.simpleDiSEqCSetCircularLNB = ConfigYesNo(True) nim.diseqcA = ConfigSatlist(list = diseqc_satlist_choices) nim.diseqcB = ConfigSatlist(list = diseqc_satlist_choices) nim.diseqcC = ConfigSatlist(list = diseqc_satlist_choices) nim.diseqcD = ConfigSatlist(list = diseqc_satlist_choices) nim.positionerMode = ConfigSelection(positioner_mode_choices, "usals") nim.userSatellitesList = ConfigText('[]') nim.pressOKtoList = ConfigNothing() nim.longitude = ConfigFloat(default=[5,100], limits=[(0,359),(0,999)]) nim.longitudeOrientation = ConfigSelection(longitude_orientation_choices, "east") nim.latitude = ConfigFloat(default=[50,767], limits=[(0,359),(0,999)]) nim.latitudeOrientation = ConfigSelection(latitude_orientation_choices, "north") 
nim.tuningstepsize = ConfigFloat(default = [0,360], limits = [(0,9),(0,999)]) nim.rotorPositions = ConfigInteger(default = 99, limits = [1,999]) nim.turningspeedH = ConfigFloat(default = [2,3], limits = [(0,9),(0,9)]) nim.turningspeedV = ConfigFloat(default = [1,7], limits = [(0,9),(0,9)]) nim.powerMeasurement = ConfigYesNo(False) nim.powerThreshold = ConfigInteger(default=hw.get_device_name() == "dm8000" and 15 or 50, limits=(0, 100)) nim.turningSpeed = ConfigSelection(turning_speed_choices, "fast") btime = datetime(1970, 1, 1, 7, 0) nim.fastTurningBegin = ConfigDateTime(default = mktime(btime.timetuple()), formatstring = _("%H:%M"), increment = 900) etime = datetime(1970, 1, 1, 19, 0) nim.fastTurningEnd = ConfigDateTime(default = mktime(etime.timetuple()), formatstring = _("%H:%M"), increment = 900) def createCableConfig(nim, x): try: nim.cable except: list = [ ] n = 0 for x in nimmgr.cablesList: list.append((str(n), x[0])) n += 1 nim.cable = ConfigSubsection() nim.cable.scan_networkid = ConfigInteger(default = 0, limits = (0, 99999)) possible_scan_types = [("bands", _("Frequency bands")), ("steps", _("Frequency steps"))] if n: possible_scan_types.append(("provider", _("Provider"))) nim.cable.scan_provider = ConfigSelection(default = "0", choices = list) nim.cable.scan_type = ConfigSelection(default = "provider", choices = possible_scan_types) nim.cable.scan_band_EU_VHF_I = ConfigYesNo(default = True) nim.cable.scan_band_EU_MID = ConfigYesNo(default = True) nim.cable.scan_band_EU_VHF_III = ConfigYesNo(default = True) nim.cable.scan_band_EU_UHF_IV = ConfigYesNo(default = True) nim.cable.scan_band_EU_UHF_V = ConfigYesNo(default = True) nim.cable.scan_band_EU_SUPER = ConfigYesNo(default = True) nim.cable.scan_band_EU_HYPER = ConfigYesNo(default = True) nim.cable.scan_band_US_LOW = ConfigYesNo(default = False) nim.cable.scan_band_US_MID = ConfigYesNo(default = False) nim.cable.scan_band_US_HIGH = ConfigYesNo(default = False) nim.cable.scan_band_US_SUPER = 
ConfigYesNo(default = False) nim.cable.scan_band_US_HYPER = ConfigYesNo(default = False) nim.cable.scan_frequency_steps = ConfigInteger(default = 1000, limits = (1000, 10000)) nim.cable.scan_mod_qam16 = ConfigYesNo(default = False) nim.cable.scan_mod_qam32 = ConfigYesNo(default = False) nim.cable.scan_mod_qam64 = ConfigYesNo(default = True) nim.cable.scan_mod_qam128 = ConfigYesNo(default = False) nim.cable.scan_mod_qam256 = ConfigYesNo(default = True) nim.cable.scan_sr_6900 = ConfigYesNo(default = True) nim.cable.scan_sr_6875 = ConfigYesNo(default = True) nim.cable.scan_sr_ext1 = ConfigInteger(default = 0, limits = (0, 7230)) nim.cable.scan_sr_ext2 = ConfigInteger(default = 0, limits = (0, 7230)) def createTerrestrialConfig(nim, x): try: nim.terrestrial except: list = [] n = 0 for x in nimmgr.terrestrialsList: list.append((str(n), x[0])) n += 1 nim.terrestrial = ConfigSelection(choices = list) nim.terrestrial_5V = ConfigOnOff() empty_slots = 0 for slot in nimmgr.nim_slots: x = slot.slot nim = config.Nims[x] if slot.isCompatible("DVB-S"): createSatConfig(nim, x, empty_slots) config_mode_choices = [("nothing", _("nothing connected")), ("simple", _("simple")), ("advanced", _("advanced"))] if len(nimmgr.getNimListOfType(slot.type, exception = x)) > 0: config_mode_choices.append(("equal", _("equal to"))) config_mode_choices.append(("satposdepends", _("second cable of motorized LNB"))) if len(nimmgr.canConnectTo(x)) > 0: config_mode_choices.append(("loopthrough", _("loopthrough to"))) nim.advanced = ConfigNothing() tmp = ConfigSelection(config_mode_choices, "simple") tmp.slot_id = x tmp.addNotifier(configModeChanged, initial_call = False) nim.configMode = tmp nim.configMode.connectedToChanged = boundFunction(connectedToChanged, x, nimmgr) nim.connectedTo.addNotifier(boundFunction(connectedToChanged, x, nimmgr), initial_call = False) elif slot.isCompatible("DVB-C"): nim.configMode = ConfigSelection( choices = { "enabled": _("enabled"), "nothing": _("nothing connected"), 
}, default = "enabled") createCableConfig(nim, x) elif slot.isCompatible("DVB-T"): nim.configMode = ConfigSelection( choices = { "enabled": _("enabled"), "nothing": _("nothing connected"), }, default = "enabled") createTerrestrialConfig(nim, x) else: empty_slots += 1 nim.configMode = ConfigSelection(choices = { "nothing": _("disabled") }, default="nothing") if slot.type is not None: print "pls add support for this frontend type!", slot.type nimmgr.sec = SecConfigure(nimmgr) def tunerTypeChanged(nimmgr, configElement): print "dvb_api_version ",iDVBFrontend.dvb_api_version fe_id = configElement.fe_id eDVBResourceManager.getInstance().setFrontendType(nimmgr.nim_slots[fe_id].frontend_id, nimmgr.nim_slots[fe_id].getType()) frontend = eDVBResourceManager.getInstance().allocateRawChannel(fe_id).getFrontend() if not path.exists("/proc/stb/frontend/%d/mode" % fe_id) and iDVBFrontend.dvb_api_version >= 5: print "api >=5 and new style tuner driver" if frontend: system = configElement.getText() if system == 'DVB-C': ret = frontend.changeType(iDVBFrontend.feCable) elif system in ('DVB-T','DVB-T2'): ret = frontend.changeType(iDVBFrontend.feTerrestrial) elif system in ('DVB-S','DVB-S2'): ret = frontend.changeType(iDVBFrontend.feSatellite) elif system == 'ATSC': ret = frontend.changeType(iDVBFrontend.feATSC) else: ret = False if not ret: print "%d: tunerTypeChange to '%s' failed" %(fe_id, system) else: print "%d: tunerTypeChange to '%s' failed (BUSY)" %(fe_id, configElement.getText()) else: print "api <5 or old style tuner driver" if path.exists("/proc/stb/frontend/%d/mode" % fe_id): cur_type = int(open("/proc/stb/frontend/%d/mode" % fe_id, "r").read()) if cur_type != int(configElement.value): print "tunerTypeChanged feid %d from %d to mode %d" % (fe_id, cur_type, int(configElement.value)) try: oldvalue = open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "r").readline() f = open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "w") f.write("0") f.close() except: 
print "[info] no /sys/module/dvb_core/parameters/dvb_shutdown_timeout available" frontend.closeFrontend() f = open("/proc/stb/frontend/%d/mode" % fe_id, "w") f.write(configElement.value) f.close() frontend.reopenFrontend() try: f = open("/sys/module/dvb_core/parameters/dvb_shutdown_timeout", "w") f.write(oldvalue) f.close() except: print "[info] no /sys/module/dvb_core/parameters/dvb_shutdown_timeout available" nimmgr.enumerateNIMs() else: print "tuner type is already already %d" %cur_type empty_slots = 0 for slot in nimmgr.nim_slots: x = slot.slot nim = config.Nims[x] addMultiType = False try: nim.multiType except: if slot.description.find("Sundtek SkyTV Ultimate III") > -1: print"[NimManager] Sundtek SkyTV Ultimate III detected, multiType = False" addMultiType = False else: addMultiType = True if slot.isMultiType() and addMultiType: typeList = [] for id in slot.getMultiTypeList().keys(): type = slot.getMultiTypeList()[id] typeList.append((id, type)) nim.multiType = ConfigSelection(typeList, "0") nim.multiType.fe_id = x - empty_slots nim.multiType.addNotifier(boundFunction(tunerTypeChanged, nimmgr)) print"[NimManager] slotname = %s, slotdescription = %s, multitype = %s, current type = %s" % (slot.input_name, slot.description,(slot.isMultiType() and addMultiType),slot.getType()) empty_slots = 0 for slot in nimmgr.nim_slots: x = slot.slot nim = config.Nims[x] empty = True if update_slots and (x not in update_slots): continue if slot.canBeCompatible("DVB-S"): createSatConfig(nim, x, empty_slots) empty = False if slot.canBeCompatible("DVB-C"): createCableConfig(nim, x) empty = False if slot.canBeCompatible("DVB-T"): createTerrestrialConfig(nim, x) empty = False if empty: empty_slots += 1 nimmanager = NimManager()
gpl-2.0
cogeorg/BlackRhino
examples/firesales_simple/networkx/generators/random_clustered.py
46
4093
# -*- coding: utf-8 -*-
"""Generate graphs with given degree and triangle sequence.
"""
# Copyright (C) 2004-2011 by
#    Aric Hagberg <hagberg@lanl.gov>
#    Dan Schult <dschult@colgate.edu>
#    Pieter Swart <swart@lanl.gov>
#    All rights reserved.
#    BSD license.
import random

import networkx as nx

__author__ = "\n".join(['Aric Hagberg (hagberg@lanl.gov)',
                        'Joel Miller (joel.c.miller.research@gmail.com)'])

__all__ = ['random_clustered_graph']


def random_clustered_graph(joint_degree_sequence, create_using=None, seed=None):
    """Generate a random graph with the given joint degree and triangle
    degree sequence.

    This uses a configuration model-like approach to generate a
    random pseudograph (graph with parallel edges and self loops) by
    randomly assigning edges to match the given independent edge
    and triangle degree sequence.

    Parameters
    ----------
    joint_degree_sequence : list of integer pairs
        Each list entry corresponds to the independent edge degree and
        triangle degree of a node.
    create_using : graph, optional (default MultiGraph)
        Return graph of this type. The instance will be cleared.
    seed : hashable object, optional
        The seed for the random number generator.

    Returns
    -------
    G : MultiGraph
        A graph with the specified degree sequence. Nodes are labeled
        starting at 0 with an index corresponding to the position in
        deg_sequence.

    Raises
    ------
    NetworkXError
        If the independent edge degree sequence sum is not even or the
        triangle degree sequence sum is not divisible by 3.

    Notes
    -----
    As described by Miller [1]_ (see also Newman [2]_ for an equivalent
    description).

    A non-graphical degree sequence (not realizable by some simple
    graph) is allowed since this function returns graphs with self loops
    and parallel edges. An exception is raised if the independent degree
    sequence does not have an even sum or the triangle degree sequence
    sum is not divisible by 3.

    This configuration model-like construction process can lead to
    duplicate edges and loops. You can remove the self-loops and
    parallel edges (see below) which will likely result in a graph
    that doesn't have the exact degree sequence specified. This
    "finite-size effect" decreases as the size of the graph increases.

    References
    ----------
    .. [1] J. C. Miller "Percolation and Epidemics on Random Clustered Graphs."
        Physical Review E, Rapid Communication (to appear).
    .. [2] M.E.J. Newman, "Random clustered networks".
        Physical Review Letters (to appear).

    Examples
    --------
    >>> deg_tri=[[1,0],[1,0],[1,0],[2,0],[1,0],[2,1],[0,1],[0,1]]
    >>> G = nx.random_clustered_graph(deg_tri)

    To remove parallel edges:

    >>> G=nx.Graph(G)

    To remove self loops:

    >>> G.remove_edges_from(G.selfloop_edges())
    """
    if create_using is None:
        create_using = nx.MultiGraph()
    elif create_using.is_directed():
        raise nx.NetworkXError("Directed Graph not supported")

    # NOTE: seeds the module-level RNG, a global side effect kept for
    # backward compatibility with existing callers.
    if seed is not None:  # idiom fix: was "not seed is None" (PEP 8 / E714)
        random.seed(seed)

    # In Python 3, zip() returns an iterator. Make this into a list.
    joint_degree_sequence = list(joint_degree_sequence)

    N = len(joint_degree_sequence)
    G = nx.empty_graph(N, create_using)

    ilist = []  # stubs for independent edges, one entry per half-edge
    tlist = []  # stubs for triangle corners, one entry per corner
    for n in G:
        degrees = joint_degree_sequence[n]
        # degrees[0] = independent edge degree, degrees[1] = triangle degree.
        ilist.extend([n] * degrees[0])
        tlist.extend([n] * degrees[1])

    # Independent stubs must pair up; triangle stubs must group in threes.
    if len(ilist) % 2 != 0 or len(tlist) % 3 != 0:
        raise nx.NetworkXError('Invalid degree sequence')

    random.shuffle(ilist)
    random.shuffle(tlist)
    while ilist:
        G.add_edge(ilist.pop(), ilist.pop())
    while tlist:
        n1 = tlist.pop()
        n2 = tlist.pop()
        n3 = tlist.pop()
        G.add_edges_from([(n1, n2), (n1, n3), (n2, n3)])
    G.name = "random_clustered %d nodes %d edges" % (G.order(), G.size())
    return G
gpl-3.0
barbuza/django
tests/template_tests/templatetags/custom.py
152
5394
# Template tag/filter fixtures for Django's template-tag test suite.
#
# NOTE(review): the "Expected <name> __doc__" docstrings and the
# "<name>.anything" attribute strings below appear to be asserted verbatim
# by the tests (they describe themselves as expected __doc__/__dict__
# values) -- do not edit their text.
import operator
import warnings

from django import template
from django.template.defaultfilters import stringfilter
from django.utils import six
from django.utils.html import escape, format_html

# Library instance the test templates load these tags/filters from.
register = template.Library()


@register.filter
@stringfilter
def trim(value, num):
    # Truncate the (stringified, via @stringfilter) value to its first
    # `num` characters.
    return value[:num]


@register.filter
def noop(value, param=None):
    """A noop filter that always return its first argument and does nothing with
    its second (optional) one.
    Useful for testing out whitespace in filter arguments (see #19882)."""
    return value


@register.simple_tag(takes_context=True)
def context_stack_length(context):
    # Exposes the depth of the context stack for tests inspecting
    # context push/pop behavior.
    return len(context.dicts)


@register.simple_tag
def no_params():
    """Expected no_params __doc__"""
    return "no_params - Expected result"
no_params.anything = "Expected no_params __dict__"


@register.simple_tag
def one_param(arg):
    """Expected one_param __doc__"""
    return "one_param - Expected result: %s" % arg
one_param.anything = "Expected one_param __dict__"


@register.simple_tag(takes_context=False)
def explicit_no_context(arg):
    """Expected explicit_no_context __doc__"""
    return "explicit_no_context - Expected result: %s" % arg
explicit_no_context.anything = "Expected explicit_no_context __dict__"


@register.simple_tag(takes_context=True)
def no_params_with_context(context):
    """Expected no_params_with_context __doc__"""
    return "no_params_with_context - Expected result (context value: %s)" % context['value']
no_params_with_context.anything = "Expected no_params_with_context __dict__"


@register.simple_tag(takes_context=True)
def params_and_context(context, arg):
    """Expected params_and_context __doc__"""
    return "params_and_context - Expected result (context value: %s): %s" % (context['value'], arg)
params_and_context.anything = "Expected params_and_context __dict__"


@register.simple_tag
def simple_two_params(one, two):
    """Expected simple_two_params __doc__"""
    return "simple_two_params - Expected result: %s, %s" % (one, two)
simple_two_params.anything = "Expected simple_two_params __dict__"


@register.simple_tag
def simple_one_default(one, two='hi'):
    """Expected simple_one_default __doc__"""
    return "simple_one_default - Expected result: %s, %s" % (one, two)
simple_one_default.anything = "Expected simple_one_default __dict__"


@register.simple_tag
def simple_unlimited_args(one, two='hi', *args):
    """Expected simple_unlimited_args __doc__"""
    return "simple_unlimited_args - Expected result: %s" % (
        ', '.join(six.text_type(arg) for arg in [one, two] + list(args)))
simple_unlimited_args.anything = "Expected simple_unlimited_args __dict__"


@register.simple_tag
def simple_only_unlimited_args(*args):
    """Expected simple_only_unlimited_args __doc__"""
    return "simple_only_unlimited_args - Expected result: %s" % ', '.join(six.text_type(arg) for arg in args)
simple_only_unlimited_args.anything = "Expected simple_only_unlimited_args __dict__"


@register.simple_tag
def simple_unlimited_args_kwargs(one, two='hi', *args, **kwargs):
    """Expected simple_unlimited_args_kwargs __doc__"""
    # Sort the dictionary by key to guarantee the order for testing.
    sorted_kwarg = sorted(six.iteritems(kwargs), key=operator.itemgetter(0))
    return "simple_unlimited_args_kwargs - Expected result: %s / %s" % (
        ', '.join(six.text_type(arg) for arg in [one, two] + list(args)),
        ', '.join('%s=%s' % (k, v) for (k, v) in sorted_kwarg)
    )
simple_unlimited_args_kwargs.anything = "Expected simple_unlimited_args_kwargs __dict__"


# Deliberately inconsistent: registered with takes_context=True but the
# function has no `context` parameter -- presumably exercises the
# error-reporting path for misdeclared tags; verify against the tests.
@register.simple_tag(takes_context=True)
def simple_tag_without_context_parameter(arg):
    """Expected simple_tag_without_context_parameter __doc__"""
    return "Expected result"
simple_tag_without_context_parameter.anything = "Expected simple_tag_without_context_parameter __dict__"


@register.simple_tag(takes_context=True)
def escape_naive(context):
    """A tag that doesn't even think about escaping issues"""
    return "Hello {0}!".format(context['name'])


@register.simple_tag(takes_context=True)
def escape_explicit(context):
    """A tag that uses escape explicitly"""
    return escape("Hello {0}!".format(context['name']))


@register.simple_tag(takes_context=True)
def escape_format_html(context):
    """A tag that uses format_html"""
    return format_html("Hello {0}!", context['name'])


@register.simple_tag(takes_context=True)
def current_app(context):
    # Renders context.current_app as a string for inspection in templates.
    return "%s" % context.current_app


@register.simple_tag(takes_context=True)
def use_l10n(context):
    # Renders the context's use_l10n flag as a string.
    return "%s" % context.use_l10n


# Registered under a different template name than the Python name.
@register.simple_tag(name='minustwo')
def minustwo_overridden_name(value):
    return value - 2

# Direct registration of a lambda (no decorator form).
register.simple_tag(lambda x: x - 1, name='minusone')

# Warnings are suppressed while registering the assignment_tag fixtures --
# assignment_tag presumably emits a deprecation warning here; confirm
# against this Django version's release notes.
with warnings.catch_warnings():
    warnings.simplefilter('ignore')

    @register.assignment_tag
    def assignment_no_params():
        """Expected assignment_no_params __doc__"""
        return "assignment_no_params - Expected result"
    assignment_no_params.anything = "Expected assignment_no_params __dict__"

    # Same deliberate takes_context mismatch as
    # simple_tag_without_context_parameter above, for assignment tags.
    @register.assignment_tag(takes_context=True)
    def assignment_tag_without_context_parameter(arg):
        """Expected assignment_tag_without_context_parameter __doc__"""
        return "Expected result"
    assignment_tag_without_context_parameter.anything = "Expected assignment_tag_without_context_parameter __dict__"
bsd-3-clause
michaelray/Iristyle-ChocolateyPackages
EthanBrown.SublimeText2.UtilPackages/tools/PackageCache/EncodingHelper/chardet/escprober.py
215
3029
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#   Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301  USA
######################### END LICENSE BLOCK #########################

# Python 2 module: implicit relative imports; `constants` also provides
# constants.True/constants.False, presumably compat aliases for very old
# Python versions -- verify against chardet's constants module.
import constants, sys
from escsm import HZSMModel, ISO2022CNSMModel, ISO2022JPSMModel, ISO2022KRSMModel
from charsetprober import CharSetProber
from codingstatemachine import CodingStateMachine


class EscCharSetProber(CharSetProber):
    """Prober for escape-based encodings.

    Runs four coding state machines (HZ-GB-2312, ISO-2022-CN,
    ISO-2022-JP, ISO-2022-KR) in parallel over the input; the first
    machine to reach the "its me" state determines the detected charset.
    """

    def __init__(self):
        CharSetProber.__init__(self)
        # One state machine per candidate escape-based encoding.
        self._mCodingSM = [
            CodingStateMachine(HZSMModel),
            CodingStateMachine(ISO2022CNSMModel),
            CodingStateMachine(ISO2022JPSMModel),
            CodingStateMachine(ISO2022KRSMModel)
            ]
        self.reset()

    def reset(self):
        """Reactivate and reset every state machine for a fresh detection run."""
        CharSetProber.reset(self)
        for codingSM in self._mCodingSM:
            if not codingSM: continue
            codingSM.active = constants.True
            codingSM.reset()
        # Count of machines still in the running; prober gives up at 0.
        self._mActiveSM = len(self._mCodingSM)
        self._mDetectedCharset = None

    def get_charset_name(self):
        # None until feed() finds a winning state machine.
        return self._mDetectedCharset

    def get_confidence(self):
        # Escape sequences are highly distinctive, hence the near-certain
        # confidence once a charset is identified.
        if self._mDetectedCharset:
            return 0.99
        else:
            return 0.00

    def feed(self, aBuf):
        """Feed a chunk of bytes to all active state machines.

        Deactivates a machine on error; on the first machine reporting
        "its me", records its charset and marks the prober as found.
        Returns the prober state after processing.
        """
        for c in aBuf:
            for codingSM in self._mCodingSM:
                if not codingSM: continue
                if not codingSM.active: continue
                codingState = codingSM.next_state(c)
                if codingState == constants.eError:
                    # This machine cannot match the input; retire it.
                    codingSM.active = constants.False
                    self._mActiveSM -= 1
                    if self._mActiveSM <= 0:
                        # Every candidate failed: definitively not us.
                        self._mState = constants.eNotMe
                        return self.get_state()
                elif codingState == constants.eItsMe:
                    # First positive match wins.
                    self._mState = constants.eFoundIt
                    self._mDetectedCharset = codingSM.get_coding_state_machine()
                    return self.get_state()
        return self.get_state()
mit
zhangjunli177/sahara
sahara/plugins/vanilla/v2_7_1/edp_engine.py
2
1663
# Copyright (c) 2015 Red Hat, Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from sahara.plugins.vanilla import confighints_helper as ch_helper from sahara.plugins.vanilla.hadoop2 import edp_engine from sahara.utils import edp class EdpOozieEngine(edp_engine.EdpOozieEngine): @staticmethod def get_possible_job_config(job_type): if edp.compare_job_type(job_type, edp.JOB_TYPE_HIVE): return {'job_config': ch_helper.get_possible_hive_config_from( 'plugins/vanilla/v2_7_1/resources/hive-default.xml')} if edp.compare_job_type(job_type, edp.JOB_TYPE_MAPREDUCE, edp.JOB_TYPE_MAPREDUCE_STREAMING): return {'job_config': ch_helper.get_possible_mapreduce_config_from( 'plugins/vanilla/v2_7_1/resources/mapred-default.xml')} if edp.compare_job_type(job_type, edp.JOB_TYPE_PIG): return {'job_config': ch_helper.get_possible_pig_config_from( 'plugins/vanilla/v2_7_1/resources/mapred-default.xml')} return edp_engine.EdpOozieEngine.get_possible_job_config(job_type)
apache-2.0
openstack/sahara-dashboard
sahara_dashboard/content/data_processing/clusters/cluster_templates/workflows/create.py
1
16094
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. import json from django import urls from django.utils.translation import ugettext_lazy as _ from saharaclient.api import base as api_base from horizon import exceptions from horizon import forms from horizon import workflows from sahara_dashboard.api import designate as designateclient from sahara_dashboard.api import manila as manilaclient from sahara_dashboard.api import sahara as saharaclient from sahara_dashboard.content.data_processing.utils import helpers from sahara_dashboard.content.data_processing. \ utils import anti_affinity as aa from sahara_dashboard.content.data_processing.utils \ import acl as acl_utils import sahara_dashboard.content.data_processing. 
\ utils.workflow_helpers as whelpers from sahara_dashboard import utils class SelectPluginAction(workflows.Action, whelpers.PluginAndVersionMixin): hidden_create_field = forms.CharField( required=False, widget=forms.HiddenInput(attrs={"class": "hidden_create_field"})) def __init__(self, request, *args, **kwargs): super(SelectPluginAction, self).__init__(request, *args, **kwargs) sahara = saharaclient.client(request) self._generate_plugin_version_fields(sahara) class Meta(object): name = _("Select plugin and hadoop version for cluster template") help_text_template = ("cluster_templates/" "_create_general_help.html") class SelectPlugin(workflows.Step): action_class = SelectPluginAction class CreateClusterTemplate(workflows.Workflow): slug = "create_cluster_template" name = _("Create Cluster Template") finalize_button_name = _("Next") success_message = _("Created") failure_message = _("Could not create") success_url = "horizon:project:data_processing.clusters:clusters-tab" default_steps = (SelectPlugin, ) def get_success_url(self): url = urls.reverse(self.success_url) return url class GeneralConfigAction(workflows.Action): hidden_configure_field = forms.CharField( required=False, widget=forms.HiddenInput(attrs={"class": "hidden_configure_field"})) hidden_to_delete_field = forms.CharField( required=False, widget=forms.HiddenInput(attrs={"class": "hidden_to_delete_field"})) cluster_template_name = forms.CharField(label=_("Template Name")) description = forms.CharField(label=_("Description"), required=False, widget=forms.Textarea(attrs={'rows': 4})) use_autoconfig = forms.BooleanField( label=_("Auto-configure"), help_text=_("If selected, instances of a cluster will be " "automatically configured during creation. 
Otherwise you " "should manually specify configuration values"), required=False, widget=forms.CheckboxInput(), initial=True, ) is_public = acl_utils.get_is_public_form(_("cluster template")) is_protected = acl_utils.get_is_protected_form(_("cluster template")) anti_affinity = aa.anti_affinity_field() def __init__(self, request, *args, **kwargs): super(GeneralConfigAction, self).__init__(request, *args, **kwargs) plugin, hadoop_version = whelpers.\ get_plugin_and_hadoop_version(request) self.fields["plugin_name"] = forms.CharField( widget=forms.HiddenInput(), initial=plugin ) self.fields["hadoop_version"] = forms.CharField( widget=forms.HiddenInput(), initial=hadoop_version ) populate_anti_affinity_choices = aa.populate_anti_affinity_choices def get_help_text(self): extra = dict() plugin_name, hadoop_version = whelpers\ .get_plugin_and_hadoop_version(self.request) extra["plugin_name"] = plugin_name extra["hadoop_version"] = hadoop_version plugin = saharaclient.plugin_get_version_details( self.request, plugin_name, hadoop_version) extra["deprecated"] = whelpers.is_version_of_plugin_deprecated( plugin, hadoop_version) return super(GeneralConfigAction, self).get_help_text(extra) def clean(self): cleaned_data = super(GeneralConfigAction, self).clean() if cleaned_data.get("hidden_configure_field", None) \ == "create_nodegroup": self._errors = dict() return cleaned_data class Meta(object): name = _("Details") help_text_template = ("cluster_templates/_configure_general_help.html") class GeneralConfig(workflows.Step): action_class = GeneralConfigAction contributes = ("hidden_configure_field", ) def contribute(self, data, context): for k, v in data.items(): context["general_" + k] = v post = self.workflow.request.POST context['anti_affinity_info'] = post.getlist("anti_affinity") return context class ConfigureNodegroupsAction(workflows.Action): hidden_nodegroups_field = forms.CharField( required=False, widget=forms.HiddenInput(attrs={"class": "hidden_nodegroups_field"})) 
forms_ids = forms.CharField( required=False, widget=forms.HiddenInput()) def __init__(self, request, *args, **kwargs): super(ConfigureNodegroupsAction, self). \ __init__(request, *args, **kwargs) # when we copy or edit a cluster template then # request contains valuable info in both GET and POST methods req = request.GET.copy() req.update(request.POST) plugin = req.get("plugin_name") version = req.get("hadoop_version", None) or req["plugin_version"] if plugin and not version: version_name = plugin + "_version" version = req.get(version_name) if not plugin or not version: self.templates = saharaclient.nodegroup_template_find(request) else: self.templates = saharaclient.nodegroup_template_find( request, plugin_name=plugin, hadoop_version=version) deletable = req.get("deletable", dict()) if 'forms_ids' in req: self.groups = [] for id in json.loads(req['forms_ids']): group_name = "group_name_" + str(id) template_id = "template_id_" + str(id) count = "count_" + str(id) serialized = "serialized_" + str(id) self.groups.append({"name": req[group_name], "template_id": req[template_id], "count": req[count], "id": id, "deletable": deletable.get( req[group_name], "true"), "serialized": req[serialized]}) whelpers.build_node_group_fields(self, group_name, template_id, count, serialized) def clean(self): cleaned_data = super(ConfigureNodegroupsAction, self).clean() if cleaned_data.get("hidden_nodegroups_field", None) \ == "create_nodegroup": self._errors = dict() return cleaned_data class Meta(object): name = _("Node Groups") class ConfigureNodegroups(workflows.Step): action_class = ConfigureNodegroupsAction contributes = ("hidden_nodegroups_field", ) template_name = ("cluster_templates/cluster_node_groups_template.html") def contribute(self, data, context): for k, v in data.items(): context["ng_" + k] = v return context class SelectClusterSharesAction(workflows.Action): def __init__(self, request, *args, **kwargs): super(SelectClusterSharesAction, self).__init__( request, *args, 
**kwargs) possible_shares = self.get_possible_shares(request) self.fields["shares"] = whelpers.MultipleShareChoiceField( label=_("Select Shares"), widget=whelpers.ShareWidget(choices=possible_shares), required=False, choices=possible_shares ) def get_possible_shares(self, request): try: shares = manilaclient.share_list(request) choices = [(s.id, s.name) for s in shares] except Exception: exceptions.handle(request, _("Failed to get list of shares")) choices = [] return choices def clean(self): cleaned_data = super(SelectClusterSharesAction, self).clean() self._errors = dict() return cleaned_data class Meta(object): name = _("Shares") help_text = _("Select the manila shares for this cluster") class SelectClusterShares(workflows.Step): action_class = SelectClusterSharesAction def contribute(self, data, context): post = self.workflow.request.POST shares_details = [] for index in range(0, len(self.action.fields['shares'].choices) * 3): if index % 3 == 0: share = post.get("shares_{0}".format(index)) if share: path = post.get("shares_{0}".format(index + 1)) permissions = post.get("shares_{0}".format(index + 2)) shares_details.append({ "id": share, "path": path, "access_level": permissions }) context['ct_shares'] = shares_details return context class SelectDnsDomainsAction(workflows.Action): domain_name = forms.DynamicChoiceField( label=_("Domain Name"), required=False ) def __init__(self, request, *args, **kwargs): super(SelectDnsDomainsAction, self).__init__(request, *args, **kwargs) def _get_domain_choices(self, request): domains = designateclient.get_domain_names(request) choices = [(None, _('No domain is specified'))] choices.extend( [(domain.get('name'), domain.get('name')) for domain in domains]) return choices def populate_domain_name_choices(self, request, context): return self._get_domain_choices(request) class Meta(object): name = _("DNS Domain Names") help_text_template = ( "cluster_templates/_config_domain_names_help.html") class 
SelectDnsDomains(workflows.Step): action_class = SelectDnsDomainsAction def contribute(self, data, context): for k, v in data.items(): context["dns_" + k] = v return context class ConfigureClusterTemplate(whelpers.ServiceParametersWorkflow, whelpers.StatusFormatMixin): slug = "configure_cluster_template" name = _("Create Cluster Template") finalize_button_name = _("Create") success_message = _("Created Cluster Template %s") name_property = "general_cluster_template_name" success_url = ("horizon:project:data_processing.clusters:" "cluster-templates-tab") default_steps = (GeneralConfig, ConfigureNodegroups) def __init__(self, request, context_seed, entry_point, *args, **kwargs): ConfigureClusterTemplate._cls_registry = [] hlps = helpers.Helpers(request) plugin, hadoop_version = whelpers.\ get_plugin_and_hadoop_version(request) general_parameters = hlps.get_cluster_general_configs( plugin, hadoop_version) service_parameters = hlps.get_targeted_cluster_configs( plugin, hadoop_version) if saharaclient.base.is_service_enabled(request, 'share'): ConfigureClusterTemplate._register_step(self, SelectClusterShares) if saharaclient.base.is_service_enabled(request, 'dns'): ConfigureClusterTemplate._register_step(self, SelectDnsDomains) self._populate_tabs(general_parameters, service_parameters) super(ConfigureClusterTemplate, self).__init__(request, context_seed, entry_point, *args, **kwargs) def is_valid(self): steps_valid = True for step in self.steps: if not step.action.is_valid(): steps_valid = False step.has_errors = True errors_fields = list(step.action.errors.keys()) step.action.errors_fields = errors_fields if not steps_valid: return steps_valid return self.validate(self.context) def handle(self, request, context): try: node_groups = [] configs_dict = whelpers.parse_configs_from_context(context, self.defaults) ids = json.loads(context['ng_forms_ids']) for id in ids: name = context['ng_group_name_' + str(id)] template_id = context['ng_template_id_' + str(id)] count = 
context['ng_count_' + str(id)] raw_ng = context.get("ng_serialized_" + str(id)) if raw_ng and raw_ng != 'null': ng = json.loads(utils.deserialize(str(raw_ng))) else: ng = dict() ng["name"] = name ng["count"] = count if template_id and template_id != u'None': ng["node_group_template_id"] = template_id node_groups.append(ng) plugin, hadoop_version = whelpers.\ get_plugin_and_hadoop_version(request) ct_shares = [] if "ct_shares" in context: ct_shares = context["ct_shares"] domain = context.get('dns_domain_name', None) if domain == 'None': domain = None # TODO(nkonovalov): Fix client to support default_image_id saharaclient.cluster_template_create( request, context["general_cluster_template_name"], plugin, hadoop_version, context["general_description"], configs_dict, node_groups, context["anti_affinity_info"], use_autoconfig=context['general_use_autoconfig'], shares=ct_shares, is_public=context['general_is_public'], is_protected=context['general_is_protected'], domain_name=domain ) hlps = helpers.Helpers(request) if hlps.is_from_guide(): request.session["guide_cluster_template_name"] = ( context["general_cluster_template_name"]) self.success_url = ( "horizon:project:data_processing.clusters:cluster_guide") return True except api_base.APIException as e: self.error_description = str(e) return False except Exception: exceptions.handle(request, _("Cluster template creation failed")) return False
apache-2.0
bhargavvader/gensim
gensim/models/rpmodel.py
14
4006
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright (C) 2010 Radim Rehurek <radimrehurek@seznam.cz> # Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html import logging import numpy as np from gensim import interfaces, matutils, utils logger = logging.getLogger('gensim.models.rpmodel') class RpModel(interfaces.TransformationABC): """ Objects of this class allow building and maintaining a model for Random Projections (also known as Random Indexing). For theoretical background on RP, see: Kanerva et al.: "Random indexing of text samples for Latent Semantic Analysis." The main methods are: 1. constructor, which creates the random projection matrix 2. the [] method, which transforms a simple count representation into the TfIdf space. >>> rp = RpModel(corpus) >>> print(rp[some_doc]) >>> rp.save('/tmp/foo.rp_model') Model persistency is achieved via its load/save methods. """ def __init__(self, corpus, id2word=None, num_topics=300): """ `id2word` is a mapping from word ids (integers) to words (strings). It is used to determine the vocabulary size, as well as for debugging and topic printing. If not set, it will be determined from the corpus. """ self.id2word = id2word self.num_topics = num_topics if corpus is not None: self.initialize(corpus) def __str__(self): return "RpModel(num_terms=%s, num_topics=%s)" % (self.num_terms, self.num_topics) def initialize(self, corpus): """ Initialize the random projection matrix. """ if self.id2word is None: logger.info("no word id mapping provided; initializing from corpus, assuming identity") self.id2word = utils.dict_from_corpus(corpus) self.num_terms = len(self.id2word) else: self.num_terms = 1 + max([-1] + self.id2word.keys()) shape = self.num_topics, self.num_terms logger.info("constructing %s random matrix" % str(shape)) # Now construct the projection matrix itself. 
# Here i use a particular form, derived in "Achlioptas: Database-friendly random projection", # and his (1) scenario of Theorem 1.1 in particular (all entries are +1/-1). randmat = 1 - 2 * np.random.binomial(1, 0.5, shape) # convert from 0/1 to +1/-1 self.projection = np.asfortranarray(randmat, dtype=np.float32) # convert from int32 to floats, for faster multiplications # TODO: check whether the Fortran-order shenanigans still make sense. In the original # code (~2010), this made a BIG difference for np BLAS implementations; perhaps now the wrappers # are smarter and this is no longer needed? def __getitem__(self, bow): """ Return RP representation of the input vector and/or corpus. """ # if the input vector is in fact a corpus, return a transformed corpus as result is_corpus, bow = utils.is_corpus(bow) if is_corpus: return self._apply(bow) if getattr(self, 'freshly_loaded', False): # This is a hack to work around a bug in np, where a FORTRAN-order array # unpickled from disk segfaults on using it. self.freshly_loaded = False self.projection = self.projection.copy('F') # simply making a fresh copy fixes the broken array vec = matutils.sparse2full(bow, self.num_terms).reshape(self.num_terms, 1) / np.sqrt(self.num_topics) vec = np.asfortranarray(vec, dtype=np.float32) topic_dist = np.dot(self.projection, vec) # (k, d) * (d, 1) = (k, 1) return [(topicid, float(topicvalue)) for topicid, topicvalue in enumerate(topic_dist.flat) if np.isfinite(topicvalue) and not np.allclose(topicvalue, 0.0)] def __setstate__(self, state): self.__dict__ = state self.freshly_loaded = True #endclass RpModel
lgpl-2.1
harshilasu/LinkurApp
y/google-cloud-sdk/lib/googlecloudsdk/core/remote_completion.py
5
4288
# Copyright 2014 Google Inc. All Rights Reserved. """Remote resource completion and caching.""" import logging import os import time from googlecloudsdk.core import config from googlecloudsdk.core import properties class RemoteCompletion(object): """Class to cache the names of remote resources.""" CACHE_HITS = 0 CACHE_TRIES = 0 _TIMEOUTS = { # Timeouts for resources in seconds 'instances': 600, 'region': 3600*10, 'zone': 3600*10 } _ITEM_NAME_FUN = { 'compute': lambda item: item['name'], 'sql': lambda item: item.instance } def __init__(self): """Set the cache directory.""" self.project = properties.VALUES.core.project.Get(required=True) self.cache_dir = config.Paths().completion_cache_dir def CachePath(self, resource, zoneregion): """Creates a pathname for the resource. Args: resource: The resource as subcommand.resource. zoneregion: The zone or region name. Returns: Returns a pathname for the resource. """ path = os.path.join(self.cache_dir, resource, self.project) if zoneregion: path = os.path.join(path, zoneregion) return path def GetFromCache(self, resource, zoneregion=None): """Return a list of names for the resource and zoneregion. Args: resource: The resource as subcommand.resource. zoneregion: The zone or region name or None. Returns: Returns a list of names if in the cache. """ options = [] RemoteCompletion.CACHE_TRIES += 1 if not zoneregion: zoneregion = '_ALL_ZONES' fpath = self.CachePath(resource, zoneregion) try: if os.path.getmtime(fpath) > time.time(): with open(fpath, 'r') as f: line = f.read().rstrip('\n') options = line.split(' ') RemoteCompletion.CACHE_HITS += 1 return options except Exception: # pylint:disable=broad-except return None return None def StoreInCache(self, resource, options, zoneregion): """Return the list of names for the resource and zoneregion. Args: resource: The resource as subcommand.resource. options: A list of possible completions. zoneregion: The zone or region name, or None if no zone or region. 
Returns: None """ path = self.CachePath(resource, zoneregion) dirname = os.path.dirname(path) if not os.path.isdir(dirname): os.makedirs(dirname) if options: with open(path, 'w') as f: f.write(' '.join(options) + '\n') now = time.time() if options is None: timeout = 0 else: timeout = RemoteCompletion._TIMEOUTS.get(resource, 300) os.utime(path, (now, now+timeout)) @staticmethod def GetCompleterForResource(resource, cli): """Returns a completer function for the give resource. Args: resource: The resource as subcommand.resource. cli: The calliope instance. Returns: A completer function for the specified resource. """ def RemoteCompleter(parsed_args, **unused_kwargs): """Run list command on resource to generates completion options.""" options = [] try: command = resource.split('.') + ['list'] zoneregion = None if command[0] == 'compute': zoneregion = '_ALL_ZONES' if hasattr(parsed_args, 'zone') and parsed_args.zone: zoneregion = parsed_args.zone command.append('--zone') command.append(zoneregion) if hasattr(parsed_args, 'region') and parsed_args.region: zoneregion = parsed_args.region command.append('--region') command.append(zoneregion) ccache = RemoteCompletion() options = ccache.GetFromCache(resource, zoneregion) if options is None: properties.VALUES.core.user_output_enabled.Set(False) items = list(cli().Execute(command, call_arg_complete=False)) fun = RemoteCompletion._ITEM_NAME_FUN[command[0]] options = [fun(item) for item in items] ccache.StoreInCache(resource, options, zoneregion) except Exception: # pylint:disable=broad-except logging.error(resource + 'completion command failed', exc_info=True) return None return options return RemoteCompleter
gpl-3.0
yongshengwang/hue
build/env/lib/python2.7/site-packages/django_extensions-1.5.0-py2.7.egg/django_extensions/mongodb/fields/json.py
44
2251
""" JSONField automatically serializes most Python terms to JSON data. Creates a TEXT field with a default value of "{}". See test_json.py for more information. from django.db import models from django_extensions.db.fields import json class LOL(models.Model): extra = json.JSONField() """ import six import datetime from decimal import Decimal from django.conf import settings from django.utils import simplejson from mongoengine.fields import StringField class JSONEncoder(simplejson.JSONEncoder): def default(self, obj): if isinstance(obj, Decimal): return str(obj) elif isinstance(obj, datetime.datetime): assert settings.TIME_ZONE == 'UTC' return obj.strftime('%Y-%m-%dT%H:%M:%SZ') return simplejson.JSONEncoder.default(self, obj) def dumps(value): assert isinstance(value, dict) return JSONEncoder().encode(value) def loads(txt): value = simplejson.loads(txt, parse_float=Decimal, encoding=settings.DEFAULT_CHARSET) assert isinstance(value, dict) return value class JSONDict(dict): """ Hack so repr() called by dumpdata will output JSON instead of Python formatted data. This way fixtures will work! """ def __repr__(self): return dumps(self) class JSONField(StringField): """JSONField is a generic textfield that neatly serializes/unserializes JSON objects seamlessly. Main thingy must be a dict object.""" def __init__(self, *args, **kwargs): if 'default' not in kwargs: kwargs['default'] = '{}' StringField.__init__(self, *args, **kwargs) def to_python(self, value): """Convert our string value to JSON after we load it from the DB""" if not value: return {} elif isinstance(value, six.string_types): res = loads(value) assert isinstance(res, dict) return JSONDict(**res) else: return value def get_db_prep_save(self, value): """Convert our JSON object to a string before we save""" if not value: return super(JSONField, self).get_db_prep_save("") else: return super(JSONField, self).get_db_prep_save(dumps(value))
apache-2.0
knkinnard/byte-2
lib/werkzeug/urls.py
77
36634
# -*- coding: utf-8 -*- """ werkzeug.urls ~~~~~~~~~~~~~ ``werkzeug.urls`` used to provide several wrapper functions for Python 2 urlparse, whose main purpose were to work around the behavior of the Py2 stdlib and its lack of unicode support. While this was already a somewhat inconvenient situation, it got even more complicated because Python 3's ``urllib.parse`` actually does handle unicode properly. In other words, this module would wrap two libraries with completely different behavior. So now this module contains a 2-and-3-compatible backport of Python 3's ``urllib.parse``, which is mostly API-compatible. :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. """ import os import re from werkzeug._compat import text_type, PY2, to_unicode, \ to_native, implements_to_string, try_coerce_native, \ normalize_string_tuple, make_literal_wrapper, \ fix_tuple_repr from werkzeug._internal import _encode_idna, _decode_idna from werkzeug.datastructures import MultiDict, iter_multi_items from collections import namedtuple # A regular expression for what a valid schema looks like _scheme_re = re.compile(r'^[a-zA-Z0-9+-.]+$') # Characters that are safe in any part of an URL. _always_safe = (b'abcdefghijklmnopqrstuvwxyz' b'ABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789_.-+') _hexdigits = '0123456789ABCDEFabcdef' _hextobyte = dict( ((a + b).encode(), int(a + b, 16)) for a in _hexdigits for b in _hexdigits ) _URLTuple = fix_tuple_repr(namedtuple('_URLTuple', ['scheme', 'netloc', 'path', 'query', 'fragment'])) class BaseURL(_URLTuple): '''Superclass of :py:class:`URL` and :py:class:`BytesURL`.''' __slots__ = () def replace(self, **kwargs): """Return an URL with the same values, except for those parameters given new values by whichever keyword arguments are specified.""" return self._replace(**kwargs) @property def host(self): """The host part of the URL if available, otherwise `None`. 
The host is either the hostname or the IP address mentioned in the URL. It will not contain the port. """ return self._split_host()[0] @property def ascii_host(self): """Works exactly like :attr:`host` but will return a result that is restricted to ASCII. If it finds a netloc that is not ASCII it will attempt to idna decode it. This is useful for socket operations when the URL might include internationalized characters. """ rv = self.host if rv is not None and isinstance(rv, text_type): rv = _encode_idna(rv) return to_native(rv, 'ascii', 'ignore') @property def port(self): """The port in the URL as an integer if it was present, `None` otherwise. This does not fill in default ports. """ try: rv = int(to_native(self._split_host()[1])) if 0 <= rv <= 65535: return rv except (ValueError, TypeError): pass @property def auth(self): """The authentication part in the URL if available, `None` otherwise. """ return self._split_netloc()[0] @property def username(self): """The username if it was part of the URL, `None` otherwise. This undergoes URL decoding and will always be a unicode string. """ rv = self._split_auth()[0] if rv is not None: return _url_unquote_legacy(rv) @property def raw_username(self): """The username if it was part of the URL, `None` otherwise. Unlike :attr:`username` this one is not being decoded. """ return self._split_auth()[0] @property def password(self): """The password if it was part of the URL, `None` otherwise. This undergoes URL decoding and will always be a unicode string. """ rv = self._split_auth()[1] if rv is not None: return _url_unquote_legacy(rv) @property def raw_password(self): """The password if it was part of the URL, `None` otherwise. Unlike :attr:`password` this one is not being decoded. """ return self._split_auth()[1] def decode_query(self, *args, **kwargs): """Decodes the query part of the URL. Ths is a shortcut for calling :func:`url_decode` on the query argument. 
The arguments and keyword arguments are forwarded to :func:`url_decode` unchanged. """ return url_decode(self.query, *args, **kwargs) def join(self, *args, **kwargs): """Joins this URL with another one. This is just a convenience function for calling into :meth:`url_join` and then parsing the return value again. """ return url_parse(url_join(self, *args, **kwargs)) def to_url(self): """Returns a URL string or bytes depending on the type of the information stored. This is just a convenience function for calling :meth:`url_unparse` for this URL. """ return url_unparse(self) def decode_netloc(self): """Decodes the netloc part into a string.""" rv = _decode_idna(self.host or '') if ':' in rv: rv = '[%s]' % rv port = self.port if port is not None: rv = '%s:%d' % (rv, port) auth = ':'.join(filter(None, [ _url_unquote_legacy(self.raw_username or '', '/:%@'), _url_unquote_legacy(self.raw_password or '', '/:%@'), ])) if auth: rv = '%s@%s' % (auth, rv) return rv def to_uri_tuple(self): """Returns a :class:`BytesURL` tuple that holds a URI. This will encode all the information in the URL properly to ASCII using the rules a web browser would follow. It's usually more interesting to directly call :meth:`iri_to_uri` which will return a string. """ return url_parse(iri_to_uri(self).encode('ascii')) def to_iri_tuple(self): """Returns a :class:`URL` tuple that holds a IRI. This will try to decode as much information as possible in the URL without losing information similar to how a web browser does it for the URL bar. It's usually more interesting to directly call :meth:`uri_to_iri` which will return a string. """ return url_parse(uri_to_iri(self)) def get_file_location(self, pathformat=None): """Returns a tuple with the location of the file in the form ``(server, location)``. If the netloc is empty in the URL or points to localhost, it's represented as ``None``. The `pathformat` by default is autodetection but needs to be set when working with URLs of a specific system. 
The supported values are ``'windows'`` when working with Windows or DOS paths and ``'posix'`` when working with posix paths. If the URL does not point to to a local file, the server and location are both represented as ``None``. :param pathformat: The expected format of the path component. Currently ``'windows'`` and ``'posix'`` are supported. Defaults to ``None`` which is autodetect. """ if self.scheme != 'file': return None, None path = url_unquote(self.path) host = self.netloc or None if pathformat is None: if os.name == 'nt': pathformat = 'windows' else: pathformat = 'posix' if pathformat == 'windows': if path[:1] == '/' and path[1:2].isalpha() and path[2:3] in '|:': path = path[1:2] + ':' + path[3:] windows_share = path[:3] in ('\\' * 3, '/' * 3) import ntpath path = ntpath.normpath(path) # Windows shared drives are represented as ``\\host\\directory``. # That results in a URL like ``file://///host/directory``, and a # path like ``///host/directory``. We need to special-case this # because the path contains the hostname. 
if windows_share and host is None: parts = path.lstrip('\\').split('\\', 1) if len(parts) == 2: host, path = parts else: host = parts[0] path = '' elif pathformat == 'posix': import posixpath path = posixpath.normpath(path) else: raise TypeError('Invalid path format %s' % repr(pathformat)) if host in ('127.0.0.1', '::1', 'localhost'): host = None return host, path def _split_netloc(self): if self._at in self.netloc: return self.netloc.split(self._at, 1) return None, self.netloc def _split_auth(self): auth = self._split_netloc()[0] if not auth: return None, None if self._colon not in auth: return auth, None return auth.split(self._colon, 1) def _split_host(self): rv = self._split_netloc()[1] if not rv: return None, None if not rv.startswith(self._lbracket): if self._colon in rv: return rv.split(self._colon, 1) return rv, None idx = rv.find(self._rbracket) if idx < 0: return rv, None host = rv[1:idx] rest = rv[idx + 1:] if rest.startswith(self._colon): return host, rest[1:] return host, None @implements_to_string class URL(BaseURL): """Represents a parsed URL. This behaves like a regular tuple but also has some extra attributes that give further insight into the URL. """ __slots__ = () _at = '@' _colon = ':' _lbracket = '[' _rbracket = ']' def __str__(self): return self.to_url() def encode_netloc(self): """Encodes the netloc part to an ASCII safe URL as bytes.""" rv = self.ascii_host or '' if ':' in rv: rv = '[%s]' % rv port = self.port if port is not None: rv = '%s:%d' % (rv, port) auth = ':'.join(filter(None, [ url_quote(self.raw_username or '', 'utf-8', 'strict', '/:%'), url_quote(self.raw_password or '', 'utf-8', 'strict', '/:%'), ])) if auth: rv = '%s@%s' % (auth, rv) return to_native(rv) def encode(self, charset='utf-8', errors='replace'): """Encodes the URL to a tuple made out of bytes. The charset is only being used for the path, query and fragment. 
""" return BytesURL( self.scheme.encode('ascii'), self.encode_netloc(), self.path.encode(charset, errors), self.query.encode(charset, errors), self.fragment.encode(charset, errors) ) class BytesURL(BaseURL): """Represents a parsed URL in bytes.""" __slots__ = () _at = b'@' _colon = b':' _lbracket = b'[' _rbracket = b']' def __str__(self): return self.to_url().decode('utf-8', 'replace') def encode_netloc(self): """Returns the netloc unchanged as bytes.""" return self.netloc def decode(self, charset='utf-8', errors='replace'): """Decodes the URL to a tuple made out of strings. The charset is only being used for the path, query and fragment. """ return URL( self.scheme.decode('ascii'), self.decode_netloc(), self.path.decode(charset, errors), self.query.decode(charset, errors), self.fragment.decode(charset, errors) ) def _unquote_to_bytes(string, unsafe=''): if isinstance(string, text_type): string = string.encode('utf-8') if isinstance(unsafe, text_type): unsafe = unsafe.encode('utf-8') unsafe = frozenset(bytearray(unsafe)) bits = iter(string.split(b'%')) result = bytearray(next(bits, b'')) for item in bits: try: char = _hextobyte[item[:2]] if char in unsafe: raise KeyError() result.append(char) result.extend(item[2:]) except KeyError: result.extend(b'%') result.extend(item) return bytes(result) def _url_encode_impl(obj, charset, encode_keys, sort, key): iterable = iter_multi_items(obj) if sort: iterable = sorted(iterable, key=key) for key, value in iterable: if value is None: continue if not isinstance(key, bytes): key = text_type(key).encode(charset) if not isinstance(value, bytes): value = text_type(value).encode(charset) yield url_quote_plus(key) + '=' + url_quote_plus(value) def _url_unquote_legacy(value, unsafe=''): try: return url_unquote(value, charset='utf-8', errors='strict', unsafe=unsafe) except UnicodeError: return url_unquote(value, charset='latin1', unsafe=unsafe) def url_parse(url, scheme=None, allow_fragments=True): """Parses a URL from a string into 
a :class:`URL` tuple. If the URL is lacking a scheme it can be provided as second argument. Otherwise, it is ignored. Optionally fragments can be stripped from the URL by setting `allow_fragments` to `False`. The inverse of this function is :func:`url_unparse`. :param url: the URL to parse. :param scheme: the default schema to use if the URL is schemaless. :param allow_fragments: if set to `False` a fragment will be removed from the URL. """ s = make_literal_wrapper(url) is_text_based = isinstance(url, text_type) if scheme is None: scheme = s('') netloc = query = fragment = s('') i = url.find(s(':')) if i > 0 and _scheme_re.match(to_native(url[:i], errors='replace')): # make sure "iri" is not actually a port number (in which case # "scheme" is really part of the path) rest = url[i + 1:] if not rest or any(c not in s('0123456789') for c in rest): # not a port number scheme, url = url[:i].lower(), rest if url[:2] == s('//'): delim = len(url) for c in s('/?#'): wdelim = url.find(c, 2) if wdelim >= 0: delim = min(delim, wdelim) netloc, url = url[2:delim], url[delim:] if (s('[') in netloc and s(']') not in netloc) or \ (s(']') in netloc and s('[') not in netloc): raise ValueError('Invalid IPv6 URL') if allow_fragments and s('#') in url: url, fragment = url.split(s('#'), 1) if s('?') in url: url, query = url.split(s('?'), 1) result_type = is_text_based and URL or BytesURL return result_type(scheme, netloc, url, query, fragment) def url_quote(string, charset='utf-8', errors='strict', safe='/:', unsafe=''): """URL encode a single string with a given encoding. :param s: the string to quote. :param charset: the charset to be used. :param safe: an optional sequence of safe characters. :param unsafe: an optional sequence of unsafe characters. .. versionadded:: 0.9.2 The `unsafe` parameter was added. 
""" if not isinstance(string, (text_type, bytes, bytearray)): string = text_type(string) if isinstance(string, text_type): string = string.encode(charset, errors) if isinstance(safe, text_type): safe = safe.encode(charset, errors) if isinstance(unsafe, text_type): unsafe = unsafe.encode(charset, errors) safe = frozenset(bytearray(safe) + _always_safe) - frozenset(bytearray(unsafe)) rv = bytearray() for char in bytearray(string): if char in safe: rv.append(char) else: rv.extend(('%%%02X' % char).encode('ascii')) return to_native(bytes(rv)) def url_quote_plus(string, charset='utf-8', errors='strict', safe=''): """URL encode a single string with the given encoding and convert whitespace to "+". :param s: The string to quote. :param charset: The charset to be used. :param safe: An optional sequence of safe characters. """ return url_quote(string, charset, errors, safe + ' ', '+').replace(' ', '+') def url_unparse(components): """The reverse operation to :meth:`url_parse`. This accepts arbitrary as well as :class:`URL` tuples and returns a URL as a string. :param components: the parsed URL as tuple which should be converted into a URL string. """ scheme, netloc, path, query, fragment = \ normalize_string_tuple(components) s = make_literal_wrapper(scheme) url = s('') # We generally treat file:///x and file:/x the same which is also # what browsers seem to do. This also allows us to ignore a schema # register for netloc utilization or having to differenciate between # empty and missing netloc. if netloc or (scheme and path.startswith(s('/'))): if path and path[:1] != s('/'): path = s('/') + path url = s('//') + (netloc or s('')) + path elif path: url += path if scheme: url = scheme + s(':') + url if query: url = url + s('?') + query if fragment: url = url + s('#') + fragment return url def url_unquote(string, charset='utf-8', errors='replace', unsafe=''): """URL decode a single string with a given encoding. 
If the charset is set to `None` no unicode decoding is performed and raw bytes are returned. :param s: the string to unquote. :param charset: the charset of the query string. If set to `None` no unicode decoding will take place. :param errors: the error handling for the charset decoding. """ rv = _unquote_to_bytes(string, unsafe) if charset is not None: rv = rv.decode(charset, errors) return rv def url_unquote_plus(s, charset='utf-8', errors='replace'): """URL decode a single string with the given `charset` and decode "+" to whitespace. Per default encoding errors are ignored. If you want a different behavior you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a :exc:`HTTPUnicodeError` is raised. :param s: The string to unquote. :param charset: the charset of the query string. If set to `None` no unicode decoding will take place. :param errors: The error handling for the `charset` decoding. """ if isinstance(s, text_type): s = s.replace(u'+', u' ') else: s = s.replace(b'+', b' ') return url_unquote(s, charset, errors) def url_fix(s, charset='utf-8'): r"""Sometimes you get an URL by a user that just isn't a real URL because it contains unsafe characters like ' ' and so on. This function can fix some of the problems in a similar way browsers handle data entered by the user: >>> url_fix(u'http://de.wikipedia.org/wiki/Elf (Begriffskl\xe4rung)') 'http://de.wikipedia.org/wiki/Elf%20(Begriffskl%C3%A4rung)' :param s: the string with the URL to fix. :param charset: The target charset for the URL if the url was given as unicode string. """ # First step is to switch to unicode processing and to convert # backslashes (which are invalid in URLs anyways) to slashes. This is # consistent with what Chrome does. 
s = to_unicode(s, charset, 'replace').replace('\\', '/') # For the specific case that we look like a malformed windows URL # we want to fix this up manually: if s.startswith('file://') and s[7:8].isalpha() and s[8:10] in (':/', '|/'): s = 'file:///' + s[7:] url = url_parse(s) path = url_quote(url.path, charset, safe='/%+$!*\'(),') qs = url_quote_plus(url.query, charset, safe=':&%=+$!*\'(),') anchor = url_quote_plus(url.fragment, charset, safe=':&%=+$!*\'(),') return to_native(url_unparse((url.scheme, url.encode_netloc(), path, qs, anchor))) def uri_to_iri(uri, charset='utf-8', errors='replace'): r""" Converts a URI in a given charset to a IRI. Examples for URI versus IRI: >>> uri_to_iri(b'http://xn--n3h.net/') u'http://\u2603.net/' >>> uri_to_iri(b'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th') u'http://\xfcser:p\xe4ssword@\u2603.net/p\xe5th' Query strings are left unchanged: >>> uri_to_iri('/?foo=24&x=%26%2f') u'/?foo=24&x=%26%2f' .. versionadded:: 0.6 :param uri: The URI to convert. :param charset: The charset of the URI. :param errors: The error handling on decode. """ if isinstance(uri, tuple): uri = url_unparse(uri) uri = url_parse(to_unicode(uri, charset)) path = url_unquote(uri.path, charset, errors, '%/;?') query = url_unquote(uri.query, charset, errors, '%;/?:@&=+,$#') fragment = url_unquote(uri.fragment, charset, errors, '%;/?:@&=+,$#') return url_unparse((uri.scheme, uri.decode_netloc(), path, query, fragment)) def iri_to_uri(iri, charset='utf-8', errors='strict', safe_conversion=False): r""" Converts any unicode based IRI to an acceptable ASCII URI. Werkzeug always uses utf-8 URLs internally because this is what browsers and HTTP do as well. In some places where it accepts an URL it also accepts a unicode IRI and converts it into a URI. 
Examples for IRI versus URI: >>> iri_to_uri(u'http://☃.net/') 'http://xn--n3h.net/' >>> iri_to_uri(u'http://üser:pässword@☃.net/påth') 'http://%C3%BCser:p%C3%A4ssword@xn--n3h.net/p%C3%A5th' There is a general problem with IRI and URI conversion with some protocols that appear in the wild that are in violation of the URI specification. In places where Werkzeug goes through a forced IRI to URI conversion it will set the `safe_conversion` flag which will not perform a conversion if the end result is already ASCII. This can mean that the return value is not an entirely correct URI but it will not destroy such invalid URLs in the process. As an example consider the following two IRIs:: magnet:?xt=uri:whatever itms-services://?action=download-manifest The internal representation after parsing of those URLs is the same and there is no way to reconstruct the original one. If safe conversion is enabled however this function becomes a noop for both of those strings as they both can be considered URIs. .. versionadded:: 0.6 .. versionchanged:: 0.9.6 The `safe_conversion` parameter was added. :param iri: The IRI to convert. :param charset: The charset for the URI. :param safe_conversion: indicates if a safe conversion should take place. For more information see the explanation above. 
""" if isinstance(iri, tuple): iri = url_unparse(iri) if safe_conversion: try: native_iri = to_native(iri) ascii_iri = to_native(iri).encode('ascii') if ascii_iri.split() == [ascii_iri]: return native_iri except UnicodeError: pass iri = url_parse(to_unicode(iri, charset, errors)) netloc = iri.encode_netloc() path = url_quote(iri.path, charset, errors, '/:~+%') query = url_quote(iri.query, charset, errors, '%&[]:;$*()+,!?*/=') fragment = url_quote(iri.fragment, charset, errors, '=%&[]:;$()+,!?*/') return to_native(url_unparse((iri.scheme, netloc, path, query, fragment))) def url_decode(s, charset='utf-8', decode_keys=False, include_empty=True, errors='replace', separator='&', cls=None): """ Parse a querystring and return it as :class:`MultiDict`. There is a difference in key decoding on different Python versions. On Python 3 keys will always be fully decoded whereas on Python 2, keys will remain bytestrings if they fit into ASCII. On 2.x keys can be forced to be unicode by setting `decode_keys` to `True`. If the charset is set to `None` no unicode decoding will happen and raw bytes will be returned. Per default a missing value for a key will default to an empty key. If you don't want that behavior you can set `include_empty` to `False`. Per default encoding errors are ignored. If you want a different behavior you can set `errors` to ``'replace'`` or ``'strict'``. In strict mode a `HTTPUnicodeError` is raised. .. versionchanged:: 0.5 In previous versions ";" and "&" could be used for url decoding. This changed in 0.5 where only "&" is supported. If you want to use ";" instead a different `separator` can be provided. The `cls` parameter was added. :param s: a string with the query string to decode. :param charset: the charset of the query string. If set to `None` no unicode decoding will take place. :param decode_keys: Used on Python 2.x to control whether keys should be forced to be unicode objects. If set to `True` then keys will be unicode in all cases. 
Otherwise, they remain `str` if they fit into ASCII. :param include_empty: Set to `False` if you don't want empty values to appear in the dict. :param errors: the decoding error behavior. :param separator: the pair separator to be used, defaults to ``&`` :param cls: an optional dict class to use. If this is not specified or `None` the default :class:`MultiDict` is used. """ if cls is None: cls = MultiDict if isinstance(s, text_type) and not isinstance(separator, text_type): separator = separator.decode(charset or 'ascii') elif isinstance(s, bytes) and not isinstance(separator, bytes): separator = separator.encode(charset or 'ascii') return cls(_url_decode_impl(s.split(separator), charset, decode_keys, include_empty, errors)) def url_decode_stream(stream, charset='utf-8', decode_keys=False, include_empty=True, errors='replace', separator='&', cls=None, limit=None, return_iterator=False): """Works like :func:`url_decode` but decodes a stream. The behavior of stream and limit follows functions like :func:`~werkzeug.wsgi.make_line_iter`. The generator of pairs is directly fed to the `cls` so you can consume the data while it's parsed. .. versionadded:: 0.8 :param stream: a stream with the encoded querystring :param charset: the charset of the query string. If set to `None` no unicode decoding will take place. :param decode_keys: Used on Python 2.x to control whether keys should be forced to be unicode objects. If set to `True`, keys will be unicode in all cases. Otherwise, they remain `str` if they fit into ASCII. :param include_empty: Set to `False` if you don't want empty values to appear in the dict. :param errors: the decoding error behavior. :param separator: the pair separator to be used, defaults to ``&`` :param cls: an optional dict class to use. If this is not specified or `None` the default :class:`MultiDict` is used. :param limit: the content length of the URL data. Not necessary if a limited stream is provided. 
:param return_iterator: if set to `True` the `cls` argument is ignored and an iterator over all decoded pairs is returned """ from werkzeug.wsgi import make_chunk_iter if return_iterator: cls = lambda x: x elif cls is None: cls = MultiDict pair_iter = make_chunk_iter(stream, separator, limit) return cls(_url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors)) def _url_decode_impl(pair_iter, charset, decode_keys, include_empty, errors): for pair in pair_iter: if not pair: continue s = make_literal_wrapper(pair) equal = s('=') if equal in pair: key, value = pair.split(equal, 1) else: if not include_empty: continue key = pair value = s('') key = url_unquote_plus(key, charset, errors) if charset is not None and PY2 and not decode_keys: key = try_coerce_native(key) yield key, url_unquote_plus(value, charset, errors) def url_encode(obj, charset='utf-8', encode_keys=False, sort=False, key=None, separator=b'&'): """URL encode a dict/`MultiDict`. If a value is `None` it will not appear in the result string. Per default only values are encoded into the target charset strings. If `encode_keys` is set to ``True`` unicode keys are supported too. If `sort` is set to `True` the items are sorted by `key` or the default sorting algorithm. .. versionadded:: 0.5 `sort`, `key`, and `separator` were added. :param obj: the object to encode into a query string. :param charset: the charset of the query string. :param encode_keys: set to `True` if you have unicode keys. (Ignored on Python 3.x) :param sort: set to `True` if you want parameters to be sorted by `key`. :param separator: the separator to be used for the pairs. :param key: an optional function to be used for sorting. For more details check out the :func:`sorted` documentation. 
""" separator = to_native(separator, 'ascii') return separator.join(_url_encode_impl(obj, charset, encode_keys, sort, key)) def url_encode_stream(obj, stream=None, charset='utf-8', encode_keys=False, sort=False, key=None, separator=b'&'): """Like :meth:`url_encode` but writes the results to a stream object. If the stream is `None` a generator over all encoded pairs is returned. .. versionadded:: 0.8 :param obj: the object to encode into a query string. :param stream: a stream to write the encoded object into or `None` if an iterator over the encoded pairs should be returned. In that case the separator argument is ignored. :param charset: the charset of the query string. :param encode_keys: set to `True` if you have unicode keys. (Ignored on Python 3.x) :param sort: set to `True` if you want parameters to be sorted by `key`. :param separator: the separator to be used for the pairs. :param key: an optional function to be used for sorting. For more details check out the :func:`sorted` documentation. """ separator = to_native(separator, 'ascii') gen = _url_encode_impl(obj, charset, encode_keys, sort, key) if stream is None: return gen for idx, chunk in enumerate(gen): if idx: stream.write(separator) stream.write(chunk) def url_join(base, url, allow_fragments=True): """Join a base URL and a possibly relative URL to form an absolute interpretation of the latter. :param base: the base URL for the join operation. :param url: the URL to join. :param allow_fragments: indicates whether fragments should be allowed. 
""" if isinstance(base, tuple): base = url_unparse(base) if isinstance(url, tuple): url = url_unparse(url) base, url = normalize_string_tuple((base, url)) s = make_literal_wrapper(base) if not base: return url if not url: return base bscheme, bnetloc, bpath, bquery, bfragment = \ url_parse(base, allow_fragments=allow_fragments) scheme, netloc, path, query, fragment = \ url_parse(url, bscheme, allow_fragments) if scheme != bscheme: return url if netloc: return url_unparse((scheme, netloc, path, query, fragment)) netloc = bnetloc if path[:1] == s('/'): segments = path.split(s('/')) elif not path: segments = bpath.split(s('/')) if not query: query = bquery else: segments = bpath.split(s('/'))[:-1] + path.split(s('/')) # If the rightmost part is "./" we want to keep the slash but # remove the dot. if segments[-1] == s('.'): segments[-1] = s('') # Resolve ".." and "." segments = [segment for segment in segments if segment != s('.')] while 1: i = 1 n = len(segments) - 1 while i < n: if segments[i] == s('..') and \ segments[i - 1] not in (s(''), s('..')): del segments[i - 1:i + 1] break i += 1 else: break # Remove trailing ".." if the URL is absolute unwanted_marker = [s(''), s('..')] while segments[:2] == unwanted_marker: del segments[1] path = s('/').join(segments) return url_unparse((scheme, netloc, path, query, fragment)) class Href(object): """Implements a callable that constructs URLs with the given base. The function can be called with any number of positional and keyword arguments which than are used to assemble the URL. Works with URLs and posix paths. Positional arguments are appended as individual segments to the path of the URL: >>> href = Href('/foo') >>> href('bar', 23) '/foo/bar/23' >>> href('foo', bar=23) '/foo/foo?bar=23' If any of the arguments (positional or keyword) evaluates to `None` it will be skipped. 
If no keyword arguments are given the last argument can be a :class:`dict` or :class:`MultiDict` (or any other dict subclass), otherwise the keyword arguments are used for the query parameters, cutting off the first trailing underscore of the parameter name: >>> href(is_=42) '/foo?is=42' >>> href({'foo': 'bar'}) '/foo?foo=bar' Combining of both methods is not allowed: >>> href({'foo': 'bar'}, bar=42) Traceback (most recent call last): ... TypeError: keyword arguments and query-dicts can't be combined Accessing attributes on the href object creates a new href object with the attribute name as prefix: >>> bar_href = href.bar >>> bar_href("blub") '/foo/bar/blub' If `sort` is set to `True` the items are sorted by `key` or the default sorting algorithm: >>> href = Href("/", sort=True) >>> href(a=1, b=2, c=3) '/?a=1&b=2&c=3' .. versionadded:: 0.5 `sort` and `key` were added. """ def __init__(self, base='./', charset='utf-8', sort=False, key=None): if not base: base = './' self.base = base self.charset = charset self.sort = sort self.key = key def __getattr__(self, name): if name[:2] == '__': raise AttributeError(name) base = self.base if base[-1:] != '/': base += '/' return Href(url_join(base, name), self.charset, self.sort, self.key) def __call__(self, *path, **query): if path and isinstance(path[-1], dict): if query: raise TypeError('keyword arguments and query-dicts ' 'can\'t be combined') query, path = path[-1], path[:-1] elif query: query = dict([(k.endswith('_') and k[:-1] or k, v) for k, v in query.items()]) path = '/'.join([to_unicode(url_quote(x, self.charset), 'ascii') for x in path if x is not None]).lstrip('/') rv = self.base if path: if not rv.endswith('/'): rv += '/' rv = url_join(rv, './' + path) if query: rv += '?' + to_unicode(url_encode(query, self.charset, sort=self.sort, key=self.key), 'ascii') return to_native(rv)
apache-2.0
SahilTikale/haas
hil/cli/project.py
2
1764
"""Commands related to projects are in this module""" import click import sys from hil.cli.client_setup import client @click.group() def project(): """Commands related to project""" @project.command(name='create') @click.argument('project') def project_create(project): """Create a new project""" client.project.create(project) @project.command(name='delete') @click.argument('project') def project_delete(project): """Delete a project""" client.project.delete(project) @project.command(name='list') def project_list(): """List all projects""" q = client.project.list() sys.stdout.write('%s Projects : ' % len(q) + " ".join(q) + '\n') @project.command(name='list-networks') @click.argument('project') def project_list_networks(project): """List all networks attached to a <project>""" q = client.project.networks_in(project) sys.stdout.write( "Networks allocated to %s\t: %s\n" % (project, " ".join(q)) ) @project.group(name='node') def project_node(): """Project and node related operations""" @project_node.command(name='list') @click.argument('project') def project_node_list(project): """List all nodes attached to a <project>""" q = client.project.nodes_in(project) sys.stdout.write('Nodes allocated to %s: ' % project + " ".join(q) + '\n') @project_node.command(name='add') @click.argument('project') @click.argument('node') def project_connect_node(project, node): """Add <node> to <project>""" client.project.connect(project, node) @project_node.command(name='remove') @click.argument('project') @click.argument('node') def project_detach_node(project, node): """Remove <node> from <project>""" client.project.detach(project, node)
apache-2.0
vlvkobal/netdata
collectors/python.d.plugin/python_modules/pyyaml3/events.py
8
2476
# SPDX-License-Identifier: MIT # Abstract classes. class Event(object): def __init__(self, start_mark=None, end_mark=None): self.start_mark = start_mark self.end_mark = end_mark def __repr__(self): attributes = [key for key in ['anchor', 'tag', 'implicit', 'value'] if hasattr(self, key)] arguments = ', '.join(['%s=%r' % (key, getattr(self, key)) for key in attributes]) return '%s(%s)' % (self.__class__.__name__, arguments) class NodeEvent(Event): def __init__(self, anchor, start_mark=None, end_mark=None): self.anchor = anchor self.start_mark = start_mark self.end_mark = end_mark class CollectionStartEvent(NodeEvent): def __init__(self, anchor, tag, implicit, start_mark=None, end_mark=None, flow_style=None): self.anchor = anchor self.tag = tag self.implicit = implicit self.start_mark = start_mark self.end_mark = end_mark self.flow_style = flow_style class CollectionEndEvent(Event): pass # Implementations. class StreamStartEvent(Event): def __init__(self, start_mark=None, end_mark=None, encoding=None): self.start_mark = start_mark self.end_mark = end_mark self.encoding = encoding class StreamEndEvent(Event): pass class DocumentStartEvent(Event): def __init__(self, start_mark=None, end_mark=None, explicit=None, version=None, tags=None): self.start_mark = start_mark self.end_mark = end_mark self.explicit = explicit self.version = version self.tags = tags class DocumentEndEvent(Event): def __init__(self, start_mark=None, end_mark=None, explicit=None): self.start_mark = start_mark self.end_mark = end_mark self.explicit = explicit class AliasEvent(NodeEvent): pass class ScalarEvent(NodeEvent): def __init__(self, anchor, tag, implicit, value, start_mark=None, end_mark=None, style=None): self.anchor = anchor self.tag = tag self.implicit = implicit self.value = value self.start_mark = start_mark self.end_mark = end_mark self.style = style class SequenceStartEvent(CollectionStartEvent): pass class SequenceEndEvent(CollectionEndEvent): pass class 
MappingStartEvent(CollectionStartEvent): pass class MappingEndEvent(CollectionEndEvent): pass
gpl-3.0
FATruden/boto
tests/integration/sns/test_connection.py
3
1448
# Copyright (c) 2013 Amazon.com, Inc. or its affiliates. # All rights reserved. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. from tests.unit import unittest from boto.sns import connect_to_region class TestSNSConnection(unittest.TestCase): sns = True def setUp(self): self.connection = connect_to_region('us-west-2') def test_list_platform_applications(self): response = self.connection.list_platform_applications()
mit
charlescearl/VirtualMesos
third_party/boto-2.0b2/boto/mturk/question.py
8
14194
# Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/ # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
class Question(object):
    """A single MTurk question: identifier, content, and an answer spec.

    Serializes to the ``<Question>`` element of a MTurk QuestionForm.
    """

    QUESTION_XML_TEMPLATE = """<Question><QuestionIdentifier>%s</QuestionIdentifier>%s<IsRequired>%s</IsRequired>%s%s</Question>"""
    DISPLAY_NAME_XML_TEMPLATE = """<DisplayName>%s</DisplayName>"""

    def __init__(self, identifier, content, answer_spec,
                 is_required=False, display_name=None):
        self.identifier = identifier
        self.content = content          # a QuestionContent instance
        self.answer_spec = answer_spec  # an AnswerSpecification instance
        self.is_required = is_required
        self.display_name = display_name

    def get_as_params(self, label='Question', identifier=None):
        """Return ``{label: xml}``; refuses to proceed without an identifier."""
        if identifier is None:
            raise ValueError("identifier (QuestionIdentifier) is required per MTurk spec.")
        return {label: self.get_as_xml()}

    def get_as_xml(self):
        """Render this question as a ``<Question>`` XML fragment."""
        # DisplayName is optional; include it only when set.
        display_name_xml = ''
        if self.display_name:
            display_name_xml = self.DISPLAY_NAME_XML_TEMPLATE % (self.display_name)
        return Question.QUESTION_XML_TEMPLATE % (self.identifier,
                                                 display_name_xml,
                                                 str(self.is_required).lower(),
                                                 self.content.get_as_xml(),
                                                 self.answer_spec.get_as_xml())


class ExternalQuestion(object):
    """An MTurk question rendered by an external web page in an iframe."""

    EXTERNAL_QUESTIONFORM_SCHEMA_LOCATION = "http://mechanicalturk.amazonaws.com/AWSMechanicalTurkDataSchemas/2006-07-14/ExternalQuestion.xsd"
    EXTERNAL_QUESTION_XML_TEMPLATE = """<ExternalQuestion xmlns="%s"><ExternalURL>%s</ExternalURL><FrameHeight>%s</FrameHeight></ExternalQuestion>"""

    def __init__(self, external_url, frame_height):
        self.external_url = external_url
        self.frame_height = frame_height

    def get_as_params(self, label='ExternalQuestion'):
        return {label: self.get_as_xml()}

    def get_as_xml(self):
        """Render as an ``<ExternalQuestion>`` XML fragment."""
        return ExternalQuestion.EXTERNAL_QUESTION_XML_TEMPLATE % (
            ExternalQuestion.EXTERNAL_QUESTIONFORM_SCHEMA_LOCATION,
            self.external_url, self.frame_height)


class OrderedContent(object):
    """An ordered list of (field, value) content items rendered to XML."""

    def __init__(self):
        self.items = []

    def append(self, field, value):
        "Expects field type and value"
        self.items.append((field, value))

    def get_binary_xml(self, field, value):
        # ``value`` is expected to be a mapping with 'type', 'subtype',
        # 'dataurl' and 'alttext' keys.
        return """
<Binary>
<MimeType>
<Type>%s</Type>
<SubType>%s</SubType>
</MimeType>
<DataURL>%s</DataURL>
<AltText>%s</AltText>
</Binary>""" % (value['type'], value['subtype'], value['dataurl'], value['alttext'])

    def get_application_xml(self, field, value):
        raise NotImplementedError("Application question content is not yet supported.")

    def get_as_xml(self):
        """Concatenate each item's XML, dispatching on the field name."""
        default_handler = lambda f, v: '<%s>%s</%s>' % (f, v, f)
        # Renamed lambda arg from the builtin-shadowing ``list``.
        bulleted_list_handler = lambda _, items: '<List>%s</List>' % ''.join(
            [('<ListItem>%s</ListItem>' % item) for item in items])
        formatted_content_handler = lambda _, content: "<FormattedContent><![CDATA[%s]]></FormattedContent>" % content
        # Dispatch table replaces the original if/elif chain; same mapping.
        handlers = {
            'List': bulleted_list_handler,
            'Application': self.get_application_xml,
            'Binary': self.get_binary_xml,
            'FormattedContent': formatted_content_handler,
        }
        children = ''
        for (field, value) in self.items:
            children += handlers.get(field, default_handler)(field, value)
        return children


class Overview(object):
    """The HIT-level ``<Overview>`` section of a QuestionForm."""

    OVERVIEW_XML_TEMPLATE = """<Overview>%s</Overview>"""

    def __init__(self):
        self.ordered_content = OrderedContent()

    def append(self, field, value):
        self.ordered_content.append(field, value)

    def get_as_params(self, label='Overview'):
        return {label: self.get_as_xml()}

    def get_as_xml(self):
        return Overview.OVERVIEW_XML_TEMPLATE % (self.ordered_content.get_as_xml())


class QuestionForm(object):
    """A container of Overview/Question items serialized as a QuestionForm."""

    QUESTIONFORM_SCHEMA_LOCATION = "http://mechanicalturk.amazonaws.com/AWSMechanicalTurkDataSchemas/2005-10-01/QuestionForm.xsd"
    QUESTIONFORM_XML_TEMPLATE = """<QuestionForm xmlns="%s">%s</QuestionForm>"""

    def __init__(self):
        self.items = []

    def append(self, item):
        "Expects field type and value"
        self.items.append(item)

    def get_as_xml(self):
        # Each item must itself expose get_as_xml() (Overview, Question, ...).
        xml = ''.join(item.get_as_xml() for item in self.items)
        return QuestionForm.QUESTIONFORM_XML_TEMPLATE % (
            QuestionForm.QUESTIONFORM_SCHEMA_LOCATION, xml)


class QuestionContent(object):
    """The ``<QuestionContent>`` element: ordered content shown to workers."""

    QUESTIONCONTENT_XML_TEMPLATE = """<QuestionContent>%s</QuestionContent>"""

    def __init__(self):
        self.ordered_content = OrderedContent()

    def append(self, field, value):
        self.ordered_content.append(field, value)

    def get_as_xml(self):
        return QuestionContent.QUESTIONCONTENT_XML_TEMPLATE % (self.ordered_content.get_as_xml())


class AnswerSpecification(object):
    """Wraps a concrete answer type (FreeText/FileUpload/Selection)."""

    ANSWERSPECIFICATION_XML_TEMPLATE = """<AnswerSpecification>%s</AnswerSpecification>"""

    def __init__(self, spec):
        self.spec = spec

    def get_as_xml(self):
        # Removed an unused placeholder local from the original.
        return AnswerSpecification.ANSWERSPECIFICATION_XML_TEMPLATE % self.spec.get_as_xml()


class FreeTextAnswer(object):
    """A free-text answer with optional length/numeric/regex constraints."""

    FREETEXTANSWER_XML_TEMPLATE = """<FreeTextAnswer>%s%s</FreeTextAnswer>"""  # (constraints, default)
    FREETEXTANSWER_CONSTRAINTS_XML_TEMPLATE = """<Constraints>%s%s%s</Constraints>"""  # (is_numeric_xml, length_xml, regex_xml)
    FREETEXTANSWER_LENGTH_XML_TEMPLATE = """<Length %s %s />"""  # (min_length_attr, max_length_attr)
    FREETEXTANSWER_ISNUMERIC_XML_TEMPLATE = """<IsNumeric %s %s />"""  # (min_value_attr, max_value_attr)
    FREETEXTANSWER_DEFAULTTEXT_XML_TEMPLATE = """<DefaultText>%s</DefaultText>"""  # (default)

    def __init__(self, default=None, min_length=None, max_length=None,
                 is_numeric=False, min_value=None, max_value=None,
                 format_regex=None):
        self.default = default
        self.min_length = min_length
        self.max_length = max_length
        self.is_numeric = is_numeric
        self.min_value = min_value
        self.max_value = max_value
        # format_regex, when given, is a mapping with key 'regex' and
        # optional 'error_text' / 'flags' keys.
        self.format_regex = format_regex

    def get_as_xml(self):
        """Render as a ``<FreeTextAnswer>`` XML fragment.

        BUG FIX vs. the original: the minLength/maxLength attributes were
        built from the raw template strings without applying the ``%``
        formatting, so a literal ``%d`` leaked into the XML.  The bounds
        checks also use ``is not None`` so that 0 is a usable bound.
        """
        is_numeric_xml = ""
        if self.is_numeric:
            min_value_attr = ""
            max_value_attr = ""
            if self.min_value is not None:
                min_value_attr = """minValue="%d" """ % self.min_value
            if self.max_value is not None:
                max_value_attr = """maxValue="%d" """ % self.max_value
            is_numeric_xml = FreeTextAnswer.FREETEXTANSWER_ISNUMERIC_XML_TEMPLATE % (min_value_attr, max_value_attr)

        length_xml = ""
        if self.min_length is not None or self.max_length is not None:
            min_length_attr = ""
            max_length_attr = ""
            if self.min_length is not None:
                min_length_attr = """minLength="%d" """ % self.min_length
            if self.max_length is not None:
                max_length_attr = """maxLength="%d" """ % self.max_length
            length_xml = FreeTextAnswer.FREETEXTANSWER_LENGTH_XML_TEMPLATE % (min_length_attr, max_length_attr)

        regex_xml = ""
        if self.format_regex:
            format_regex_attribs = '''regex="%s"''' % self.format_regex['regex']
            error_text = self.format_regex.get('error_text', None)
            if error_text:
                format_regex_attribs += ' errorText="%s"' % error_text
            flags = self.format_regex.get('flags', None)
            if flags:
                format_regex_attribs += ' flags="%s"' % flags
            regex_xml = """<AnswerFormatRegex %s/>""" % format_regex_attribs

        constraints_xml = ""
        if is_numeric_xml or length_xml or regex_xml:
            constraints_xml = FreeTextAnswer.FREETEXTANSWER_CONSTRAINTS_XML_TEMPLATE % (is_numeric_xml, length_xml, regex_xml)

        default_xml = ""
        if self.default is not None:
            default_xml = FreeTextAnswer.FREETEXTANSWER_DEFAULTTEXT_XML_TEMPLATE % self.default

        return FreeTextAnswer.FREETEXTANSWER_XML_TEMPLATE % (constraints_xml, default_xml)


class FileUploadAnswer(object):
    """An answer consisting of an uploaded file within a size range."""

    FILEUPLOADANSWER_XML_TEMLPATE = """<FileUploadAnswer><MinFileSizeInBytes>%d</MinFileSizeInBytes><MaxFileSizeInBytes>%d</MaxFileSizeInBytes></FileUploadAnswer>"""  # (min, max)
    DEFAULT_MIN_SIZE = 1024  # 1K (completely arbitrary!)
    DEFAULT_MAX_SIZE = 5 * 1024 * 1024  # 5MB (completely arbitrary!)

    def __init__(self, min=None, max=None):
        # ``min``/``max`` shadow builtins but are part of the public
        # signature, so the names are kept for compatibility.
        self.min = min
        self.max = max
        if self.min is None:
            self.min = FileUploadAnswer.DEFAULT_MIN_SIZE
        if self.max is None:
            self.max = FileUploadAnswer.DEFAULT_MAX_SIZE

    def get_as_xml(self):
        return FileUploadAnswer.FILEUPLOADANSWER_XML_TEMLPATE % (self.min, self.max)


class SelectionAnswer(object):
    """
    A class to generate SelectionAnswer XML data structures.
    Does not yet implement Binary selection options.
    """
    SELECTIONANSWER_XML_TEMPLATE = """<SelectionAnswer>%s%s<Selections>%s</Selections></SelectionAnswer>"""  # % (count_xml, style_xml, selections_xml)
    SELECTION_XML_TEMPLATE = """<Selection><SelectionIdentifier>%s</SelectionIdentifier>%s</Selection>"""  # (identifier, value_xml)
    SELECTION_VALUE_XML_TEMPLATE = """<%s>%s</%s>"""  # (type, value, type)
    STYLE_XML_TEMPLATE = """<StyleSuggestion>%s</StyleSuggestion>"""  # (style)
    MIN_SELECTION_COUNT_XML_TEMPLATE = """<MinSelectionCount>%s</MinSelectionCount>"""  # count
    MAX_SELECTION_COUNT_XML_TEMPLATE = """<MaxSelectionCount>%s</MaxSelectionCount>"""  # count
    ACCEPTED_STYLES = ['radiobutton', 'dropdown', 'checkbox', 'list', 'combobox', 'multichooser']
    OTHER_SELECTION_ELEMENT_NAME = 'OtherSelection'

    def __init__(self, min=1, max=1, style=None, selections=None, type='text', other=False):
        # ``selections`` is a list of (content, identifier) tuples.
        if style is not None:
            if style in SelectionAnswer.ACCEPTED_STYLES:
                self.style_suggestion = style
            else:
                raise ValueError("style '%s' not recognized; should be one of %s" % (style, ', '.join(SelectionAnswer.ACCEPTED_STYLES)))
        else:
            self.style_suggestion = None

        if selections is None:
            raise ValueError("SelectionAnswer.__init__(): selections must be a non-empty list of (content, identifier) tuples")
        else:
            self.selections = selections

        self.min_selections = min
        self.max_selections = max

        # NOTE: assertion kept for interface compatibility (callers may
        # rely on AssertionError); stripped under python -O.
        assert len(selections) >= self.min_selections, "# of selections is less than minimum of %d" % self.min_selections
        #assert len(selections) <= self.max_selections, "# of selections exceeds maximum of %d" % self.max_selections

        self.type = type
        self.other = other

    def get_as_xml(self):
        """Render as a ``<SelectionAnswer>`` XML fragment."""
        if self.type == 'text':
            TYPE_TAG = "Text"
        elif self.type == 'binary':
            TYPE_TAG = "Binary"
        else:
            raise ValueError("illegal type: %s; must be either 'text' or 'binary'" % str(self.type))

        # build list of <Selection> elements
        selections_xml = ""
        for tpl in self.selections:
            value_xml = SelectionAnswer.SELECTION_VALUE_XML_TEMPLATE % (TYPE_TAG, tpl[0], TYPE_TAG)
            selection_xml = SelectionAnswer.SELECTION_XML_TEMPLATE % (tpl[1], value_xml)
            selections_xml += selection_xml

        if self.other:
            # add OtherSelection element as xml if available
            if hasattr(self.other, 'get_as_xml'):
                # isinstance (vs. exact type) also admits FreeTextAnswer
                # subclasses; strictly wider than the original check.
                assert isinstance(self.other, FreeTextAnswer), 'OtherSelection can only be a FreeTextAnswer'
                selections_xml += self.other.get_as_xml().replace('FreeTextAnswer', 'OtherSelection')
            else:
                selections_xml += "<OtherSelection />"

        if self.style_suggestion is not None:
            style_xml = SelectionAnswer.STYLE_XML_TEMPLATE % self.style_suggestion
        else:
            style_xml = ""

        # Per MTurk, min/max counts are meaningless for radiobuttons.
        if self.style_suggestion != 'radiobutton':
            count_xml = SelectionAnswer.MIN_SELECTION_COUNT_XML_TEMPLATE % self.min_selections
            count_xml += SelectionAnswer.MAX_SELECTION_COUNT_XML_TEMPLATE % self.max_selections
        else:
            count_xml = ""

        return SelectionAnswer.SELECTIONANSWER_XML_TEMPLATE % (count_xml, style_xml, selections_xml)
apache-2.0
frouty/odoo_oph
addons/l10n_ec/__openerp__.py
170
1775
# -*- coding: utf-8 -*- ############################################################################## # # Copyright (C) 2010-2012 Cristian Salamea Gnuthink Software Labs Cia. Ltda # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Ecuador - Accounting', 'version': '1.1', 'category': 'Localization/Account Charts', 'description': """ This is the base module to manage the accounting chart for Ecuador in OpenERP. ============================================================================== Accounting chart and localization for Ecuador. """, 'author': 'Gnuthink Co.Ltd.', 'depends': [ 'account', 'base_vat', 'base_iban', 'account_chart', ], 'data': [ 'account_tax_code.xml', 'account_chart.xml', 'account_tax.xml', 'l10n_chart_ec_wizard.xml', ], 'demo': [], 'installable': True, 'images': ['images/config_chart_l10n_ec.jpeg','images/l10n_ec_chart.jpeg'], } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
greenapes/python-blitline
tools/scrape.py
1
1910
"""Scrape the Blitline docs page and emit Python wrapper classes.

Fetches http://www.blitline.com/docs/functions, extracts each documented
function's name and description from the accordion markup, and prints
generated ``Function`` subclass source code to stdout.
"""
import re
import string
import urllib.request, urllib.error, urllib.parse
from datetime import datetime
from lxml import etree


def function_name(text, rex=re.compile(r'"name"\s*:\s*"([^"]+)"')):
    # Pull the "name" value out of a JSON example snippet.  The compiled
    # pattern lives in a default argument so it is built only once.
    match = rex.search(text)
    if match:
        return match.group(1)
    return None


# Fetch and parse the live docs page (network access required).
parser = etree.HTMLParser()
tree = etree.parse(urllib.request.urlopen(
    "http://www.blitline.com/docs/functions"), parser)

functions_data = []
# One accordion panel per documented function.
functions = tree.xpath(
    """id("accordion")/div[contains(@class, "panel")]""")
for f in functions:
    # The JSON example code block; panels without one are skipped.
    code_nodes = f.xpath("./div[contains(@class, 'panel-collapse')]//pre")
    if not code_nodes:
        continue
    elif len(code_nodes) > 1:
        raise ValueError("HTML mismatch, too many codes")
    else:
        code = code_nodes[0].text
    fname = function_name(code)
    if not fname:
        raise ValueError("HTML mismatch, function name not found")
    # Short human-readable description; optional.
    doc_nodes = f.xpath(".//h4//div[contains(@class, 'pull-left')][last()]/p")
    if not doc_nodes:
        doc = ''
    elif len(doc_nodes) > 1:
        raise ValueError("HTML mismatch, too many descriptions")
    else:
        doc = doc_nodes[0].text.strip()
    functions_data.append((fname, doc))

# some functions not listed in the online page
functions_data.extend([
    ('vintage', 'Vintage Filter'),
    ('lomo', 'Lomo Filter'),
    ('photograph', 'Photograph Filter'),
    ('savannah', 'Savannah Filter'),
    ('xpro', 'Xpro Filter'),
    ('celsius', 'Celsius Filter'),
    ('stackhouse', 'Stackhouse Filter'),
])

# Emit the generated module: header, then one class per function.
fragments = [
    "#autogenerated on %s" % datetime.now(),
    "from blitline import Function",
]

# NOTE(review): the exact indentation inside this template was lost in the
# source formatting; reconstructed to produce valid Python -- verify output.
tpl = '''
class {cname}(Function):
    """
    {doc}
    """
    function_name = "{fname}"
'''

for fname, doc in functions_data:
    # e.g. 'some_filter' -> 'SomeFilter'
    cname = string.capwords(fname, '_').replace('_', '')
    fragments.append(tpl.format(cname=cname, fname=fname, doc=doc))

print('\n'.join(fragments))
mit
awemulya/fieldsight-kobocat
onadata/apps/viewer/south_migrations/0003_auto__add_field_datadictionary_user.py
13
8720
# encoding: utf-8
"""South schema migration: adds the nullable ``user`` FK (to auth.User)
on the ``odk_viewer.DataDictionary`` model."""
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models


class Migration(SchemaMigration):

    def forwards(self, orm):
        # Adding field 'DataDictionary.user'
        db.add_column('odk_viewer_datadictionary', 'user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'], null=True), keep_default=False)

    def backwards(self, orm):
        # Deleting field 'DataDictionary.user'
        db.delete_column('odk_viewer_datadictionary', 'user_id')

    # Frozen ORM snapshot used by South to reconstruct model state at this
    # point in history; generated, do not edit by hand.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'logger.instance': {
            'Meta': {'object_name': 'Instance'},
            'date': ('django.db.models.fields.DateField', [], {'null': 'True'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'status': ('django.db.models.fields.CharField', [], {'default': "u'submitted_via_web'", 'max_length': '20'}),
            'survey_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['logger.SurveyType']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': "orm['auth.User']"}),
            'xform': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'surveys'", 'null': 'True', 'to': "orm['logger.XForm']"}),
            'xml': ('django.db.models.fields.TextField', [], {})
        },
        'logger.surveytype': {
            'Meta': {'object_name': 'SurveyType'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'slug': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'logger.xform': {
            'Meta': {'ordering': "('id_string',)", 'unique_together': "(('user', 'id_string'),)", 'object_name': 'XForm'},
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'downloadable': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'id_string': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'db_index': 'True'}),
            'title': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'xforms'", 'null': 'True', 'to': "orm['auth.User']"}),
            'xml': ('django.db.models.fields.TextField', [], {})
        },
        'odk_viewer.columnrename': {
            'Meta': {'object_name': 'ColumnRename'},
            'column_name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'xpath': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'})
        },
        'odk_viewer.datadictionary': {
            'Meta': {'object_name': 'DataDictionary'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'json': ('django.db.models.fields.TextField', [], {}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'xform': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'data_dictionary'", 'unique': 'True', 'to': "orm['logger.XForm']"}),
            'xls': ('django.db.models.fields.files.FileField', [], {'max_length': '100', 'null': 'True'})
        },
        'odk_viewer.instancemodification': {
            'Meta': {'object_name': 'InstanceModification'},
            'action': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
            'date_created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
            'date_modified': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'modifications'", 'to': "orm['logger.Instance']"}),
            'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']", 'null': 'True'}),
            'xpath': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'odk_viewer.parsedinstance': {
            'Meta': {'object_name': 'ParsedInstance'},
            'end_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'instance': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'parsed_instance'", 'unique': 'True', 'to': "orm['logger.Instance']"}),
            'lat': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
            'lng': ('django.db.models.fields.FloatField', [], {'null': 'True'}),
            'start_time': ('django.db.models.fields.DateTimeField', [], {'null': 'True'})
        }
    }

    complete_apps = ['viewer']
bsd-2-clause
gojira/tensorflow
tensorflow/contrib/autograph/utils/type_hints.py
12
1477
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""No-op utilities that provide static type hints.

These are used when the data type is not known at creation, for instance in
the case of empty lists.
"""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function


def set_element_type(entity, dtype, shape=None):
  """Marks `entity` as holding items of the given type.

  Purely declarative: nothing happens at runtime.  The hint is picked up
  statically so that the staged TensorFlow ops can reflect and assert the
  declared element type.

  Args:
    entity: A Tensor or TensorArray.
    dtype: TensorFlow dtype value to assert for entity.
    shape: Optional shape to assert for entity.

  Returns:
    The value of entity, unchanged.
  """
  # Discard both hints and pass the value straight through.
  del dtype, shape
  return entity
apache-2.0
gojira/tensorflow
tensorflow/python/keras/layers/recurrent_test.py
9
20464
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for recurrent layers functionality other than GRU, LSTM, SimpleRNN. See also: lstm_test.py, gru_test.py, simplernn_test.py. """ from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.python import keras from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import state_ops from tensorflow.python.platform import test class RNNTest(test.TestCase): def test_minimal_rnn_cell_non_layer(self): class MinimalRNNCell(object): def __init__(self, units, input_dim): self.units = units self.state_size = units self.kernel = keras.backend.variable( np.random.random((input_dim, units))) def call(self, inputs, states): prev_output = states[0] output = keras.backend.dot(inputs, self.kernel) + prev_output return output, [output] with self.test_session(): # Basic test case. cell = MinimalRNNCell(32, 5) x = keras.Input((None, 5)) layer = keras.layers.RNN(cell) y = layer(x) model = keras.models.Model(x, y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32))) # Test stacking. 
cells = [MinimalRNNCell(8, 5), MinimalRNNCell(32, 8), MinimalRNNCell(32, 32)] layer = keras.layers.RNN(cells) y = layer(x) model = keras.models.Model(x, y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32))) def test_minimal_rnn_cell_non_layer_multiple_states(self): class MinimalRNNCell(object): def __init__(self, units, input_dim): self.units = units self.state_size = (units, units) self.kernel = keras.backend.variable( np.random.random((input_dim, units))) def call(self, inputs, states): prev_output_1 = states[0] prev_output_2 = states[1] output = keras.backend.dot(inputs, self.kernel) output += prev_output_1 output -= prev_output_2 return output, [output * 2, output * 3] with self.test_session(): # Basic test case. cell = MinimalRNNCell(32, 5) x = keras.Input((None, 5)) layer = keras.layers.RNN(cell) y = layer(x) model = keras.models.Model(x, y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32))) # Test stacking. 
cells = [MinimalRNNCell(8, 5), MinimalRNNCell(16, 8), MinimalRNNCell(32, 16)] layer = keras.layers.RNN(cells) assert layer.cell.state_size == (32, 32, 16, 16, 8, 8) y = layer(x) model = keras.models.Model(x, y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32))) def test_minimal_rnn_cell_layer(self): class MinimalRNNCell(keras.layers.Layer): def __init__(self, units, **kwargs): self.units = units self.state_size = units super(MinimalRNNCell, self).__init__(**kwargs) def build(self, input_shape): self.kernel = self.add_weight(shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.built = True def call(self, inputs, states): prev_output = states[0] h = keras.backend.dot(inputs, self.kernel) output = h + keras.backend.dot(prev_output, self.recurrent_kernel) return output, [output] def get_config(self): config = {'units': self.units} base_config = super(MinimalRNNCell, self).get_config() return dict(list(base_config.items()) + list(config.items())) with self.test_session(): # Test basic case. x = keras.Input((None, 5)) cell = MinimalRNNCell(32) layer = keras.layers.RNN(cell) y = layer(x) model = keras.models.Model(x, y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32))) # Test basic case serialization. x_np = np.random.random((6, 5, 5)) y_np = model.predict(x_np) weights = model.get_weights() config = layer.get_config() with keras.utils.CustomObjectScope({'MinimalRNNCell': MinimalRNNCell}): layer = keras.layers.RNN.from_config(config) y = layer(x) model = keras.models.Model(x, y) model.set_weights(weights) y_np_2 = model.predict(x_np) self.assertAllClose(y_np, y_np_2, atol=1e-4) # Test stacking. 
cells = [MinimalRNNCell(8), MinimalRNNCell(12), MinimalRNNCell(32)] layer = keras.layers.RNN(cells) y = layer(x) model = keras.models.Model(x, y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch(np.zeros((6, 5, 5)), np.zeros((6, 32))) # Test stacked RNN serialization. x_np = np.random.random((6, 5, 5)) y_np = model.predict(x_np) weights = model.get_weights() config = layer.get_config() with keras.utils.CustomObjectScope({'MinimalRNNCell': MinimalRNNCell}): layer = keras.layers.RNN.from_config(config) y = layer(x) model = keras.models.Model(x, y) model.set_weights(weights) y_np_2 = model.predict(x_np) self.assertAllClose(y_np, y_np_2, atol=1e-4) def test_rnn_cell_with_constants_layer(self): class RNNCellWithConstants(keras.layers.Layer): def __init__(self, units, **kwargs): self.units = units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape, list): raise TypeError('expects constants shape') [input_shape, constant_shape] = input_shape # will (and should) raise if more than one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform', name='constant_kernel') self.built = True def call(self, inputs, states, constants): [prev_output] = states [constant] = constants h_input = keras.backend.dot(inputs, self.input_kernel) h_state = keras.backend.dot(prev_output, self.recurrent_kernel) h_const = keras.backend.dot(constant, self.constant_kernel) output = h_input + h_state + h_const return output, [output] def get_config(self): config = {'units': self.units} base_config = super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items())) 
with self.test_session(): # Test basic case. x = keras.Input((None, 5)) c = keras.Input((3,)) cell = RNNCellWithConstants(32) layer = keras.layers.RNN(cell) y = layer(x, constants=c) model = keras.models.Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 3))], np.zeros((6, 32)) ) with self.test_session(): # Test basic case serialization. x_np = np.random.random((6, 5, 5)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np, c_np]) weights = model.get_weights() config = layer.get_config() custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with keras.utils.CustomObjectScope(custom_objects): layer = keras.layers.RNN.from_config(config.copy()) y = layer(x, constants=c) model = keras.models.Model([x, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, c_np]) self.assertAllClose(y_np, y_np_2, atol=1e-4) with self.test_session(): # test flat list inputs. with keras.utils.CustomObjectScope(custom_objects): layer = keras.layers.RNN.from_config(config.copy()) y = layer([x, c]) model = keras.models.Model([x, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, c_np]) self.assertAllClose(y_np, y_np_3, atol=1e-4) with self.test_session(): # Test stacking. cells = [keras.layers.recurrent.GRUCell(8), RNNCellWithConstants(12), RNNCellWithConstants(32)] layer = keras.layers.recurrent.RNN(cells) y = layer(x, constants=c) model = keras.models.Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 3))], np.zeros((6, 32)) ) with self.test_session(): # Test GRUCell reset_after property. 
x = keras.Input((None, 5)) c = keras.Input((3,)) cells = [keras.layers.recurrent.GRUCell(32, reset_after=True)] layer = keras.layers.recurrent.RNN(cells) y = layer(x, constants=c) model = keras.models.Model([x, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 3))], np.zeros((6, 32)) ) with self.test_session(): # Test stacked RNN serialization x_np = np.random.random((6, 5, 5)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np, c_np]) weights = model.get_weights() config = layer.get_config() with keras.utils.CustomObjectScope(custom_objects): layer = keras.layers.recurrent.RNN.from_config(config.copy()) y = layer(x, constants=c) model = keras.models.Model([x, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, c_np]) self.assertAllClose(y_np, y_np_2, atol=1e-4) def test_rnn_cell_with_constants_layer_passing_initial_state(self): class RNNCellWithConstants(keras.layers.Layer): def __init__(self, units, **kwargs): self.units = units self.state_size = units super(RNNCellWithConstants, self).__init__(**kwargs) def build(self, input_shape): if not isinstance(input_shape, list): raise TypeError('expects constants shape') [input_shape, constant_shape] = input_shape # will (and should) raise if more than one constant passed self.input_kernel = self.add_weight( shape=(input_shape[-1], self.units), initializer='uniform', name='kernel') self.recurrent_kernel = self.add_weight( shape=(self.units, self.units), initializer='uniform', name='recurrent_kernel') self.constant_kernel = self.add_weight( shape=(constant_shape[-1], self.units), initializer='uniform', name='constant_kernel') self.built = True def call(self, inputs, states, constants): [prev_output] = states [constant] = constants h_input = keras.backend.dot(inputs, self.input_kernel) h_state = keras.backend.dot(prev_output, self.recurrent_kernel) h_const = keras.backend.dot(constant, self.constant_kernel) output = h_input + h_state + h_const 
return output, [output] def get_config(self): config = {'units': self.units} base_config = super(RNNCellWithConstants, self).get_config() return dict(list(base_config.items()) + list(config.items())) with self.test_session(): # Test basic case. x = keras.Input((None, 5)) c = keras.Input((3,)) s = keras.Input((32,)) cell = RNNCellWithConstants(32) layer = keras.layers.RNN(cell) y = layer(x, initial_state=s, constants=c) model = keras.models.Model([x, s, c], y) model.compile(optimizer='rmsprop', loss='mse') model.train_on_batch( [np.zeros((6, 5, 5)), np.zeros((6, 32)), np.zeros((6, 3))], np.zeros((6, 32)) ) with self.test_session(): # Test basic case serialization. x_np = np.random.random((6, 5, 5)) s_np = np.random.random((6, 32)) c_np = np.random.random((6, 3)) y_np = model.predict([x_np, s_np, c_np]) weights = model.get_weights() config = layer.get_config() custom_objects = {'RNNCellWithConstants': RNNCellWithConstants} with keras.utils.CustomObjectScope(custom_objects): layer = keras.layers.RNN.from_config(config.copy()) y = layer(x, initial_state=s, constants=c) model = keras.models.Model([x, s, c], y) model.set_weights(weights) y_np_2 = model.predict([x_np, s_np, c_np]) self.assertAllClose(y_np, y_np_2, atol=1e-4) # verify that state is used y_np_2_different_s = model.predict([x_np, s_np + 10., c_np]) with self.assertRaises(AssertionError): self.assertAllClose(y_np, y_np_2_different_s, atol=1e-4) with self.test_session(): # test flat list inputs with keras.utils.CustomObjectScope(custom_objects): layer = keras.layers.RNN.from_config(config.copy()) y = layer([x, s, c]) model = keras.models.Model([x, s, c], y) model.set_weights(weights) y_np_3 = model.predict([x_np, s_np, c_np]) self.assertAllClose(y_np, y_np_3, atol=1e-4) def test_stacked_rnn_attributes(self): cells = [keras.layers.LSTMCell(1), keras.layers.LSTMCell(1)] layer = keras.layers.RNN(cells) layer.build((None, None, 1)) # Test weights self.assertEqual(len(layer.trainable_weights), 6) cells[0].trainable 
= False self.assertEqual(len(layer.trainable_weights), 3) self.assertEqual(len(layer.non_trainable_weights), 3) # Test `get_losses_for` and `losses` x = keras.Input((None, 1)) loss_1 = math_ops.reduce_sum(x) loss_2 = math_ops.reduce_sum(cells[0].kernel) cells[0].add_loss(loss_1, inputs=x) cells[0].add_loss(loss_2) self.assertEqual(len(layer.losses), 2) self.assertEqual(layer.get_losses_for(None), [loss_2]) self.assertEqual(layer.get_losses_for(x), [loss_1]) # Test `get_updates_for` and `updates` cells = [keras.layers.LSTMCell(1), keras.layers.LSTMCell(1)] layer = keras.layers.RNN(cells) layer.build((None, None, 1)) x = keras.Input((None, 1)) update_1 = state_ops.assign_add(cells[0].kernel, x[0, 0, 0] * cells[0].kernel) update_2 = state_ops.assign_add(cells[0].kernel, array_ops.ones_like(cells[0].kernel)) cells[0].add_update(update_1, inputs=x) cells[0].add_update(update_2) self.assertEqual(len(layer.updates), 2) self.assertEqual(len(layer.get_updates_for(None)), 1) self.assertEqual(len(layer.get_updates_for(x)), 1) def test_rnn_dynamic_trainability(self): layer_class = keras.layers.SimpleRNN embedding_dim = 4 units = 3 layer = layer_class(units) layer.build((None, None, embedding_dim)) self.assertEqual(len(layer.weights), 3) self.assertEqual(len(layer.trainable_weights), 3) self.assertEqual(len(layer.non_trainable_weights), 0) layer.trainable = False self.assertEqual(len(layer.weights), 3) self.assertEqual(len(layer.trainable_weights), 0) self.assertEqual(len(layer.non_trainable_weights), 3) layer.trainable = True self.assertEqual(len(layer.weights), 3) self.assertEqual(len(layer.trainable_weights), 3) self.assertEqual(len(layer.non_trainable_weights), 0) def test_state_reuse_with_dropout(self): layer_class = keras.layers.SimpleRNN embedding_dim = 4 units = 3 timesteps = 2 num_samples = 2 with self.test_session(): input1 = keras.Input(batch_shape=(num_samples, timesteps, embedding_dim)) layer = layer_class(units, return_state=True, return_sequences=True, 
dropout=0.2) state = layer(input1)[1:] input2 = keras.Input(batch_shape=(num_samples, timesteps, embedding_dim)) output = layer_class(units)(input2, initial_state=state) model = keras.Model([input1, input2], output) inputs = [np.random.random((num_samples, timesteps, embedding_dim)), np.random.random((num_samples, timesteps, embedding_dim))] model.predict(inputs) def test_builtin_rnn_cell_serialization(self): for cell_class in [keras.layers.SimpleRNNCell, keras.layers.GRUCell, keras.layers.LSTMCell]: with self.test_session(): # Test basic case. x = keras.Input((None, 5)) cell = cell_class(32) layer = keras.layers.RNN(cell) y = layer(x) model = keras.models.Model(x, y) model.compile(optimizer='rmsprop', loss='mse') # Test basic case serialization. x_np = np.random.random((6, 5, 5)) y_np = model.predict(x_np) weights = model.get_weights() config = layer.get_config() layer = keras.layers.RNN.from_config(config) y = layer(x) model = keras.models.Model(x, y) model.set_weights(weights) y_np_2 = model.predict(x_np) self.assertAllClose(y_np, y_np_2, atol=1e-4) # Test stacking. cells = [cell_class(8), cell_class(12), cell_class(32)] layer = keras.layers.RNN(cells) y = layer(x) model = keras.models.Model(x, y) model.compile(optimizer='rmsprop', loss='mse') # Test stacked RNN serialization. 
x_np = np.random.random((6, 5, 5)) y_np = model.predict(x_np) weights = model.get_weights() config = layer.get_config() layer = keras.layers.RNN.from_config(config) y = layer(x) model = keras.models.Model(x, y) model.set_weights(weights) y_np_2 = model.predict(x_np) self.assertAllClose(y_np, y_np_2, atol=1e-4) def test_stacked_rnn_dropout(self): cells = [keras.layers.LSTMCell(3, dropout=0.1, recurrent_dropout=0.1), keras.layers.LSTMCell(3, dropout=0.1, recurrent_dropout=0.1)] layer = keras.layers.RNN(cells) with self.test_session(): x = keras.Input((None, 5)) y = layer(x) model = keras.models.Model(x, y) model.compile('sgd', 'mse') x_np = np.random.random((6, 5, 5)) y_np = np.random.random((6, 3)) model.train_on_batch(x_np, y_np) def test_stacked_rnn_compute_output_shape(self): cells = [keras.layers.LSTMCell(3), keras.layers.LSTMCell(6)] embedding_dim = 4 timesteps = 2 layer = keras.layers.RNN(cells, return_state=True, return_sequences=True) output_shape = layer.compute_output_shape((None, timesteps, embedding_dim)) expected_output_shape = [(None, timesteps, 6), (None, 6), (None, 6), (None, 3), (None, 3)] self.assertEqual( [tuple(o.as_list()) for o in output_shape], expected_output_shape) if __name__ == '__main__': test.main()
apache-2.0
sk413025/thug
src/DOM/UserProfile.py
9
3165
#!/usr/bin/env python
#
# UserProfile.py
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307 USA


class UserProfile(object):
    """Emulation of the legacy Internet Explorer ``userProfile`` DOM object.

    Stores vCard attributes in an internal dictionary and keeps a queue of
    pending read requests, mirroring the addReadRequest / doReadRequest /
    clearRequest API.
    """

    # The closed set of vCard schema names the store accepts; any other
    # attribute name is rejected (addReadRequest) or silently ignored
    # (setAttribute / getAttribute).
    vCardSchemas = ("vCard.Business.City",
                    "vCard.Business.Country",
                    "vCard.Business.Fax",
                    "vCard.Business.Phone",
                    "vCard.Business.State",
                    "vCard.Business.StreetAddress",
                    "vCard.Business.URL",
                    "vCard.Business.Zipcode",
                    "vCard.Cellular",
                    "vCard.Company",
                    "vCard.Department",
                    "vCard.DisplayName",
                    "vCard.Email",
                    "vCard.FirstName",
                    "vCard.Gender",
                    "vCard.Home.City",
                    "vCard.Home.Country",
                    "vCard.Home.Fax",
                    "vCard.Home.Phone",
                    "vCard.Home.State",
                    "vCard.Home.StreetAddress",
                    "vCard.Home.Zipcode",
                    "vCard.Homepage",
                    "vCard.JobTitle",
                    "vCard.LastName",
                    "vCard.MiddleName",
                    "vCard.Notes",
                    "vCard.Office",
                    "vCard.Pager")

    def __init__(self):
        self._vCard = dict()   # vCard attribute name -> value
        self._queue = list()   # queued read-request names, in request order

    def addReadRequest(self, vCardName, reserved = None):
        """Queue a read request for ``vCardName``.

        Returns True when the name matches a known schema (compared
        case-insensitively) and was queued, False otherwise.
        """
        for schema in self.vCardSchemas:
            if schema.lower() == vCardName.lower():
                # BUG FIX: the original appended to the non-existent
                # attribute `self.queue`, raising AttributeError on every
                # successful request; the queue created in __init__ is
                # `self._queue`.
                self._queue.append(vCardName)
                return True

        return False

    def doReadRequest(self, usageCode, displayName = None, domain = None, path = None, expiration = None, reserved = None):
        """Intentional no-op: the emulated API performs no real user prompt."""
        pass

    def clearRequest(self):
        """Drop all queued read requests."""
        del self._queue[:]

    def getAttribute(self, vCardName):
        """Return the value stored under ``vCardName`` (exact-case schema
        match), or None when the name is unknown or unset."""
        if vCardName not in self.vCardSchemas:
            return None

        return self._vCard.get(vCardName)

    def setAttribute(self, vCardName, vCardValue, caseSens = 1):
        """Store ``vCardValue`` under ``vCardName``.

        With ``caseSens`` truthy the name must match a schema exactly;
        otherwise the first case-insensitive schema match is used.  Unknown
        names are silently ignored, matching the permissive DOM behaviour.
        """
        if caseSens:
            if vCardName in self.vCardSchemas:
                self._vCard[vCardName] = vCardValue
            return

        for schema in self.vCardSchemas:
            if schema.lower() == vCardName.lower():
                self._vCard[schema] = vCardValue
                return
gpl-2.0
python-gitlab/python-gitlab
gitlab/v4/objects/hooks.py
1
3071
from gitlab.base import RequiredOptional, RESTManager, RESTObject
from gitlab.mixins import CRUDMixin, NoUpdateMixin, ObjectDeleteMixin, SaveMixin

__all__ = [
    "Hook",
    "HookManager",
    "ProjectHook",
    "ProjectHookManager",
    "GroupHook",
    "GroupHookManager",
]


class Hook(ObjectDeleteMixin, RESTObject):
    """A system (instance-wide) hook."""

    _url = "/hooks"
    _short_print_attr = "url"


class HookManager(NoUpdateMixin, RESTManager):
    """Manager for system hooks (create/list/delete only; no update API)."""

    _path = "/hooks"
    _obj_cls = Hook
    _create_attrs = RequiredOptional(required=("url",))


class ProjectHook(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A webhook attached to a single project."""

    _short_print_attr = "url"


class ProjectHookManager(CRUDMixin, RESTManager):
    """Manager for project webhooks."""

    _path = "/projects/%(project_id)s/hooks"
    _obj_cls = ProjectHook
    _from_parent_attrs = {"project_id": "id"}
    _create_attrs = RequiredOptional(
        required=("url",),
        optional=(
            "push_events",
            "issues_events",
            "confidential_issues_events",
            "merge_requests_events",
            "tag_push_events",
            "note_events",
            "job_events",
            "pipeline_events",
            "wiki_page_events",
            "enable_ssl_verification",
            "token",
        ),
    )
    _update_attrs = RequiredOptional(
        required=("url",),
        optional=(
            "push_events",
            "issues_events",
            "confidential_issues_events",
            "merge_requests_events",
            "tag_push_events",
            "note_events",
            "job_events",
            "pipeline_events",
            # BUG FIX: the GitLab API attribute is `wiki_page_events`, as
            # used in _create_attrs above and in GroupHookManager; the
            # original listed a non-existent `wiki_events` here, so the
            # flag could never be changed via update().
            "wiki_page_events",
            "enable_ssl_verification",
            "token",
        ),
    )


class GroupHook(SaveMixin, ObjectDeleteMixin, RESTObject):
    """A webhook attached to a group."""

    _short_print_attr = "url"


class GroupHookManager(CRUDMixin, RESTManager):
    """Manager for group webhooks."""

    _path = "/groups/%(group_id)s/hooks"
    _obj_cls = GroupHook
    _from_parent_attrs = {"group_id": "id"}
    _create_attrs = RequiredOptional(
        required=("url",),
        optional=(
            "push_events",
            "issues_events",
            "confidential_issues_events",
            "merge_requests_events",
            "tag_push_events",
            "note_events",
            "confidential_note_events",
            "job_events",
            "pipeline_events",
            "wiki_page_events",
            "deployment_events",
            "releases_events",
            "subgroup_events",
            "enable_ssl_verification",
            "token",
        ),
    )
    _update_attrs = RequiredOptional(
        required=("url",),
        optional=(
            "push_events",
            "issues_events",
            "confidential_issues_events",
            "merge_requests_events",
            "tag_push_events",
            "note_events",
            "confidential_note_events",
            "job_events",
            "pipeline_events",
            "wiki_page_events",
            "deployment_events",
            "releases_events",
            "subgroup_events",
            "enable_ssl_verification",
            "token",
        ),
    )
lgpl-3.0
kxliugang/edx-platform
lms/djangoapps/staticbook/views.py
91
6351
""" Views for serving static textbooks. """ from django.contrib.auth.decorators import login_required from django.http import Http404 from edxmako.shortcuts import render_to_response from opaque_keys.edx.locations import SlashSeparatedCourseKey from xmodule.annotator_token import retrieve_token from courseware.access import has_access from courseware.courses import get_course_with_access from notes.utils import notes_enabled_for_course from static_replace import replace_static_urls @login_required def index(request, course_id, book_index, page=None): """ Serve static image-based textbooks. """ course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = get_course_with_access(request.user, 'load', course_key) staff_access = bool(has_access(request.user, 'staff', course)) book_index = int(book_index) if book_index < 0 or book_index >= len(course.textbooks): raise Http404("Invalid book index value: {0}".format(book_index)) textbook = course.textbooks[book_index] table_of_contents = textbook.table_of_contents if page is None: page = textbook.start_page return render_to_response( 'staticbook.html', { 'book_index': book_index, 'page': int(page), 'course': course, 'book_url': textbook.book_url, 'table_of_contents': table_of_contents, 'start_page': textbook.start_page, 'end_page': textbook.end_page, 'staff_access': staff_access, }, ) def remap_static_url(original_url, course): """Remap a URL in the ways the course requires.""" # Ick: this should be possible without having to quote and unquote the URL... input_url = "'" + original_url + "'" output_url = replace_static_urls( input_url, getattr(course, 'data_dir', None), course_id=course.id, static_asset_path=course.static_asset_path ) # strip off the quotes again... return output_url[1:-1] @login_required def pdf_index(request, course_id, book_index, chapter=None, page=None): """ Display a PDF textbook. course_id: course for which to display text. The course should have "pdf_textbooks" property defined. 
book index: zero-based index of which PDF textbook to display. chapter: (optional) one-based index into the chapter array of textbook PDFs to display. Defaults to first chapter. Specifying this assumes that there are separate PDFs for each chapter in a textbook. page: (optional) one-based page number to display within the PDF. Defaults to first page. """ course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = get_course_with_access(request.user, 'load', course_key) staff_access = bool(has_access(request.user, 'staff', course)) book_index = int(book_index) if book_index < 0 or book_index >= len(course.pdf_textbooks): raise Http404("Invalid book index value: {0}".format(book_index)) textbook = course.pdf_textbooks[book_index] viewer_params = '&file=' current_url = '' if 'url' in textbook: textbook['url'] = remap_static_url(textbook['url'], course) viewer_params += textbook['url'] current_url = textbook['url'] # then remap all the chapter URLs as well, if they are provided. current_chapter = None if 'chapters' in textbook: for entry in textbook['chapters']: entry['url'] = remap_static_url(entry['url'], course) if chapter is not None: current_chapter = textbook['chapters'][int(chapter) - 1] else: current_chapter = textbook['chapters'][0] viewer_params += current_chapter['url'] current_url = current_chapter['url'] viewer_params += '#zoom=page-fit&disableRange=true' if page is not None: viewer_params += '&amp;page={}'.format(page) if request.GET.get('viewer', '') == 'true': template = 'pdf_viewer.html' else: template = 'static_pdfbook.html' return render_to_response( template, { 'book_index': book_index, 'course': course, 'textbook': textbook, 'chapter': chapter, 'page': page, 'viewer_params': viewer_params, 'current_chapter': current_chapter, 'staff_access': staff_access, 'current_url': current_url, }, ) @login_required def html_index(request, course_id, book_index, chapter=None): """ Display an HTML textbook. 
course_id: course for which to display text. The course should have "html_textbooks" property defined. book index: zero-based index of which HTML textbook to display. chapter: (optional) one-based index into the chapter array of textbook HTML files to display. Defaults to first chapter. Specifying this assumes that there are separate HTML files for each chapter in a textbook. """ course_key = SlashSeparatedCourseKey.from_deprecated_string(course_id) course = get_course_with_access(request.user, 'load', course_key) staff_access = bool(has_access(request.user, 'staff', course)) notes_enabled = notes_enabled_for_course(course) book_index = int(book_index) if book_index < 0 or book_index >= len(course.html_textbooks): raise Http404("Invalid book index value: {0}".format(book_index)) textbook = course.html_textbooks[book_index] if 'url' in textbook: textbook['url'] = remap_static_url(textbook['url'], course) # then remap all the chapter URLs as well, if they are provided. if 'chapters' in textbook: for entry in textbook['chapters']: entry['url'] = remap_static_url(entry['url'], course) student = request.user return render_to_response( 'static_htmlbook.html', { 'book_index': book_index, 'course': course, 'textbook': textbook, 'chapter': chapter, 'student': student, 'staff_access': staff_access, 'notes_enabled': notes_enabled, 'storage': course.annotation_storage_url, 'token': retrieve_token(student.email, course.annotation_token_secret), }, )
agpl-3.0
DistrictDataLabs/topicmaps
topics/models.py
1
3031
# topics.models
# Topic modeling for data survey analysis
#
# Author:   Benjamin Bengfort <bbengfort@districtdatalabs.com>
# Created:  Tue Sep 08 19:43:58 2015 -0400
#
# Copyright (C) 2015 District Data Labs
# For license information, see LICENSE.txt
#
# ID: models.py [] benjamin@bengfort.com $

"""
Topic modeling for data survey analysis
"""

##########################################################################
## Imports
##########################################################################

from django.db import models
from model_utils import Choices
from autoslug import AutoSlugField
from model_utils.models import TimeStampedModel
from topics.managers import TopicManager, VotingManager

##########################################################################
## Topic Models
##########################################################################


class Topic(TimeStampedModel):
    """
    Stores a topic, basically a string like a tag and manages it.
    """

    # Topic fields
    title = models.CharField(max_length=128)
    slug = AutoSlugField(populate_from='title', unique=True)
    link = models.URLField(null=True, blank=True, default=None)
    # A non-canonical topic points at the canonical topic it duplicates.
    refers_to = models.ForeignKey('self', related_name='references',
                                  null=True, blank=True, default=None)
    is_canonical = models.BooleanField(default=True)

    # Custom topic manager
    objects = TopicManager()

    # Topic meta class
    class Meta:
        db_table = 'topics'
        ordering = ('title', )

    def __unicode__(self):
        return self.title

    def vote_total(self):
        """
        Accumulates the votes via aggregation.

        Canonical topics also fold in the vote totals of every topic that
        refers to them.
        """
        # BUG FIX: Sum() over an empty queryset aggregates to None, not 0,
        # so a canonical topic with no direct votes used to raise a
        # TypeError on the `votes += ...` accumulation below.
        votes = self.votes.aggregate(
            total=models.Sum('vote')
        )['total'] or 0

        if self.is_canonical:
            for ref in self.references.all():
                votes += ref.vote_total()

        return votes

##########################################################################
## Topic Voting
##########################################################################


class Vote(TimeStampedModel):
    """
    Simple voting model that stores an up or down vote for a
    particular topic associated with a particular IP address
    (and time of day).
    """

    DATEFMT = "%a %b %d, %Y at %H:%M"
    BALLOT = Choices((-1, 'downvote', 'downvote'),
                     (1, 'upvote', 'upvote'),
                     (0, 'novote', 'novote'))

    # Vote fields
    vote = models.SmallIntegerField(choices=BALLOT, default=BALLOT.upvote)
    topic = models.ForeignKey(Topic, related_name='votes')
    ipaddr = models.GenericIPAddressField()

    # Custom voting manager
    objects = VotingManager()

    # Vote meta class
    class Meta:
        db_table = 'voting'
        ordering = ('-created',)

    def __unicode__(self):
        action = {
            -1: "-1",
            0: "--",
            1: "+1",
        }[self.vote]

        return "{} for \"{}\" ({} on {})".format(
            action, self.topic, self.ipaddr,
            self.modified.strftime(self.DATEFMT)
        )
mit
onyxfish/journalism
tests/test_table/test_rename.py
3
4715
#!/usr/bin/env python
# -*- coding: utf8 -*-

import warnings

from agate import Table
from agate.testcase import AgateTestCase
from agate.data_types import *


class TestRename(AgateTestCase):
    """Tests for ``Table.rename``: renaming rows/columns by list or dict,
    slugification, and the guarantee that renaming never mutates the
    original table."""

    def setUp(self):
        # Three rows of (Number, Number, Text) sample data shared by all tests.
        self.rows = (
            (1, 4, 'a'),
            (2, 3, 'b'),
            (None, 2, 'c')
        )

        self.number_type = Number()
        self.text_type = Text()

        self.column_names = ['one', 'two', 'three']
        self.column_types = [self.number_type, self.number_type, self.text_type]

    def test_rename_row_names(self):
        # Renaming row names by full list: new table gets them, original
        # table (which had none) is unchanged.
        table = Table(self.rows, self.column_names, self.column_types)
        table2 = table.rename(row_names=['a', 'b', 'c'])

        self.assertSequenceEqual(table2.row_names, ['a', 'b', 'c'])
        self.assertSequenceEqual(table2.column_names, self.column_names)

        self.assertIs(table.row_names, None)
        self.assertSequenceEqual(table.column_names, self.column_names)

    def test_rename_row_names_dict(self):
        # A dict renames only the listed row names, preserving the rest.
        table = Table(self.rows, self.column_names, self.column_types, row_names=['a', 'b', 'c'])
        table2 = table.rename(row_names={'b': 'd'})

        self.assertSequenceEqual(table2.row_names, ['a', 'd', 'c'])
        self.assertSequenceEqual(table2.column_names, self.column_names)

        self.assertSequenceEqual(table.row_names, ['a', 'b', 'c'])
        self.assertSequenceEqual(table.column_names, self.column_names)

    def test_rename_column_names(self):
        # Renaming columns by full list; original table keeps its names.
        table = Table(self.rows, self.column_names, self.column_types)
        table2 = table.rename(column_names=['d', 'e', 'f'])

        self.assertIs(table2.row_names, None)
        self.assertSequenceEqual(table2.column_names, ['d', 'e', 'f'])

        self.assertIs(table.row_names, None)
        self.assertSequenceEqual(table.column_names, self.column_names)

    def test_rename_column_names_dict(self):
        # A dict renames only the listed columns, preserving order.
        table = Table(self.rows, self.column_names, self.column_types)
        table2 = table.rename(column_names={'two': 'second'})

        self.assertIs(table2.row_names, None)
        self.assertSequenceEqual(table2.column_names, ['one', 'second', 'three'])

        self.assertIs(table.row_names, None)
        self.assertSequenceEqual(table.column_names, self.column_names)

    def test_rename_column_names_renames_row_values(self):
        table = Table(self.rows, self.column_names, self.column_types)

        new_column_names = ['d', 'e', 'f']
        table2 = table.rename(column_names=new_column_names)

        self.assertColumnNames(table2, new_column_names)

    def test_rename_slugify_columns(self):
        # slug_columns transliterates, replaces spaces with the separator,
        # and deduplicates repeated names with a numeric suffix.
        strings = ['Test kož', 'test 2', 'test 2']

        table = Table(self.rows, self.column_names, self.column_types)
        table2 = table.rename(strings, slug_columns=True)
        table3 = table.rename(strings, slug_columns=True, separator='.')

        self.assertColumnNames(table, ['one', 'two', 'three'])
        self.assertColumnNames(table2, ['test_koz', 'test_2', 'test_2_2'])
        self.assertColumnNames(table3, ['test.koz', 'test.2', 'test.2.2'])

    def test_rename_slugify_rows(self):
        # Same slugification behaviour applied to row names.
        strings = ['Test kož', 'test 2', 'test 2']

        table = Table(self.rows, self.column_names, self.column_types)
        table2 = table.rename(row_names=strings, slug_rows=True)
        table3 = table.rename(row_names=strings, slug_rows=True, separator='.')

        self.assertIs(table.row_names, None)
        self.assertRowNames(table2, ['test_koz', 'test_2', 'test_2_2'])
        self.assertRowNames(table3, ['test.koz', 'test.2', 'test.2.2'])

    def test_rename_slugify_columns_in_place(self):
        # slug_columns with no new names slugifies the table's existing
        # column names.
        column_names = [u'Test kož', 'test 2', 'test 2']

        # Constructing a Table with duplicate column names warns; silence
        # the warning just for the construction.
        warnings.simplefilter('ignore')

        try:
            table = Table(self.rows, column_names, self.column_types)
        finally:
            warnings.resetwarnings()

        table2 = table.rename(slug_columns=True)
        table3 = table.rename(slug_columns=True, separator='.')

        # The original table deduplicated the names on construction but did
        # not slugify them.
        self.assertColumnNames(table, [u'Test kož', 'test 2', 'test 2_2'])
        self.assertColumnNames(table2, ['test_koz', 'test_2', 'test_2_2'])
        self.assertColumnNames(table3, ['test.koz', 'test.2', 'test.2.2'])

    def test_rename_slugify_rows_in_place(self):
        # slug_rows with no new names slugifies the table's existing
        # row names.
        strings = ['Test kož', 'test 2', 'test 2']

        table = Table(self.rows, self.column_names, self.column_types, row_names=strings)
        table2 = table.rename(slug_rows=True)
        table3 = table.rename(slug_rows=True, separator='.')

        self.assertRowNames(table, ['Test kož', 'test 2', 'test 2'])
        self.assertRowNames(table2, ['test_koz', 'test_2', 'test_2_2'])
        self.assertRowNames(table3, ['test.koz', 'test.2', 'test.2.2'])
mit
iw3hxn/server
openerp/tools/yaml_tag.py
14
6064
import yaml
import logging


class YamlTag(object):
    """
    Superclass for constructors of custom tags defined in yaml file.
    __str__ is overriden in subclass and used for serialization in module
    recorder.
    """
    def __init__(self, **kwargs):
        # Any extra yaml mapping keys become attributes of the tag object.
        self.__dict__.update(kwargs)
    def __getitem__(self, key):
        # Allow dict-style access to tag attributes.
        return getattr(self, key)
    def __getattr__(self, attr):
        # Missing attributes resolve to None rather than raising, so
        # consumers can probe optional yaml keys freely.
        return None
    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, sorted(self.__dict__.items()))

class Assert(YamlTag):
    """`!assert` tag: an assertion on a model record, logged at `severity`."""
    def __init__(self, model, id=None, severity=logging.WARNING, string="NONAME", **kwargs):
        self.model = model
        self.id = id
        self.severity = severity
        self.string = string
        super(Assert, self).__init__(**kwargs)

class Record(YamlTag):
    """`!record` tag: creates or updates a database record."""
    # NOTE(review): the `use` parameter is accepted but never stored, so
    # `record.use` always resolves to None via YamlTag.__getattr__ --
    # confirm whether the yaml importer relies on it before changing.
    def __init__(self, model, id, use='id', view=True, **kwargs):
        self.model = model
        self.id = id
        self.view = view
        super(Record, self).__init__(**kwargs)
    def __str__(self):
        return '!record {model: %s, id: %s}:' % (str(self.model,), str(self.id,))

class Python(YamlTag):
    """`!python` tag: a block of python code evaluated against `model`."""
    def __init__(self, model, severity=logging.ERROR, name="", **kwargs):
        self.model = model
        self.severity = severity
        self.name = name
        super(Python, self).__init__(**kwargs)
    def __str__(self):
        return '!python {model: %s}: |' % (str(self.model), )

class Menuitem(YamlTag):
    """`!menuitem` tag: declares a menu item record."""
    def __init__(self, id, name, **kwargs):
        self.id = id
        self.name = name
        super(Menuitem, self).__init__(**kwargs)

class Workflow(YamlTag):
    """`!workflow` tag: sends a workflow signal (`action`) to a record."""
    def __init__(self, model, action, ref=None, **kwargs):
        self.model = model
        self.action = action
        self.ref = ref
        super(Workflow, self).__init__(**kwargs)
    def __str__(self):
        return '!workflow {model: %s, action: %s, ref: %s}' % (str(self.model,), str(self.action,), str(self.ref,))

class ActWindow(YamlTag):
    """`!act_window` tag: declares a window action; all keys pass through."""
    def __init__(self, **kwargs):
        super(ActWindow, self).__init__(**kwargs)

class Function(YamlTag):
    """`!function` tag: calls model method `name`."""
    def __init__(self, model, name, **kwargs):
        self.model = model
        self.name = name
        super(Function, self).__init__(**kwargs)

class Report(YamlTag):
    """`!report` tag: declares a report on `model`."""
    def __init__(self, model, name, string, **kwargs):
        self.model = model
        self.name = name
        self.string = string
        super(Report, self).__init__(**kwargs)

class Delete(YamlTag):
    """`!delete` tag: deletes records; all keys pass through."""
    def __init__(self, **kwargs):
        super(Delete, self).__init__(**kwargs)

class Context(YamlTag):
    """`!context` tag: sets the evaluation context; all keys pass through."""
    def __init__(self, **kwargs):
        super(Context, self).__init__(**kwargs)

class Url(YamlTag):
    """`!url` tag: declares a URL shortcut; all keys pass through."""
    def __init__(self, **kwargs):
        super(Url, self).__init__(**kwargs)

class Eval(YamlTag):
    """`!eval` tag: a python expression evaluated at import time."""
    def __init__(self, expression):
        self.expression = expression
        super(Eval, self).__init__()
    def __str__(self):
        return '!eval %s' % str(self.expression)

class Ref(YamlTag):
    """`!ref` tag: a reference to another record's external id."""
    def __init__(self, expr="False", *args, **kwargs):
        self.expr = expr
        super(Ref, self).__init__(*args, **kwargs)
    def __str__(self):
        return 'ref(%s)' % repr(self.expr)

class IrSet(YamlTag):
    """`!ir_set` tag: declares an ir.values entry; no direct attributes."""
    def __init__(self):
        super(IrSet, self).__init__()

# --- yaml constructor callbacks: each builds the matching tag object from
# --- the node's mapping (or scalar, for !eval and !ref id).

def assert_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Assert(**kwargs)

def record_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Record(**kwargs)

def python_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Python(**kwargs)

def menuitem_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Menuitem(**kwargs)

def workflow_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Workflow(**kwargs)

def act_window_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return ActWindow(**kwargs)

def function_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Function(**kwargs)

def report_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Report(**kwargs)

def delete_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Delete(**kwargs)

def context_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Context(**kwargs)

def url_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Url(**kwargs)

def eval_constructor(loader, node):
    # !eval carries a bare expression string, not a mapping.
    expression = loader.construct_scalar(node)
    return Eval(expression)

def ref_constructor(loader, tag_suffix, node):
    # Multi-constructor: "!ref:id value" takes a scalar id; any other
    # suffix is treated as a mapping of Ref keyword arguments.
    if tag_suffix == "id":
        kwargs = {"id": loader.construct_scalar(node)}
    else:
        kwargs = loader.construct_mapping(node)
    return Ref(**kwargs)

def ir_set_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return IrSet(**kwargs)

# Registers constructors for custom tags.
# Constructors are actually defined globally: do not redefined them in another
# class/file/package. This means that module recorder need import this file.
def add_constructors():
    """Register every custom tag constructor with the global yaml loader."""
    yaml.add_constructor(u"!assert", assert_constructor)
    yaml.add_constructor(u"!record", record_constructor)
    yaml.add_constructor(u"!python", python_constructor)
    yaml.add_constructor(u"!menuitem", menuitem_constructor)
    yaml.add_constructor(u"!workflow", workflow_constructor)
    yaml.add_constructor(u"!act_window", act_window_constructor)
    yaml.add_constructor(u"!function", function_constructor)
    yaml.add_constructor(u"!report", report_constructor)
    yaml.add_constructor(u"!context", context_constructor)
    yaml.add_constructor(u"!delete", delete_constructor)
    yaml.add_constructor(u"!url", url_constructor)
    yaml.add_constructor(u"!eval", eval_constructor)
    yaml.add_multi_constructor(u"!ref", ref_constructor)
    yaml.add_constructor(u"!ir_set", ir_set_constructor)
# Register at import time so merely importing this module is enough.
add_constructors()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
edmorley/treeherder
treeherder/services/pulse/consumers.py
1
5645
import logging

import newrelic.agent
from kombu import (Exchange,
                   Queue)
from kombu.mixins import ConsumerMixin

from treeherder.etl.common import fetch_json
from treeherder.etl.tasks.pulse_tasks import (store_pulse_jobs,
                                              store_pulse_pushes)

from .exchange import get_exchange

logger = logging.getLogger(__name__)

# Used for making API calls to Pulse Guardian, such as detecting bindings on
# the current ingestion queue.
PULSE_GUARDIAN_URL = "https://pulseguardian.mozilla.org/"


class PulseConsumer(ConsumerMixin):
    """
    Consume jobs from Pulse exchanges.

    Subclasses must define a ``queue_suffix`` class attribute (used to build
    the Pulse queue name) and an ``on_message(body, message)`` callback.
    """
    def __init__(self, connection):
        self.connection = connection
        self.consumers = []
        self.queue = None
        # Pulse queue names are namespaced by the connecting user's id.
        self.queue_name = "queue/{}/{}".format(connection.userid,
                                               self.queue_suffix)

    def get_consumers(self, Consumer, channel):
        """Build kombu Consumer objects (ConsumerMixin hook)."""
        return [
            Consumer(**c) for c in self.consumers
        ]

    def bind_to(self, exchange, routing_key):
        """Bind this consumer's durable queue to ``exchange``/``routing_key``.

        The queue is created lazily on the first binding; subsequent calls
        only add bindings to the existing queue.
        """
        if not self.queue:
            self.queue = Queue(
                name=self.queue_name,
                channel=self.connection.channel(),
                exchange=exchange,
                routing_key=routing_key,
                durable=True,
                auto_delete=False,
            )
            self.consumers.append(dict(queues=self.queue,
                                       callbacks=[self.on_message]))
            # just in case the queue does not already exist on Pulse
            self.queue.declare()
        else:
            self.queue.bind_to(exchange=exchange, routing_key=routing_key)

    def unbind_from(self, exchange, routing_key):
        """Remove a single binding from the queue."""
        self.queue.unbind_from(exchange, routing_key)

    def close(self):
        """Release the underlying connection."""
        self.connection.release()

    def prune_bindings(self, new_bindings):
        """Remove queue bindings that are no longer in ``new_bindings``.

        ``new_bindings`` is the list of binding strings (see
        :meth:`get_binding_str`) that were just (re-)established; anything
        else still attached to the durable queue is assumed stale.
        """
        # get the existing bindings for the queue
        bindings = []
        try:
            bindings = self.get_bindings(self.queue_name)["bindings"]
        except Exception:
            # Deliberately best-effort: pruning failures must not block
            # data ingestion.
            logger.error("Unable to fetch existing bindings for %s. Data ingestion may proceed, "
                         "but no bindings will be pruned", self.queue_name)

        # Now prune any bindings from the queue that were not
        # established above.
        # This indicates that they are no longer in the config, and should
        # therefore be removed from the durable queue bindings list.
        for binding in bindings:
            if binding["source"]:
                binding_str = self.get_binding_str(binding["source"],
                                                   binding["routing_key"])

                if binding_str not in new_bindings:
                    self.unbind_from(Exchange(binding["source"]),
                                     binding["routing_key"])
                    logger.info("Unbound from: %s", binding_str)

    def get_binding_str(self, exchange, routing_key):
        """Use consistent string format for binding comparisons"""
        return "{} {}".format(exchange, routing_key)

    def get_bindings(self, queue_name):
        """Get list of bindings from the pulse API"""
        return fetch_json("{}queue/{}/bindings".format(PULSE_GUARDIAN_URL,
                                                       queue_name))


class JobConsumer(PulseConsumer):
    """Listen for job messages and hand them off to the ingestion task."""
    queue_suffix = "jobs"

    @newrelic.agent.background_task(name='pulse-listener-jobs.on_message',
                                    group='Pulse Listener')
    def on_message(self, body, message):
        exchange = message.delivery_info['exchange']
        routing_key = message.delivery_info['routing_key']
        logger.info('received job message from %s#%s', exchange, routing_key)
        store_pulse_jobs.apply_async(
            args=[body, exchange, routing_key],
            queue='store_pulse_jobs'
        )
        message.ack()


class PushConsumer(PulseConsumer):
    """Listen for push (resultset) messages and hand them off to ingestion."""
    queue_suffix = "resultsets"

    @newrelic.agent.background_task(name='pulse-listener-pushes.on_message',
                                    group='Pulse Listener')
    def on_message(self, body, message):
        exchange = message.delivery_info['exchange']
        routing_key = message.delivery_info['routing_key']
        logger.info('received push message from %s#%s', exchange, routing_key)
        store_pulse_pushes.apply_async(
            args=[body, exchange, routing_key],
            queue='store_pulse_pushes'
        )
        message.ack()


def bind_to(consumer, exchange, routing_key):
    """Bind ``consumer`` to ``exchange``/``routing_key``; return the binding string."""
    # bind the given consumer to the current exchange with a routing key
    consumer.bind_to(exchange=exchange, routing_key=routing_key)

    # get the binding key for this consumer
    binding = consumer.get_binding_str(exchange.name, routing_key)

    # Fix: use lazy %-style logger arguments for consistency with every
    # other logging call in this module (and to skip formatting when the
    # log level is disabled), instead of eager str.format().
    logger.info("Pulse queue %s bound to: %s", consumer.queue_name, binding)

    return binding


def prepare_consumer(connection, consumer_cls, sources, build_routing_key=None):
    """Create a ``consumer_cls`` bound to every source, pruning stale bindings.

    ``sources`` entries look like ``"exchange.key1:key2"``: the part before
    the first '.' names the exchange, the remainder is a ':'-separated list
    of routing keys.  ``build_routing_key``, if given, maps each raw routing
    key to the final one.
    """
    consumer = consumer_cls(connection)
    bindings = []
    for source in sources:
        # split source string into exchange and routing key sections
        exchange, _, routing_keys = source.partition('.')

        # built an exchange object with our connection and exchange name
        exchange = get_exchange(connection, exchange)

        # split the routing keys up using the delimiter
        for routing_key in routing_keys.split(':'):
            if build_routing_key is not None:  # build routing key
                routing_key = build_routing_key(routing_key)

            binding = bind_to(consumer, exchange, routing_key)
            bindings.append(binding)

    # prune stale queues using the binding strings
    consumer.prune_bindings(bindings)

    return consumer
mpl-2.0
kursitet/edx-platform
common/test/acceptance/pages/lms/courseware.py
26
6723
""" Courseware page. """ from .course_page import CoursePage from selenium.webdriver.common.action_chains import ActionChains class CoursewarePage(CoursePage): """ Course info. """ url_path = "courseware/" xblock_component_selector = '.vert .xblock' section_selector = '.chapter' subsection_selector = '.chapter-content-container a' def is_browser_on_page(self): return self.q(css='body.courseware').present @property def num_sections(self): """ Return the number of sections in the sidebar on the page """ return len(self.q(css=self.section_selector)) @property def num_subsections(self): """ Return the number of subsections in the sidebar on the page, including in collapsed sections """ return len(self.q(css=self.subsection_selector)) @property def xblock_components(self): """ Return the xblock components within the unit on the page. """ return self.q(css=self.xblock_component_selector) @property def num_xblock_components(self): """ Return the number of rendered xblocks within the unit on the page """ return len(self.xblock_components) def xblock_component_type(self, index=0): """ Extract rendered xblock component type. Returns: str: xblock module type index: which xblock to query, where the index is the vertical display within the page (default is 0) """ return self.q(css=self.xblock_component_selector).attrs('data-block-type')[index] def xblock_component_html_content(self, index=0): """ Extract rendered xblock component html content. Returns: str: xblock module html content index: which xblock to query, where the index is the vertical display within the page (default is 0) """ # When Student Notes feature is enabled, it looks for the content inside # `.edx-notes-wrapper-content` element (Otherwise, you will get an # additional html related to Student Notes). 
element = self.q(css='{} .edx-notes-wrapper-content'.format(self.xblock_component_selector)) if element.first: return element.attrs('innerHTML')[index].strip() else: return self.q(css=self.xblock_component_selector).attrs('innerHTML')[index].strip() def tooltips_displayed(self): """ Verify if sequence navigation bar tooltips are being displayed upon mouse hover. """ for index, tab in enumerate(self.q(css='#sequence-list > li')): ActionChains(self.browser).move_to_element(tab).perform() if not self.q(css='#tab_{index} > p'.format(index=index)).visible: return False return True @property def course_license(self): """ Returns the course license text, if present. Else returns None. """ element = self.q(css="#content .container-footer .course-license") if element.is_present(): return element.text[0] return None def get_active_subsection_url(self): """ return the url of the active subsection in the left nav """ return self.q(css='.chapter-content-container .menu-item.active a').attrs('href')[0] @property def can_start_proctored_exam(self): """ Returns True if the timed/proctored exam timer bar is visible on the courseware. 
""" return self.q(css='button.start-timed-exam[data-start-immediately="false"]').is_present() def start_timed_exam(self): """ clicks the start this timed exam link """ self.q(css=".xblock-student_view .timed-exam .start-timed-exam").first.click() self.wait_for_element_presence(".proctored_exam_status .exam-timer", "Timer bar") def stop_timed_exam(self): """ clicks the stop this timed exam link """ self.q(css=".proctored_exam_status button.exam-button-turn-in-exam").first.click() self.wait_for_element_absence(".proctored_exam_status .exam-button-turn-in-exam", "End Exam Button gone") self.wait_for_element_presence("button[name='submit-proctored-exam']", "Submit Exam Button") self.q(css="button[name='submit-proctored-exam']").first.click() self.wait_for_element_absence(".proctored_exam_status .exam-timer", "Timer bar") def start_proctored_exam(self): """ clicks the start this timed exam link """ self.q(css='button.start-timed-exam[data-start-immediately="false"]').first.click() # Wait for the unique exam code to appear. # elf.wait_for_element_presence(".proctored-exam-code", "unique exam code") @property def entrance_exam_message_selector(self): """ Return the entrance exam status message selector on the top of courseware page. """ return self.q(css='#content .container section.course-content .sequential-status-message') def has_entrance_exam_message(self): """ Returns boolean indicating presence entrance exam status message container div. """ return self.entrance_exam_message_selector.is_present() def has_passed_message(self): """ Returns boolean indicating presence of passed message. """ return self.entrance_exam_message_selector.is_present() \ and "You have passed the entrance exam" in self.entrance_exam_message_selector.text[0] @property def chapter_count_in_navigation(self): """ Returns count of chapters available on LHS navigation. 
""" return len(self.q(css='nav.course-navigation a.chapter')) @property def is_timer_bar_present(self): """ Returns True if the timed/proctored exam timer bar is visible on the courseware. """ return self.q(css=".proctored_exam_status .exam-timer").is_present() class CoursewareSequentialTabPage(CoursePage): """ Courseware Sequential page """ def __init__(self, browser, course_id, chapter, subsection, position): super(CoursewareSequentialTabPage, self).__init__(browser, course_id) self.url_path = "courseware/{}/{}/{}".format(chapter, subsection, position) def is_browser_on_page(self): return self.q(css='nav.sequence-list-wrapper').present def get_selected_tab_content(self): """ return the body of the sequential currently selected """ return self.q(css='#seq_content .xblock').text[0]
agpl-3.0
keulraesik/pyelftools
examples/elf_relocations.py
9
1693
#------------------------------------------------------------------------------- # elftools example: elf_relocations.py # # An example of obtaining a relocation section from an ELF file and examining # the relocation entries it contains. # # Eli Bendersky (eliben@gmail.com) # This code is in the public domain #------------------------------------------------------------------------------- from __future__ import print_function import sys # If pyelftools is not installed, the example can also run from the root or # examples/ dir of the source distribution. sys.path[0:0] = ['.', '..'] from elftools.elf.elffile import ELFFile from elftools.elf.relocation import RelocationSection def process_file(filename): print('Processing file:', filename) with open(filename, 'rb') as f: elffile = ELFFile(f) # Read the .rela.dyn section from the file, by explicitly asking # ELFFile for this section # The section names are strings reladyn_name = '.rela.dyn' reladyn = elffile.get_section_by_name(reladyn_name) if not isinstance(reladyn, RelocationSection): print(' The file has no %s section' % reladyn_name) print(' %s section with %s relocations' % ( reladyn_name, reladyn.num_relocations())) for reloc in reladyn.iter_relocations(): print(' Relocation (%s)' % 'RELA' if reloc.is_RELA() else 'REL') # Relocation entry attributes are available through item lookup print(' offset = %s' % reloc['r_offset']) if __name__ == '__main__': if sys.argv[1] == '--test': for filename in sys.argv[2:]: process_file(filename)
unlicense
tigeorgia/fixmystreet
apps/users/migrations/0001_initial.py
1
2962
# -*- coding: utf-8 -*-
from __future__ import unicode_literals

from django.db import models, migrations
import django.utils.timezone
from django.conf import settings


class Migration(migrations.Migration):
    """Initial schema for the users app: custom FMSUser and FMSSettings models."""

    # Needs django.contrib.auth tables (Group / Permission) for the M2M fields.
    dependencies = [
        ('auth', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='FMSUser',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('username', models.CharField(unique=True, max_length=20, verbose_name='username')),
                ('email', models.EmailField(unique=True, max_length=254, verbose_name='email address')),
                ('first_name', models.CharField(max_length=70, verbose_name='first name')),
                ('last_name', models.CharField(max_length=70, verbose_name='last name')),
                ('password', models.CharField(max_length=128, verbose_name='password')),
                ('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
                ('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
                ('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
                ('is_staff', models.BooleanField(default=False, verbose_name='staff')),
                ('is_active', models.BooleanField(default=True, verbose_name='active')),
            ],
            options={
                'verbose_name': 'user',
                'verbose_name_plural': 'users',
            },
            bases=(models.Model,),
        ),
        migrations.CreateModel(
            name='FMSSettings',
            fields=[
                # One settings row per user; the user FK doubles as the primary key.
                ('user', models.OneToOneField(primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
                ('language', models.CharField(default=b'ka', max_length=2, verbose_name='language', choices=[(b'ka', 'Georgian'), (b'en', 'English')])),
            ],
            options={
            },
            bases=(models.Model,),
        ),
        # M2M fields are added in separate operations, after both sides exist.
        migrations.AddField(
            model_name='fmsuser',
            name='groups',
            field=models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of his/her group.', verbose_name='groups'),
            preserve_default=True,
        ),
        migrations.AddField(
            model_name='fmsuser',
            name='user_permissions',
            field=models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions'),
            preserve_default=True,
        ),
    ]
gpl-2.0