repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
sidartaoliveira/ansible
lib/ansible/plugins/action/ce_config.py
89
4192
# # Copyright 2015 Peter Sprygada <psprygada@ansible.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # from __future__ import (absolute_import, division, print_function) __metaclass__ = type import os import re import time import glob from ansible.plugins.action.ce import ActionModule as _ActionModule from ansible.module_utils._text import to_text from ansible.module_utils.six.moves.urllib.parse import urlsplit from ansible.utils.vars import merge_hash PRIVATE_KEYS_RE = re.compile('__.+__') class ActionModule(_ActionModule): def run(self, tmp=None, task_vars=None): if self._task.args.get('src'): try: self._handle_template() except ValueError as exc: return dict(failed=True, msg=exc.message) result = super(ActionModule, self).run(tmp, task_vars) if self._task.args.get('backup') and result.get('__backup__'): # User requested backup and no error occurred in module. # NOTE: If there is a parameter error, _backup key may not be in results. 
filepath = self._write_backup(task_vars['inventory_hostname'], result['__backup__']) result['backup_path'] = filepath # strip out any keys that have two leading and two trailing # underscore characters for key in result.keys(): if PRIVATE_KEYS_RE.match(key): del result[key] return result def _get_working_path(self): cwd = self._loader.get_basedir() if self._task._role is not None: cwd = self._task._role._role_path return cwd def _write_backup(self, host, contents): backup_path = self._get_working_path() + '/backup' if not os.path.exists(backup_path): os.mkdir(backup_path) for fn in glob.glob('%s/%s*' % (backup_path, host)): os.remove(fn) tstamp = time.strftime("%Y-%m-%d@%H:%M:%S", time.localtime(time.time())) filename = '%s/%s_config.%s' % (backup_path, host, tstamp) open(filename, 'w').write(contents) return filename def _handle_template(self): src = self._task.args.get('src') working_path = self._get_working_path() if os.path.isabs(src) or urlsplit('src').scheme: source = src else: source = self._loader.path_dwim_relative(working_path, 'templates', src) if not source: source = self._loader.path_dwim_relative(working_path, src) if not os.path.exists(source): raise ValueError('path specified in src not found') try: with open(source, 'r') as f: template_data = to_text(f.read()) except IOError: return dict(failed=True, msg='unable to load src file') # Create a template search path in the following order: # [working_path, self_role_path, dependent_role_paths, dirname(source)] searchpath = [working_path] if self._task._role is not None: searchpath.append(self._task._role._role_path) if hasattr(self._task, "_block:"): dep_chain = self._task._block.get_dep_chain() if dep_chain is not None: for role in dep_chain: searchpath.append(role._role_path) searchpath.append(os.path.dirname(source)) self._templar.environment.loader.searchpath = searchpath self._task.args['src'] = self._templar.template(template_data)
gpl-3.0
foliverkodi/repository.foliver
plugin.video.adultsonly/resources/lib/chardet/langbulgarianmodel.py
2965
12784
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### # 255: Control characters that usually does not exist in any text # 254: Carriage/Return # 253: symbol (punctuation) that does not belong to word # 252: 0 - 9 # Character Mapping Table: # this table is modified base on win1251BulgarianCharToOrderMap, so # only number <64 is sure valid Latin5_BulgarianCharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 116,195, 85, 
93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 194,195,196,197,198,199,200,201,202,203,204,205,206,207,208,209, # 80 210,211,212,213,214,215,216,217,218,219,220,221,222,223,224,225, # 90 81,226,227,228,229,230,105,231,232,233,234,235,236, 45,237,238, # a0 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # b0 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,239, 67,240, 60, 56, # c0 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # d0 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,241, 42, 16, # e0 62,242,243,244, 58,245, 98,246,247,248,249,250,251, 91,252,253, # f0 ) win1251BulgarianCharToOrderMap = ( 255,255,255,255,255,255,255,255,255,255,254,255,255,254,255,255, # 00 255,255,255,255,255,255,255,255,255,255,255,255,255,255,255,255, # 10 253,253,253,253,253,253,253,253,253,253,253,253,253,253,253,253, # 20 252,252,252,252,252,252,252,252,252,252,253,253,253,253,253,253, # 30 253, 77, 90, 99,100, 72,109,107,101, 79,185, 81,102, 76, 94, 82, # 40 110,186,108, 91, 74,119, 84, 96,111,187,115,253,253,253,253,253, # 50 253, 65, 69, 70, 66, 63, 68,112,103, 92,194,104, 95, 86, 87, 71, # 60 116,195, 85, 93, 97,113,196,197,198,199,200,253,253,253,253,253, # 70 206,207,208,209,210,211,212,213,120,214,215,216,217,218,219,220, # 80 221, 78, 64, 83,121, 98,117,105,222,223,224,225,226,227,228,229, # 90 88,230,231,232,233,122, 89,106,234,235,236,237,238, 45,239,240, # a0 73, 80,118,114,241,242,243,244,245, 62, 58,246,247,248,249,250, # b0 31, 32, 35, 43, 37, 44, 55, 47, 40, 59, 33, 46, 38, 36, 41, 30, # c0 39, 28, 34, 51, 48, 49, 53, 50, 54, 57, 61,251, 67,252, 60, 56, # d0 1, 18, 9, 20, 11, 3, 23, 15, 2, 26, 12, 10, 14, 6, 4, 13, # e0 7, 8, 5, 19, 29, 25, 22, 21, 27, 24, 17, 75, 52,253, 42, 16, # f0 ) # Model Table: # total sequences: 100% # first 512 sequences: 96.9392% # first 1024 sequences:3.0618% # rest sequences: 0.2992% # negative sequences: 0.0020% BulgarianLangModel = ( 0,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,3,3,3,3,3,3,3,2,3,3,3,3,3, 
3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,2,2,1,2,2, 3,1,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,0,3,3,3,3,3,3,3,3,3,3,0,3,0,1, 0,0,0,0,0,0,0,0,0,0,1,0,1,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,2,3,2,3,3,3,3,3,3,3,3,0,3,1,0, 0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,2,3,3,2,3,3,3,3,3,3,3,3,3,3,3,3,1,3,2,3,3,3,3,3,3,3,3,0,3,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,3,3,3,3,3,3,2,3,2,2,1,3,3,3,3,2,2,2,1,1,2,0,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,2,3,2,2,3,3,1,1,2,3,3,2,3,3,3,3,2,1,2,0,2,0,3,0,0, 0,0,0,0,0,0,0,1,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,1,3,3,3,3,3,2,3,2,3,3,3,3,3,2,3,3,1,3,0,3,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,1,3,3,2,3,3,3,1,3,3,2,3,2,2,2,0,0,2,0,2,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,0,3,3,3,2,2,3,3,3,1,2,2,3,2,1,1,2,0,2,0,0,0,0, 1,0,0,0,0,0,0,0,0,0,2,0,0,1,0,0,1,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,2,3,3,1,2,3,2,2,2,3,3,3,3,3,2,2,3,1,2,0,2,1,2,0,0, 0,0,0,0,0,0,0,0,0,0,3,0,0,1,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,1,3,3,3,3,3,2,3,3,3,2,3,3,2,3,2,2,2,3,1,2,0,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,3,3,1,1,1,2,2,1,3,1,3,2,2,3,0,0,1,0,1,0,1,0,0, 0,0,0,1,0,0,0,0,1,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,2,2,3,2,2,3,1,2,1,1,1,2,3,1,3,1,2,2,0,1,1,1,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,1,3,2,2,3,3,1,2,3,1,1,3,3,3,3,1,2,2,1,1,1,0,2,0,2,0,1, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,3,3,3,3,3,3,3,3,3,1,2,2,3,3,3,2,2,1,1,2,0,2,0,1,0,0, 
0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,0,1,2,1,3,3,2,3,3,3,3,3,2,3,2,1,0,3,1,2,1,2,1,2,3,2,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,1,1,2,3,3,3,3,3,3,3,3,3,3,3,3,0,0,3,1,3,3,2,3,3,2,2,2,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,0,3,3,3,3,3,2,1,1,2,1,3,3,0,3,1,1,1,1,3,2,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,2,2,2,3,3,3,3,3,3,3,3,3,3,3,1,1,3,1,3,3,2,3,2,2,2,3,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,2,3,3,2,2,3,2,1,1,1,1,1,3,1,3,1,1,0,0,0,1,0,0,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,3,2,3,2,0,3,2,0,3,0,2,0,0,2,1,3,1,0,0,1,0,0,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,2,1,1,1,1,2,1,1,2,1,1,1,2,2,1,2,1,1,1,0,1,1,0,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,2,1,3,1,1,2,1,3,2,1,1,0,1,2,3,2,1,1,1,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,3,3,3,3,2,2,1,0,1,0,0,1,0,0,0,2,1,0,3,0,0,1,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,2,3,2,3,3,1,3,2,1,1,1,2,1,1,2,1,3,0,1,0,0,0,1,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,1,1,2,2,3,3,2,3,2,2,2,3,1,2,2,1,1,2,1,1,2,2,0,1,1,0,1,0,2,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,3,3,3,2,1,3,1,0,2,2,1,3,2,1,0,0,2,0,2,0,1,0,0,0,0,0,0,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1, 3,3,3,3,3,3,1,2,0,2,3,1,2,3,2,0,1,3,1,2,1,1,1,0,0,1,0,0,2,2,2,3, 2,2,2,2,1,2,1,1,2,2,1,1,2,0,1,1,1,0,0,1,1,0,0,1,1,0,0,0,1,1,0,1, 3,3,3,3,3,2,1,2,2,1,2,0,2,0,1,0,1,2,1,2,1,1,0,0,0,1,0,1,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,2,0,0,0,0,0,0,0,0,0,0,1, 3,3,2,3,3,1,1,3,1,0,3,2,1,0,0,0,1,2,0,2,0,1,0,0,0,1,0,1,2,1,2,2, 
1,1,1,1,1,1,1,2,2,2,1,1,1,1,1,1,1,0,1,2,1,1,1,0,0,0,0,0,1,1,0,0, 3,1,0,1,0,2,3,2,2,2,3,2,2,2,2,2,1,0,2,1,2,1,1,1,0,1,2,1,2,2,2,1, 1,1,2,2,2,2,1,2,1,1,0,1,2,1,2,2,2,1,1,1,0,1,1,1,1,2,0,1,0,0,0,0, 2,3,2,3,3,0,0,2,1,0,2,1,0,0,0,0,2,3,0,2,0,0,0,0,0,1,0,0,2,0,1,2, 2,1,2,1,2,2,1,1,1,2,1,1,1,0,1,2,2,1,1,1,1,1,0,1,1,1,0,0,1,2,0,0, 3,3,2,2,3,0,2,3,1,1,2,0,0,0,1,0,0,2,0,2,0,0,0,1,0,1,0,1,2,0,2,2, 1,1,1,1,2,1,0,1,2,2,2,1,1,1,1,1,1,1,0,1,1,1,0,0,0,0,0,0,1,1,0,0, 2,3,2,3,3,0,0,3,0,1,1,0,1,0,0,0,2,2,1,2,0,0,0,0,0,0,0,0,2,0,1,2, 2,2,1,1,1,1,1,2,2,2,1,0,2,0,1,0,1,0,0,1,0,1,0,0,1,0,0,0,0,1,0,0, 3,3,3,3,2,2,2,2,2,0,2,1,1,1,1,2,1,2,1,1,0,2,0,1,0,1,0,0,2,0,1,2, 1,1,1,1,1,1,1,2,2,1,1,0,2,0,1,0,2,0,0,1,1,1,0,0,2,0,0,0,1,1,0,0, 2,3,3,3,3,1,0,0,0,0,0,0,0,0,0,0,2,0,0,1,1,0,0,0,0,0,0,1,2,0,1,2, 2,2,2,1,1,2,1,1,2,2,2,1,2,0,1,1,1,1,1,1,0,1,1,1,1,0,0,1,1,1,0,0, 2,3,3,3,3,0,2,2,0,2,1,0,0,0,1,1,1,2,0,2,0,0,0,3,0,0,0,0,2,0,2,2, 1,1,1,2,1,2,1,1,2,2,2,1,2,0,1,1,1,0,1,1,1,1,0,2,1,0,0,0,1,1,0,0, 2,3,3,3,3,0,2,1,0,0,2,0,0,0,0,0,1,2,0,2,0,0,0,0,0,0,0,0,2,0,1,2, 1,1,1,2,1,1,1,1,2,2,2,0,1,0,1,1,1,0,0,1,1,1,0,0,1,0,0,0,0,1,0,0, 3,3,2,2,3,0,1,0,1,0,0,0,0,0,0,0,1,1,0,3,0,0,0,0,0,0,0,0,1,0,2,2, 1,1,1,1,1,2,1,1,2,2,1,2,2,1,0,1,1,1,1,1,0,1,0,0,1,0,0,0,1,1,0,0, 3,1,0,1,0,2,2,2,2,3,2,1,1,1,2,3,0,0,1,0,2,1,1,0,1,1,1,1,2,1,1,1, 1,2,2,1,2,1,2,2,1,1,0,1,2,1,2,2,1,1,1,0,0,1,1,1,2,1,0,1,0,0,0,0, 2,1,0,1,0,3,1,2,2,2,2,1,2,2,1,1,1,0,2,1,2,2,1,1,2,1,1,0,2,1,1,1, 1,2,2,2,2,2,2,2,1,2,0,1,1,0,2,1,1,1,1,1,0,0,1,1,1,1,0,1,0,0,0,0, 2,1,1,1,1,2,2,2,2,1,2,2,2,1,2,2,1,1,2,1,2,3,2,2,1,1,1,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,3,2,0,1,2,0,1,2,1,1,0,1,0,1,2,1,2,0,0,0,1,1,0,0,0,1,0,0,2, 1,1,0,0,1,1,0,1,1,1,1,0,2,0,1,1,1,0,0,1,1,0,0,0,0,1,0,0,0,1,0,0, 2,0,0,0,0,1,2,2,2,2,2,2,2,1,2,1,1,1,1,1,1,1,0,1,1,1,1,1,2,1,1,1, 1,2,2,2,2,1,1,2,1,2,1,1,1,0,2,1,2,1,1,1,0,2,1,1,1,1,0,1,0,0,0,0, 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, 
1,1,0,1,0,1,1,1,1,1,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,3,2,0,0,0,0,1,0,0,0,0,0,0,1,1,0,2,0,0,0,0,0,0,0,0,1,0,1,2, 1,1,1,1,1,1,0,0,2,2,2,2,2,0,1,1,0,1,1,1,1,1,0,0,1,0,0,0,1,1,0,1, 2,3,1,2,1,0,1,1,0,2,2,2,0,0,1,0,0,1,1,1,1,0,0,0,0,0,0,0,1,0,1,2, 1,1,1,1,2,1,1,1,1,1,1,1,1,0,1,1,0,1,0,1,0,1,0,0,1,0,0,0,0,1,0,0, 2,2,2,2,2,0,0,2,0,0,2,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,2,0,2,2, 1,1,1,1,1,0,0,1,2,1,1,0,1,0,1,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,0,2,0,1,1,0,0,0,1,0,0,2,0,2,0,0,0,0,0,0,0,0,0,0,1,1, 0,0,0,1,1,1,1,1,1,1,1,1,1,0,1,0,0,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,3,2,0,0,1,0,0,1,0,0,0,0,0,0,1,0,2,0,0,0,1,0,0,0,0,0,0,0,2, 1,1,0,0,1,0,0,0,1,1,0,0,1,0,1,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 2,1,2,2,2,1,2,1,2,2,1,1,2,1,1,1,0,1,1,1,1,2,0,1,0,1,1,1,1,0,1,1, 1,1,2,1,1,1,1,1,1,0,0,1,2,1,1,1,1,1,1,0,0,1,1,1,0,0,0,0,0,0,0,0, 1,0,0,1,3,1,1,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,2,1,0,0,1,0,2,0,0,0,0,0,1,1,1,0,1,0,0,0,0,0,0,0,0,2,0,0,1, 0,2,0,1,0,0,1,1,2,0,1,0,1,0,1,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0, 1,2,2,2,2,0,1,1,0,2,1,0,1,1,1,0,0,1,0,2,0,1,0,0,0,0,0,0,0,0,0,1, 0,1,0,0,1,0,0,0,1,1,0,0,1,0,0,1,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 2,2,2,2,2,0,0,1,0,0,0,1,0,1,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, 0,1,0,1,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0,1,1,0,0,0,0,0,0,0,0,0,0,0, 2,0,1,0,0,1,2,1,1,1,1,1,1,2,2,1,0,0,1,0,1,0,0,0,0,1,1,1,1,0,0,0, 1,1,2,1,1,1,1,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 2,2,1,2,1,0,0,1,0,0,0,0,0,0,0,0,1,1,0,1,0,0,0,0,0,0,0,0,0,0,0,1, 0,0,0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 3,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,1,2,0,0,0,0,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0,0,0,1,0,0,0, 0,1,1,0,1,1,1,0,0,1,0,0,1,0,1,0,0,0,1,0,0,0,0,0,1,0,0,0,0,0,0,0, 1,0,1,0,0,1,1,1,1,1,1,1,1,1,1,1,0,0,1,0,2,0,0,2,0,1,0,0,1,0,0,1, 
1,1,0,0,1,1,0,1,0,0,0,1,0,0,1,0,0,0,0,0,0,0,0,1,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,1,1,0,0,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1,0,1,0, 1,1,1,1,1,1,1,2,0,0,0,0,0,0,2,1,0,1,1,0,0,1,1,1,0,1,0,0,0,0,0,0, 2,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0, 1,0,0,1,1,1,1,1,1,1,1,1,1,1,1,1,1,0,1,0,1,1,0,1,1,1,1,1,0,1,0,0, 0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1, ) Latin5BulgarianModel = { 'charToOrderMap': Latin5_BulgarianCharToOrderMap, 'precedenceMatrix': BulgarianLangModel, 'mTypicalPositiveRatio': 0.969392, 'keepEnglishLetter': False, 'charsetName': "ISO-8859-5" } Win1251BulgarianModel = { 'charToOrderMap': win1251BulgarianCharToOrderMap, 'precedenceMatrix': BulgarianLangModel, 'mTypicalPositiveRatio': 0.969392, 'keepEnglishLetter': False, 'charsetName': "windows-1251" } # flake8: noqa
gpl-2.0
ProfessorX/Config
.PyCharm30/system/python_stubs/-1247972723/gio/_gio/AppInfo.py
1
4475
# encoding: utf-8 # module gio._gio # from /usr/lib/python2.7/dist-packages/gtk-2.0/gio/_gio.so # by generator 1.135 # no doc # imports import gio as __gio import glib as __glib import gobject as __gobject import gobject._gobject as __gobject__gobject class AppInfo(__gobject.GInterface): # no doc def add_supports_type(self, *args, **kwargs): # real signature unknown pass def can_delete(self, *args, **kwargs): # real signature unknown pass def can_remove_supports_type(self, *args, **kwargs): # real signature unknown pass def delete(self, *args, **kwargs): # real signature unknown pass def dup(self, *args, **kwargs): # real signature unknown pass def equal(self, *args, **kwargs): # real signature unknown pass def get_commandline(self, *args, **kwargs): # real signature unknown pass def get_description(self, *args, **kwargs): # real signature unknown pass def get_executable(self, *args, **kwargs): # real signature unknown pass def get_icon(self, *args, **kwargs): # real signature unknown pass def get_id(self, *args, **kwargs): # real signature unknown pass def get_name(self, *args, **kwargs): # real signature unknown pass def launch(self, files=None, launch_context=None): # real signature unknown; restored from __doc__ """ launch (files=None, launch_context=None) -> gboolean Launches the application. Passes files to the launched application as arguments, using the optional launch_context to get information about the details of the launcher (like what screen it is on). On error, error will be set accordingly. Note that even if the launch is successful the application launched can fail to start if it runs into problems during startup. There is no way to detect this. Some URIs can be changed when passed through a gio.File (for instance unsupported uris with strange formats like mailto:), so if you have a textual uri you want to pass in as argument, consider using gio.AppInfo.launch_uris() instead. 
""" pass def launch_uris(self, files=None, launch_context=None): # real signature unknown; restored from __doc__ """ launch_uris (files=None, launch_context=None) -> gboolean Launches the application. Passes files to the launched application as arguments, using the optional launch_context to get information about the details of the launcher (like what screen it is on). On error, error will be set accordingly. Note that even if the launch is successful the application launched can fail to start if it runs into problems during startup. There is no way to detect this. """ pass def remove_supports_type(self, *args, **kwargs): # real signature unknown pass def set_as_default_for_extension(self, *args, **kwargs): # real signature unknown pass def set_as_default_for_type(self, *args, **kwargs): # real signature unknown pass def should_show(self, *args, **kwargs): # real signature unknown pass def supports_files(self, *args, **kwargs): # real signature unknown pass def supports_uris(self, *args, **kwargs): # real signature unknown pass def __eq__(self, y): # real signature unknown; restored from __doc__ """ x.__eq__(y) <==> x==y """ pass def __ge__(self, y): # real signature unknown; restored from __doc__ """ x.__ge__(y) <==> x>=y """ pass def __gt__(self, y): # real signature unknown; restored from __doc__ """ x.__gt__(y) <==> x>y """ pass def __init__(self, *args, **kwargs): # real signature unknown pass def __le__(self, y): # real signature unknown; restored from __doc__ """ x.__le__(y) <==> x<=y """ pass def __lt__(self, y): # real signature unknown; restored from __doc__ """ x.__lt__(y) <==> x<y """ pass def __ne__(self, y): # real signature unknown; restored from __doc__ """ x.__ne__(y) <==> x!=y """ pass def __repr__(self): # real signature unknown; restored from __doc__ """ x.__repr__() <==> repr(x) """ pass __gtype__ = None # (!) real value is ''
gpl-2.0
xxshutong/openerp-7.0
openerp/addons/project/res_config.py
55
4724
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Business Applications # Copyright (C) 2004-2012 OpenERP S.A. (<http://openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv from openerp.tools.translate import _ class project_configuration(osv.osv_memory): _name = 'project.config.settings' _inherit = 'res.config.settings' _columns = { 'module_project_mrp': fields.boolean('Generate tasks from sale orders', help ="""This feature automatically creates project tasks from service products in sale orders. More precisely, tasks are created for procurement lines with product of type 'Service', procurement method 'Make to Order', and supply method 'Manufacture'. This installs the module project_mrp."""), 'module_pad': fields.boolean("Use integrated collaborative note pads on task", help="""Lets the company customize which Pad installation should be used to link to new pads (by default, http://ietherpad.com/). 
This installs the module pad."""), 'module_project_timesheet': fields.boolean("Record timesheet lines per tasks", help="""This allows you to transfer the entries under tasks defined for Project Management to the timesheet line entries for particular date and user, with the effect of creating, editing and deleting either ways. This installs the module project_timesheet."""), 'module_project_long_term': fields.boolean("Manage resources planning on gantt view", help="""A long term project management module that tracks planning, scheduling, and resource allocation. This installs the module project_long_term."""), 'module_project_issue': fields.boolean("Track issues and bugs", help="""Provides management of issues/bugs in projects. This installs the module project_issue."""), 'time_unit': fields.many2one('product.uom', 'Working time unit', required=True, help="""This will set the unit of measure used in projects and tasks."""), 'module_project_issue_sheet': fields.boolean("Invoice working time on issues", help="""Provides timesheet support for the issues/bugs management in project. 
This installs the module project_issue_sheet."""), 'group_tasks_work_on_tasks': fields.boolean("Log work activities on tasks", implied_group='project.group_tasks_work_on_tasks', help="Allows you to compute work on tasks."), 'group_time_work_estimation_tasks': fields.boolean("Manage time estimation on tasks", implied_group='project.group_time_work_estimation_tasks', help="Allows you to compute Time Estimation on tasks."), 'group_manage_delegation_task': fields.boolean("Allow task delegation", implied_group='project.group_delegate_task', help="Allows you to delegate tasks to other users."), } def get_default_time_unit(self, cr, uid, fields, context=None): user = self.pool.get('res.users').browse(cr, uid, uid, context=context) return {'time_unit': user.company_id.project_time_mode_id.id} def set_time_unit(self, cr, uid, ids, context=None): config = self.browse(cr, uid, ids[0], context) user = self.pool.get('res.users').browse(cr, uid, uid, context) user.company_id.write({'project_time_mode_id': config.time_unit.id}) def onchange_time_estimation_project_timesheet(self, cr, uid, ids, group_time_work_estimation_tasks, module_project_timesheet): if group_time_work_estimation_tasks or module_project_timesheet: return {'value': {'group_tasks_work_on_tasks': True}} return {} # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
MihaiMoldovanu/ansible
test/units/module_utils/basic/test_log.py
63
10557
# -*- coding: utf-8 -*- # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # Make coding more python3-ish from __future__ import (absolute_import, division) __metaclass__ = type import sys import json import syslog from ansible.compat.tests import unittest from ansible.compat.tests.mock import patch, MagicMock from units.mock.procenv import swap_stdin_and_argv import ansible.module_utils.basic try: # Python 3.4+ from importlib import reload except ImportError: # Python 2 has reload as a builtin # Ignoring python3.0-3.3 (those have imp.reload if we decide we care) pass class TestAnsibleModuleSysLogSmokeTest(unittest.TestCase): def setUp(self): args = json.dumps(dict(ANSIBLE_MODULE_ARGS={})) # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually self.stdin_swap = swap_stdin_and_argv(stdin_data=args) self.stdin_swap.__enter__() ansible.module_utils.basic._ANSIBLE_ARGS = None self.am = ansible.module_utils.basic.AnsibleModule( argument_spec=dict(), ) self.am._name = 'unittest' self.has_journal = ansible.module_utils.basic.has_journal if self.has_journal: # Systems with journal can still test syslog ansible.module_utils.basic.has_journal = False def tearDown(self): # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually 
self.stdin_swap.__exit__(None, None, None) ansible.module_utils.basic.has_journal = self.has_journal def test_smoketest_syslog(self): # These talk to the live daemons on the system. Need to do this to # show that what we send doesn't cause an issue once it gets to the # daemon. These are just smoketests to test that we don't fail. self.am.log(u'Text string') self.am.log(u'Toshio くらとみ non-ascii test') self.am.log(b'Byte string') self.am.log(u'Toshio くらとみ non-ascii test'.encode('utf-8')) self.am.log(b'non-utf8 :\xff: test') class TestAnsibleModuleJournaldSmokeTest(unittest.TestCase): def setUp(self): args = json.dumps(dict(ANSIBLE_MODULE_ARGS={})) # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually self.stdin_swap = swap_stdin_and_argv(stdin_data=args) self.stdin_swap.__enter__() ansible.module_utils.basic._ANSIBLE_ARGS = None self.am = ansible.module_utils.basic.AnsibleModule( argument_spec=dict(), ) self.am._name = 'unittest' def tearDown(self): # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually self.stdin_swap.__exit__(None, None, None) @unittest.skipUnless(ansible.module_utils.basic.has_journal, 'python systemd bindings not installed') def test_smoketest_journal(self): # These talk to the live daemons on the system. Need to do this to # show that what we send doesn't cause an issue once it gets to the # daemon. These are just smoketests to test that we don't fail. 
self.am.log(u'Text string') self.am.log(u'Toshio くらとみ non-ascii test') self.am.log(b'Byte string') self.am.log(u'Toshio くらとみ non-ascii test'.encode('utf-8')) self.am.log(b'non-utf8 :\xff: test') class TestAnsibleModuleLogSyslog(unittest.TestCase): """Test the AnsibleModule Log Method""" py2_output_data = { u'Text string': b'Text string', u'Toshio くらとみ non-ascii test': u'Toshio くらとみ non-ascii test'.encode('utf-8'), b'Byte string': b'Byte string', u'Toshio くらとみ non-ascii test'.encode('utf-8'): u'Toshio くらとみ non-ascii test'.encode('utf-8'), b'non-utf8 :\xff: test': b'non-utf8 :\xff: test'.decode('utf-8', 'replace').encode('utf-8'), } py3_output_data = { u'Text string': u'Text string', u'Toshio くらとみ non-ascii test': u'Toshio くらとみ non-ascii test', b'Byte string': u'Byte string', u'Toshio くらとみ non-ascii test'.encode('utf-8'): u'Toshio くらとみ non-ascii test', b'non-utf8 :\xff: test': b'non-utf8 :\xff: test'.decode('utf-8', 'replace') } def setUp(self): args = json.dumps(dict(ANSIBLE_MODULE_ARGS={})) # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually self.stdin_swap = swap_stdin_and_argv(stdin_data=args) self.stdin_swap.__enter__() ansible.module_utils.basic._ANSIBLE_ARGS = None self.am = ansible.module_utils.basic.AnsibleModule( argument_spec=dict(), ) self.am._name = 'unittest' self.has_journal = ansible.module_utils.basic.has_journal if self.has_journal: # Systems with journal can still test syslog ansible.module_utils.basic.has_journal = False def tearDown(self): # teardown/reset ansible.module_utils.basic.has_journal = self.has_journal # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually self.stdin_swap.__exit__(None, None, None) @patch('syslog.syslog', autospec=True) def test_no_log(self, mock_func): no_log = self.am.no_log self.am.no_log = True self.am.log('unittest no_log') self.assertFalse(mock_func.called) self.am.no_log = False self.am.log('unittest no_log') 
mock_func.assert_called_once_with(syslog.LOG_INFO, 'unittest no_log') self.am.no_log = no_log def test_output_matches(self): if sys.version_info >= (3,): output_data = self.py3_output_data else: output_data = self.py2_output_data for msg, param in output_data.items(): with patch('syslog.syslog', autospec=True) as mock_func: self.am.log(msg) mock_func.assert_called_once_with(syslog.LOG_INFO, param) class TestAnsibleModuleLogJournal(unittest.TestCase): """Test the AnsibleModule Log Method""" output_data = { u'Text string': u'Text string', u'Toshio くらとみ non-ascii test': u'Toshio くらとみ non-ascii test', b'Byte string': u'Byte string', u'Toshio くらとみ non-ascii test'.encode('utf-8'): u'Toshio くらとみ non-ascii test', b'non-utf8 :\xff: test': b'non-utf8 :\xff: test'.decode('utf-8', 'replace') } # overriding run lets us use context managers for setup/teardown-esque behavior def setUp(self): args = json.dumps(dict(ANSIBLE_MODULE_ARGS={})) # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually self.stdin_swap = swap_stdin_and_argv(stdin_data=args) self.stdin_swap.__enter__() ansible.module_utils.basic._ANSIBLE_ARGS = None self.am = ansible.module_utils.basic.AnsibleModule( argument_spec=dict(), ) self.am._name = 'unittest' self.has_journal = ansible.module_utils.basic.has_journal ansible.module_utils.basic.has_journal = True self.module_patcher = None # In case systemd-python is not installed if not self.has_journal: self.module_patcher = patch.dict('sys.modules', {'systemd': MagicMock(), 'systemd.journal': MagicMock()}) self.module_patcher.start() try: reload(ansible.module_utils.basic) except NameError: self._fake_out_reload(ansible.module_utils.basic) def tearDown(self): # unittest doesn't have a clean place to use a context manager, so we have to enter/exit manually self.stdin_swap.__exit__(None, None, None) # teardown/reset ansible.module_utils.basic.has_journal = self.has_journal if self.module_patcher: self.module_patcher.stop() 
reload(ansible.module_utils.basic) @patch('systemd.journal.send') def test_no_log(self, mock_func): no_log = self.am.no_log self.am.no_log = True self.am.log('unittest no_log') self.assertFalse(mock_func.called) self.am.no_log = False self.am.log('unittest no_log') self.assertEqual(mock_func.called, 1) # Message # call_args is a 2-tuple of (arg_list, kwarg_dict) self.assertTrue(mock_func.call_args[0][0].endswith('unittest no_log'), msg='Message was not sent to log') # log adds this journal field self.assertIn('MODULE', mock_func.call_args[1]) self.assertIn('basic.py', mock_func.call_args[1]['MODULE']) self.am.no_log = no_log def test_output_matches(self): for msg, param in self.output_data.items(): with patch('systemd.journal.send', autospec=True) as mock_func: self.am.log(msg) self.assertEqual(mock_func.call_count, 1, msg='journal.send not called exactly once') self.assertTrue(mock_func.call_args[0][0].endswith(param)) @patch('systemd.journal.send') def test_log_args(self, mock_func): self.am.log('unittest log_args', log_args=dict(TEST='log unittest')) self.assertEqual(mock_func.called, 1) self.assertTrue(mock_func.call_args[0][0].endswith('unittest log_args'), msg='Message was not sent to log') # log adds this journal field self.assertIn('MODULE', mock_func.call_args[1]) self.assertIn('basic.py', mock_func.call_args[1]['MODULE']) # We added this journal field self.assertIn('TEST', mock_func.call_args[1]) self.assertIn('log unittest', mock_func.call_args[1]['TEST'])
gpl-3.0
mattseymour/django
tests/gis_tests/geoadmin/tests.py
21
4334
from django.contrib.gis import admin from django.contrib.gis.geos import Point from django.test import TestCase, override_settings, skipUnlessDBFeature from django.test.utils import patch_logger from .admin import UnmodifiableAdmin from .models import City, site @skipUnlessDBFeature("gis_enabled") @override_settings(ROOT_URLCONF='django.contrib.gis.tests.geoadmin.urls') class GeoAdminTest(TestCase): def test_ensure_geographic_media(self): geoadmin = site._registry[City] admin_js = geoadmin.media.render_js() self.assertTrue(any(geoadmin.openlayers_url in js for js in admin_js)) def test_olmap_OSM_rendering(self): delete_all_btn = """<a href="javascript:geodjango_point.clearFeatures()">Delete all Features</a>""" original_geoadmin = site._registry[City] params = original_geoadmin.get_map_widget(City._meta.get_field('point')).params result = original_geoadmin.get_map_widget(City._meta.get_field('point'))( ).render('point', Point(-79.460734, 40.18476), params) self.assertIn( """geodjango_point.layers.base = new OpenLayers.Layer.OSM("OpenStreetMap (Mapnik)");""", result) self.assertIn(delete_all_btn, result) site.unregister(City) site.register(City, UnmodifiableAdmin) try: geoadmin = site._registry[City] params = geoadmin.get_map_widget(City._meta.get_field('point')).params result = geoadmin.get_map_widget(City._meta.get_field('point'))( ).render('point', Point(-79.460734, 40.18476), params) self.assertNotIn(delete_all_btn, result) finally: site.unregister(City) site.register(City, original_geoadmin.__class__) def test_olmap_WMS_rendering(self): geoadmin = admin.GeoModelAdmin(City, site) result = geoadmin.get_map_widget(City._meta.get_field('point'))( ).render('point', Point(-79.460734, 40.18476)) self.assertIn( """geodjango_point.layers.base = new OpenLayers.Layer.WMS("OpenLayers WMS", """ """"http://vmap0.tiles.osgeo.org/wms/vmap0", {layers: 'basic', format: 'image/jpeg'});""", result) def test_olwidget_has_changed(self): """ Changes are accurately noticed by 
OpenLayersWidget. """ geoadmin = site._registry[City] form = geoadmin.get_changelist_form(None)() has_changed = form.fields['point'].has_changed initial = Point(13.4197458572965953, 52.5194108501149799, srid=4326) data_same = "SRID=3857;POINT(1493879.2754093995 6894592.019687599)" data_almost_same = "SRID=3857;POINT(1493879.2754093990 6894592.019687590)" data_changed = "SRID=3857;POINT(1493884.0527237 6894593.8111804)" self.assertTrue(has_changed(None, data_changed)) self.assertTrue(has_changed(initial, "")) self.assertFalse(has_changed(None, "")) self.assertFalse(has_changed(initial, data_same)) self.assertFalse(has_changed(initial, data_almost_same)) self.assertTrue(has_changed(initial, data_changed)) def test_olwidget_empty_string(self): geoadmin = site._registry[City] form = geoadmin.get_changelist_form(None)({'point': ''}) with patch_logger('django.contrib.gis', 'error') as logger_calls: output = str(form['point']) self.assertInHTML( '<textarea id="id_point" class="vWKTField required" cols="150"' ' rows="10" name="point"></textarea>', output ) self.assertEqual(logger_calls, []) def test_olwidget_invalid_string(self): geoadmin = site._registry[City] form = geoadmin.get_changelist_form(None)({'point': 'INVALID()'}) with patch_logger('django.contrib.gis', 'error') as logger_calls: output = str(form['point']) self.assertInHTML( '<textarea id="id_point" class="vWKTField required" cols="150"' ' rows="10" name="point"></textarea>', output ) self.assertEqual(len(logger_calls), 1) self.assertEqual( logger_calls[0], "Error creating geometry from value 'INVALID()' (String input " "unrecognized as WKT EWKT, and HEXEWKB.)" )
bsd-3-clause
aseber/OpenCV
ZBarImageReader.py
2
2601
#!/usr/bin/env python import cv2 # import rospy # import roslib import numpy as np import os.path import zbar from GenericCascade import GenericCascade # roslib.load_manifest('object_detection') # from sensor_msgs.msg import Image # from cv_bridge import CvBridge, CvBridgeError ''' ############################################################# # IEEE Hardware Team 2016 # # Created by: Austin Seber # # Email: aseber@techsouth.cc # # # # Created for: Based on work created by Nicholas Fry. # # Detects and returns the QRCode based # # on an input image. Returns a tuple of # # {image, centerX, centerY, # # {symbols{type, content}}} # # Created Date: Oct. 6th, 2015 # # # # Modified by: N/A # # Modified Date: N/A # # Reason Mod.: N/A # ############################################################# ''' def ZBarImageReader(image): #initialize ZBAR by creating a reader scanner = zbar.ImageScanner() scanner.parse_config('enable') grayImage = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY) height, width, channels = image.shape imageString = grayImage.tostring() zbar_image = zbar.Image(width, height, 'Y800', imageString) scanner.scan(zbar_image) symbols = [] for symbol in zbar_image: # print 'decoded', symbol.type, 'symbol', '"%s"' % symbol.data symbols.append({'type':symbol.type, 'data':symbol.data}) return {'image':image, 'centerX':int(round(width/2)), 'centerY':int(round(height/2)), 'symbols':symbols} def main(): cascade_path = '/home/aseber/Desktop/ieeeData/IEEEOpenCV2016/ROS/src/object_detection/src/QR/QRCodeCascadeRC1/cascade.xml' camera = cv2.VideoCapture(-1) while(True): _, image = camera.read() images = GenericCascade(image, cascade_path, 0.0) for currentImage in images: tup = ZBarImageReader(image) symbols = tup['symbols'] for symbol in symbols: print symbol['data'] cv2.waitKey(1) cv2.imshow("Image window", image) #Destroy open windows to prevent memory leaks cv2.destroyAllWindows() if __name__ == '__main__': main()
apache-2.0
cdgallahue/atomic-turbine
MVP.py
1
5650
import json import requests import time import urllib2 import psycopg2 import sys from psycopg2 import sql #import websocket def checkURL(URL): request = requests.get(URL) if request.status_code == 200: return True else: return False def getTemp(turbine): url ='https://turbine-farm.run.aws-usw02-pr.ice.predix.io/api/turbines/' + str(turbine) + '/sensors/temperature' ##grab value and assign it to temperature if checkURL(url): temperature = urllib2.urlopen(url).read() if temperature == '{}': return 'SensorError' myArray = temperature.split(':') myValue = myArray[3].split('}') return float(myValue[0]) else: return 'SensorError' def getVoltage(turbine): str(turbine) url ='https://turbine-farm.run.aws-usw02-pr.ice.predix.io/api/turbines/' + str(turbine) + '/sensors/voltage' ##grab value and assign it to voltage if checkURL(url): voltage = urllib2.urlopen(url).read() if voltage == '{}': return 'SensorError' myArray = voltage.split(':') myValue = myArray[3].split('}') return float(myValue[0]) else: return 'SensorError' def getHeartBeat(turbine): url = 'https://turbine-farm.run.aws-usw02-pr.ice.predix.io/api/turbines/' + str(turbine) + '/heartbeat' if checkURL(url): heartbeat = urllib2.urlopen(url).read() if heartbeat == '{}': return 'OFFLINE' myArray = heartbeat.split(':') size = len(myArray) cleanResult = myArray[size-1].split("\"") return cleanResult[1] else: return 'OFFLINE' def getTimeStamp(turbine): url ='https://turbine-farm.run.aws-usw02-pr.ice.predix.io/api/turbines/' + str(turbine) + '/sensors/temperature' ##grab value and assign it to voltage if checkURL(url): voltage = urllib2.urlopen(url).read() if voltage == '{}': return '' myArray = voltage.split(':') myValue = myArray[1].split(',') return myValue[0] else: return time.time() * 1000 #headers = {'Authorization': 'Bearer 
eyJhbGciOiJSUzI1NiIsImtpZCI6ImxlZ2FjeS10b2tlbi1rZXkiLCJ0eXAiOiJKV1QifQ.eyJqdGkiOiI3NzUzZjZhNWFjNTc0ZjFmODc1YTIwNzM0Y2U5MTlmZSIsInN1YiI6InRzLWNsaWVudDEiLCJzY29wZSI6WyJ1YWEucmVzb3VyY2UiLCJ0aW1lc2VyaWVzLnpvbmVzLmQ5N2Y1OTUzLTJjMDctNGU4Zi1hYzBkLThiOGRmODk3MTM1ZS5pbmdlc3QiLCJ1YWEubm9uZSIsInRpbWVzZXJpZXMuem9uZXMuZDk3ZjU5NTMtMmMwNy00ZThmLWFjMGQtOGI4ZGY4OTcxMzVlLnVzZXIiLCJ0aW1lc2VyaWVzLnpvbmVzLmQ5N2Y1OTUzLTJjMDctNGU4Zi1hYzBkLThiOGRmODk3MTM1ZS5xdWVyeSJdLCJjbGllbnRfaWQiOiJ0cy1jbGllbnQxIiwiY2lkIjoidHMtY2xpZW50MSIsImF6cCI6InRzLWNsaWVudDEiLCJncmFudF90eXBlIjoiY2xpZW50X2NyZWRlbnRpYWxzIiwicmV2X3NpZyI6IjlkYWMxM2ZmIiwiaWF0IjoxNTAwMDU1NDUyLCJleHAiOjE1MDAwOTg2NTIsImlzcyI6Imh0dHBzOi8vZmYyMzU5ZDYtMDViNC00YTBmLTkwMDEtMjUzM2M3N2NmZTlkLnByZWRpeC11YWEucnVuLmF3cy11c3cwMi1wci5pY2UucHJlZGl4LmlvL29hdXRoL3Rva2VuIiwiemlkIjoiZmYyMzU5ZDYtMDViNC00YTBmLTkwMDEtMjUzM2M3N2NmZTlkIiwiYXVkIjpbInRpbWVzZXJpZXMuem9uZXMuZDk3ZjU5NTMtMmMwNy00ZThmLWFjMGQtOGI4ZGY4OTcxMzVlIiwidWFhIiwidHMtY2xpZW50MSJdfQ.r8QqCe3OW_oz4EnUh5BiUFbL9Dy0fXwQ0l1-sfPBQmCnk5ZzlYpVbp_xBDgU-hfLIaZyJE_COTe7fV73gfi7I9pk49Ravq_pnSYxFN4ed3G7QsPw4WjJyb07wIYeaEfZv4EsOU6xwWVms86nIF4mq0pRC_dTIPNXt66SN9C3GR9xfgGqd_x8f4t4onEIAUQ10tRImaLW2_Yb7c19PHK3dENLD5p4dXswCP5sBRlAbV9oA37rAbzgpaGOeLbQWe21w1PQ3sHwmxoLYBdDnZDizoWZa1mcOf1tcKCdHVXSxDZQKCdIY_fOLAB93K1-iqfAWOD2944ck2rkfMDa5vJq1g','Predix-Zone-Id': 'd97f5953-2c07-4e8f-ac0d-8b8df897135e'} #Payload: #{"start": 1500051600000,"end": 1500058800000,"tags": [{"name": "Atomice-turbine-1-Temperature", "filters":{ "quality": 2}}] #ingestionUrl = 'https://gateway-predix-data-services.run.aws-usw02-pr.ice.predix.io/v1/stream/messages' #while (1): # for i in [1,2,3]: # for variable in ['Temperature','Voltage','Time','Heartbeat']: # myString = 'Atomic-turbine-' + str(i) + '-' + variable # data = {'messageID': 'Atomic-[]','body': [{'name': myString,'datapoints': [[getTimeStamp(i),getTemp(i),2]]}]} #data = {'messageID': 'Atomic-[]','body': [{'name': 'Atomice-turbine-1-Temperature','datapoints': 
[[getTimeStamp(1),getTemp(1),2]]}]} #r = requests.get(ingestionUrl, data=json.dumps(data), headers=headers) #print(r) #{"start": 1500051600000,"end": 1500058800000,"tags": [{"name": "Atomic-turbine-1-Temperature"}], "quality": 2}#,"limit": 1000,"aggregations": [{"type": "avg","interval": "1h"}],"filters": {"attributes": {"host": "<host>","type": "<type>"},"measurements": {"condition": "ge","values": "23.1"},"qualities": {"values": ["0","3"]}}}]} #time.sleep(10) #print('Thanks for playing') while (1): try: conn = psycopg2.connect(dbname='energystream', user='johndwyer', host='localhost', password='') conn.autocommit = True except: print("i could not connect") sys.exit() cur = conn.cursor() #print voltage of each turbine for i in [1, 2, 3]: turbineid = i voltage = getVoltage(i) temp = getTemp(i) heartbeat = getHeartBeat(i) timestamp = getTimeStamp(i) print('turbine {0}: Voltage->{1} -- Temperature->{2} -- Heartbeat->{3} -- Timestamp->{4}'.format(i, voltage, temp, heartbeat, timestamp)) insert = "INSERT INTO energystream (voltage, time, temp, heartbeat, turbineid) VALUES ('" + str(voltage) + "', '" + str(timestamp) + "', '" + str(temp) + "', '" + str(heartbeat) + "', " + str(i) + ");" cur.execute(insert) time.sleep(2) cur.close() #use .datetime.fromtimestamp(ms/1000) to get date formate ## wait 2 seconds before printing again
mit
stuart-knock/tvb-library
contrib/from_articles/region_deterministic_bnm_wc.py
5
3642
# -*- coding: utf-8 -*- """ What: Reproduces Figures 23 and 24 of Sanz-Leon P., Knock, S. A., Spiegler, A. and Jirsa V. Mathematical framework for large-scale brain network modelling in The Virtual Brain. Neuroimage, 2014, (in review) Needs: A working installation of tvb Run: python region_deterministic_bnm_wc.py -s True -f True #Subsequent calls can be made with: python region_deterministic_bnm_wc.py -f True .. author:: Paula Sanz-Leon """ import numpy import argparse from tvb.simulator.lab import * import matplotlib.pylab as pylab pylab.rcParams['figure.figsize'] = 19.42, 12 # that's default image size for this interactive session pylab.rcParams.update({'font.size': 22}) parser = argparse.ArgumentParser(description='Reproduce results of Figure XX presented in Sanz-Leon et al 2014') parser.add_argument('-s','--sim', help='Run the simulations', default=False) parser.add_argument('-f','--fig', help='Plot the figures', default=False) args = vars(parser.parse_args()) speed = 4.0 simulation_length = 512 oscilator = models.WilsonCowan(c_1 = 16., c_2=12., c_3=15., c_4=3, tau_e=8., tau_i=8., a_e=1.3, a_i=2., theta_e=4., theta_i=3.7) white_matter = connectivity.Connectivity(load_default=True) white_matter.speed = numpy.array([speed]) gcs = 8 white_matter_coupling = coupling.Linear(a=2**-gcs) #Initialise an Integrator heunint = integrators.HeunDeterministic(dt=2**-4) #Initialise some Monitors with period in physical time momo = monitors.Raw() mama = monitors.TemporalAverage(period=2**-2) #Bundle them what_to_watch = (momo, mama) #Initialise a Simulator -- Model, Connectivity, Integrator, and Monitors. 
sim = simulator.Simulator(model = oscilator, connectivity = white_matter, coupling = white_matter_coupling, integrator = heunint, monitors = what_to_watch) sim.configure() LOG.info("Starting simulation...") #Perform the simulation raw_data = [] raw_time = [] tavg_data = [] tavg_time = [] for raw, tavg in sim(simulation_length=simulation_length): if not raw is None: raw_time.append(raw[0]) raw_data.append(raw[1]) if not tavg is None: tavg_time.append(tavg[0]) tavg_data.append(tavg[1]) LOG.info("Finished simulation.") #Make the lists numpy.arrays for easier use. RAW = numpy.array(raw_data) TAVG = numpy.array(tavg_data) # <codecell> numpy.save('region_deterministic_bnm_article_wc_raw.npy', RAW) numpy.save('region_deterministic_bnm_article_wc_rawtime.npy', raw_time) numpy.save('region_deterministic_bnm_article_wc_tavg.npy', TAVG) numpy.save('region_deterministic_bnm_article_wc_tavgtime.npy', tavg_time) if args['fig']: RAW = numpy.load('region_deterministic_bnm_article_wc_raw.npy') raw_time = numpy.load('region_deterministic_bnm_article_wc_rawtime.npy') #Plot temporally averaged time series figure(1) subplot(1, 2, 1) plot(raw_time, RAW[:, 0, :, 0], 'k', alpha=0.042, linewidth=3) plot(raw_time, RAW[:, 1, :, 0], 'r', alpha=0.042, linewidth=3) plot(raw_time, RAW[:, 0, :, 0].mean(axis=1), 'k', linewidth=3) plot(raw_time, RAW[:, 1, :, 0].mean(axis=1), 'r', linewidth=3) xlabel('time[ms]') #ylim([-25, 5]) xlim([0, sim.simulation_length]) subplot(1, 2, 2) plot(RAW[:, 0, :, 0], RAW[:, 1, :, 0], alpha=0.042) plot(RAW[:, 0, :, 0].mean(axis=1), RAW[:, 1, :, 0].mean(axis=1), alpha=1.) plot(RAW[0, 0, :, 0], RAW[0, 1, :, 0], 'bo', alpha=0.15) xlabel(r'$E$') ylabel(r'$I$') show() fig_name = 'wc_default_speed_' + str(int(white_matter.speed)) + '_gcs_2**-' + str(gcs) + '.pdf' savefig(fig_name) ###EoF###
gpl-2.0
EDRN/CancerDataExpo
src/edrn.rdf/edrn/rdf/labcascollectionrdfgenerator.py
2
6485
# encoding: utf-8 # Copyright 2012–2020 California Institute of Technology. ALL RIGHTS # RESERVED. U.S. Government Sponsorship acknowledged. '''LabCAS RDF Generator. ''' from .rdfgenerator import IRDFGenerator from .utils import validateAccessibleURL from Acquisition import aq_inner from edrn.rdf import _ from pysolr import Solr from rdflib.term import URIRef, Literal from zope import schema import rdflib, logging _logger = logging.getLogger(__name__) # Predicate URIs _titlePredicateURI = URIRef('http://purl.org/dc/terms/title') _typeURI = URIRef('urn:edrn:types:labcas:collection') _piPredicateURI = URIRef('urn:edrn:predicates:pi') _organPredicateURI = URIRef('urn:edrn:predicates:organ') _protocolPredicateURI = URIRef('urn:edrn:predicates:protocol') _collaborativeGroupPredicateURI = URIRef('urn:edrn:predicates:collaborativeGroup') _cardinalityPredicateURI = URIRef('urn:edrn:predicates:cardinality') # Type URIs _statsTypeURI = URIRef('urn:edrn:types:labcas:statistics') # URI prefixes _protocolPrefix = 'http://edrn.nci.nih.gov/data/protocols/' _subjectPrefix = 'https://edrn-labcas.jpl.nasa.gov/labcas-ui/c/index.html?collection_id=' # Data inconsistency; thanks to @LucaCinquini and @AshishMahabal for # https://github.com/EDRN/labcas-backend/issues/3 … # This maps from various collaborative group names in LabCAS to the # official names; if a mapping doesn't appear here, then it's an # invalid collaborative group like "TBD" and we don't put it in RDF. _inconsistentCollaborativeGroupNaming = { 'Breast and Gynecologic Cancers Research Group': 'Breast and Gynecologic Cancers Research Group', 'Breast and Gynecologic': 'Breast and Gynecologic Cancers Research Group', 'Breast/GYN': 'Breast and Gynecologic Cancers Research Group', 'G.I. and Other Associated Cancers Research Group': 'G.I. and Other Associated Cancers Research Group', 'GI and Other Associated': 'G.I. 
and Other Associated Cancers Research Group', 'Lung and Upper Aerodigestive Cancers Research Group': 'Lung and Upper Aerodigestive Cancers Research Group', 'Lung and Upper Aerodigestive': 'Lung and Upper Aerodigestive Cancers Research Group', 'Lung and Upper Areodigestive': 'Lung and Upper Aerodigestive Cancers Research Group', 'Prostate and Urologic Cancers Research Group': 'Prostate and Urologic Cancers Research Group', 'Prostate and Urologic': 'Prostate and Urologic Cancers Research Group', } class ILabCASCollectionRDFGenerator(IRDFGenerator): '''Generator for RDF using data from LabCAS.''' labcasSolrURL = schema.TextLine( title=_('LabCAS Data Access API URL'), description=_('The Uniform Resource Locator to the LabCAS API.'), required=True, constraint=validateAccessibleURL, default='https://edrn-labcas.jpl.nasa.gov/data-access-api' ) username = schema.TextLine( title=_('Username'), description=_('Username to authenticate with; use a service account if available'), required=True, default='service' ) password = schema.TextLine( title=_('Password'), description=_('Password to confirm the identity of the username; this will be visible!'), required=True, ) class LabCASCollectionGraphGenerator(object): '''A graph generator that produces statements about EDRN's science data collections.''' def __init__(self, context): self.context = context def generateGraph(self): context = aq_inner(self.context) graph = rdflib.Graph() solr = Solr(context.labcasSolrURL + '/datasets', auth=(context.username, context.password)) numDatasets = solr.search(q='*:*', rows=0).hits solr = Solr(context.labcasSolrURL + '/files', auth=(context.username, context.password)) numFiles = solr.search(q='*:*', rows=0).hits solr = Solr(context.labcasSolrURL + '/collections', auth=(context.username, context.password)) results = solr.search(q='*:*', rows=999999) # 😮 TODO This'll fail once we get to a million collections numCollections = results.hits for i in results: collectionID, name, consortia = 
i.get('id'), i.get('CollectionName', '«unknown»'), i.get('Consortium', []) if not collectionID: _logger.warn('😮 The ``id`` is missing from a LabCAS collection named %s; skipping', name) continue if 'EDRN' not in consortia: _logger.warn('😌 Collection ``%s`` belongs to %r, not EDRN, so skipping it', collectionID, consortia) continue subjectURI = URIRef(_subjectPrefix + collectionID) # ⚠️ Note that we are not URI-escaping anything here, hope that's oK! graph.add((subjectURI, rdflib.RDF.type, URIRef(_typeURI))) graph.add((subjectURI, _titlePredicateURI, Literal(name))) for pi in i.get('LeadPI', []): graph.add((subjectURI, _piPredicateURI, Literal(pi))) for organ in i.get('Organ', []): graph.add((subjectURI, _organPredicateURI, Literal(organ))) for protocolID in i.get('ProtocolId', []): try: protocolID = int(protocolID) graph.add((subjectURI, _protocolPredicateURI, URIRef(f'{_protocolPrefix}{protocolID}'))) except ValueError: _logger.warn('😮 The protocol ID «%s» for collection «%s» looks invalid; I will skip it', protocolID, collectionID) for group in i.get('CollaborativeGroup', []): group = _inconsistentCollaborativeGroupNaming.get(group) if group is not None: graph.add((subjectURI, _collaborativeGroupPredicateURI, Literal(group))) # And summary info graph.add((URIRef(context.labcasSolrURL + '/collections'), _cardinalityPredicateURI, Literal(str(numCollections)))) graph.add((URIRef(context.labcasSolrURL + '/collections'), rdflib.RDF.type, _statsTypeURI)) graph.add((URIRef(context.labcasSolrURL + '/datasets'), _cardinalityPredicateURI, Literal(str(numDatasets)))) graph.add((URIRef(context.labcasSolrURL + '/datasets'), rdflib.RDF.type, _statsTypeURI)) graph.add((URIRef(context.labcasSolrURL + '/files'), _cardinalityPredicateURI, Literal(str(numFiles)))) graph.add((URIRef(context.labcasSolrURL + '/files'), rdflib.RDF.type, _statsTypeURI)) return graph
apache-2.0
kmacinnis/sympy
sympy/core/tests/test_evalf.py
4
16253
from sympy import (Add, ceiling, cos, E, Eq, exp, factorial, fibonacci, floor, Function, GoldenRatio, I, log, Mul, oo, pi, Pow, Rational, sin, sqrt, sstr, Sum, sympify, S, integrate, atan, product) from sympy.core.evalf import complex_accuracy, PrecisionExhausted, scaled_zero from sympy.core.compatibility import long from sympy.mpmath import inf, ninf, nan from sympy.abc import n, x, y from sympy.mpmath.libmp.libmpf import from_float from sympy.utilities.pytest import raises, XFAIL def NS(e, n=15, **options): return sstr(sympify(e).evalf(n, **options), full_prec=True) def test_evalf_helpers(): assert complex_accuracy((from_float(2.0), None, 35, None)) == 35 assert complex_accuracy((from_float(2.0), from_float(10.0), 35, 100)) == 37 assert complex_accuracy( (from_float(2.0), from_float(1000.0), 35, 100)) == 43 assert complex_accuracy((from_float(2.0), from_float(10.0), 100, 35)) == 35 assert complex_accuracy( (from_float(2.0), from_float(1000.0), 100, 35)) == 35 def test_evalf_basic(): assert NS('pi', 15) == '3.14159265358979' assert NS('2/3', 10) == '0.6666666667' assert NS('355/113-pi', 6) == '2.66764e-7' assert NS('16*atan(1/5)-4*atan(1/239)', 15) == '3.14159265358979' def test_cancellation(): assert NS(Add(pi, Rational(1, 10**1000), -pi, evaluate=False), 15, maxn=1200) == '1.00000000000000e-1000' def test_evalf_powers(): assert NS('pi**(10**20)', 10) == '1.339148777e+49714987269413385435' assert NS(pi**(10**100), 10) == ('4.946362032e+4971498726941338543512682882' '9089887365167832438044244613405349992494711208' '95526746555473864642912223') assert NS('2**(1/10**50)', 15) == '1.00000000000000' assert NS('2**(1/10**50)-1', 15) == '6.93147180559945e-51' # Evaluation of Rump's ill-conditioned polynomial def test_evalf_rump(): a = 1335*y**6/4 + x**2*(11*x**2*y**2 - y**6 - 121*y**4 - 2) + 11*y**8/2 + x/(2*y) assert NS(a, 15, subs={x: 77617, y: 33096}) == '-0.827396059946821' def test_evalf_complex(): assert NS('2*sqrt(pi)*I', 10) == '3.544907702*I' assert NS('3+3*I', 
15) == '3.00000000000000 + 3.00000000000000*I' assert NS('E+pi*I', 15) == '2.71828182845905 + 3.14159265358979*I' assert NS('pi * (3+4*I)', 15) == '9.42477796076938 + 12.5663706143592*I' assert NS('I*(2+I)', 15) == '-1.00000000000000 + 2.00000000000000*I' @XFAIL def test_evalf_complex_bug(): assert NS('(pi+E*I)*(E+pi*I)', 15) in ('0.e-15 + 17.25866050002*I', '0.e-17 + 17.25866050002*I', '-0.e-17 + 17.25866050002*I') def test_evalf_complex_powers(): assert NS('(E+pi*I)**100000000000000000') == \ '-3.58896782867793e+61850354284995199 + 4.58581754997159e+61850354284995199*I' # XXX: rewrite if a+a*I simplification introduced in sympy #assert NS('(pi + pi*I)**2') in ('0.e-15 + 19.7392088021787*I', '0.e-16 + 19.7392088021787*I') assert NS('(pi + pi*I)**2', chop=True) == '19.7392088021787*I' assert NS( '(pi + 1/10**8 + pi*I)**2') == '6.2831853e-8 + 19.7392088650106*I' assert NS('(pi + 1/10**12 + pi*I)**2') == '6.283e-12 + 19.7392088021850*I' assert NS('(pi + pi*I)**4', chop=True) == '-389.636364136010' assert NS( '(pi + 1/10**8 + pi*I)**4') == '-389.636366616512 + 2.4805021e-6*I' assert NS('(pi + 1/10**12 + pi*I)**4') == '-389.636364136258 + 2.481e-10*I' assert NS( '(10000*pi + 10000*pi*I)**4', chop=True) == '-3.89636364136010e+18' @XFAIL def test_evalf_complex_powers_bug(): assert NS('(pi + pi*I)**4') == '-389.63636413601 + 0.e-14*I' def test_evalf_exponentiation(): assert NS(sqrt(-pi)) == '1.77245385090552*I' assert NS(Pow(pi*I, Rational( 1, 2), evaluate=False)) == '1.25331413731550 + 1.25331413731550*I' assert NS(pi**I) == '0.413292116101594 + 0.910598499212615*I' assert NS(pi**(E + I/3)) == '20.8438653991931 + 8.36343473930031*I' assert NS((pi + I/3)**(E + I/3)) == '17.2442906093590 + 13.6839376767037*I' assert NS(exp(pi)) == '23.1406926327793' assert NS(exp(pi + E*I)) == '-21.0981542849657 + 9.50576358282422*I' assert NS(pi**pi) == '36.4621596072079' assert NS((-pi)**pi) == '-32.9138577418939 - 15.6897116534332*I' assert NS((-pi)**(-pi)) == '-0.0247567717232697 + 
0.0118013091280262*I' # An example from Smith, "Multiple Precision Complex Arithmetic and Functions" def test_evalf_complex_cancellation(): A = Rational('63287/100000') B = Rational('52498/100000') C = Rational('69301/100000') D = Rational('83542/100000') F = Rational('2231321613/2500000000') # XXX: the number of returned mantissa digits in the real part could # change with the implementation. What matters is that the returned digits are # correct; those that are showing now are correct. # >>> ((A+B*I)*(C+D*I)).expand() # 64471/10000000000 + 2231321613*I/2500000000 # >>> 2231321613*4 # 8925286452L assert NS((A + B*I)*(C + D*I), 6) == '6.44710e-6 + 0.892529*I' assert NS((A + B*I)*(C + D*I), 10) == '6.447100000e-6 + 0.8925286452*I' assert NS((A + B*I)*( C + D*I) - F*I, 5) in ('6.4471e-6 + 0.e-14*I', '6.4471e-6 - 0.e-14*I') def test_evalf_logs(): assert NS("log(3+pi*I)", 15) == '1.46877619736226 + 0.808448792630022*I' assert NS("log(pi*I)", 15) == '1.14472988584940 + 1.57079632679490*I' def test_evalf_trig(): assert NS('sin(1)', 15) == '0.841470984807897' assert NS('cos(1)', 15) == '0.540302305868140' assert NS('sin(10**-6)', 15) == '9.99999999999833e-7' assert NS('cos(10**-6)', 15) == '0.999999999999500' assert NS('sin(E*10**100)', 15) == '0.409160531722613' # Some input near roots assert NS(sin(exp(pi*sqrt(163))*pi), 15) == '-2.35596641936785e-12' assert NS(sin(pi*10**100 + Rational(7, 10**5), evaluate=False), 15, maxn=120) == \ '6.99999999428333e-5' assert NS(sin(Rational(7, 10**5), evaluate=False), 15) == \ '6.99999999428333e-5' # Check detection of various false identities def test_evalf_near_integers(): # Binet's formula f = lambda n: ((1 + sqrt(5))**n)/(2**n * sqrt(5)) assert NS(f(5000) - fibonacci(5000), 10, maxn=1500) == '5.156009964e-1046' # Some near-integer identities from # http://mathworld.wolfram.com/AlmostInteger.html assert NS('sin(2017*2**(1/5))', 15) == '-1.00000000000000' assert NS('sin(2017*2**(1/5))', 20) == '-0.99999999999999997857' assert 
NS('1+sin(2017*2**(1/5))', 15) == '2.14322287389390e-17' assert NS('45 - 613*E/37 + 35/991', 15) == '6.03764498766326e-11' def test_evalf_ramanujan(): assert NS(exp(pi*sqrt(163)) - 640320**3 - 744, 10) == '-7.499274028e-13' # A related identity A = 262537412640768744*exp(-pi*sqrt(163)) B = 196884*exp(-2*pi*sqrt(163)) C = 103378831900730205293632*exp(-3*pi*sqrt(163)) assert NS(1 - A - B + C, 10) == '1.613679005e-59' # Input that for various reasons have failed at some point def test_evalf_bugs(): assert NS(sin(1) + exp(-10**10), 10) == NS(sin(1), 10) assert NS(exp(10**10) + sin(1), 10) == NS(exp(10**10), 10) assert NS('log(1+1/10**50)', 20) == '1.0000000000000000000e-50' assert NS('log(10**100,10)', 10) == '100.0000000' assert NS('log(2)', 10) == '0.6931471806' assert NS( '(sin(x)-x)/x**3', 15, subs={x: '1/10**50'}) == '-0.166666666666667' assert NS(sin(1) + Rational( 1, 10**100)*I, 15) == '0.841470984807897 + 1.00000000000000e-100*I' assert x.evalf() == x assert NS((1 + I)**2*I, 6) == '-2.00000' d = {n: ( -1)**Rational(6, 7), y: (-1)**Rational(4, 7), x: (-1)**Rational(2, 7)} assert NS((x*(1 + y*(1 + n))).subs(d).evalf(), 6) == '0.346011 + 0.433884*I' assert NS(((-I - sqrt(2)*I)**2).evalf()) == '-5.82842712474619' assert NS((1 + I)**2*I, 15) == '-2.00000000000000' #1659 (1/2): assert NS(pi.evalf(69) - pi) == '-4.43863937855894e-71' #1659 (2/2): With the bug present, this still only fails if the # terms are in the order given here. This is not generally the case, # because the order depends on the hashes of the terms. assert NS(20 - 5008329267844*n**25 - 477638700*n**37 - 19*n, subs={n: .01}) == '19.8100000000000' assert NS(((x - 1)*((1 - x))**1000).n() ) == '(-x + 1.00000000000000)**1000*(x - 1.00000000000000)' assert NS((-x).n()) == '-x' assert NS((-2*x).n()) == '-2.00000000000000*x' assert NS((-2*x*y).n()) == '-2.00000000000000*x*y' assert cos(x).n(subs={x: 1+I}) == cos(x).subs(x, 1+I).n() #3561. 
Also NaN != mpmath.nan # In this order: # 0*nan, 0/nan, 0*inf, 0/inf # 0+nan, 0-nan, 0+inf, 0-inf # >>> n = Some Number # n*nan, n/nan, n*inf, n/inf # n+nan, n-nan, n+inf, n-inf assert (0*sin(oo)).n() == S.Zero assert (0/sin(oo)).n() == S.Zero assert (0*E**(oo)).n() == S.NaN assert (0/E**(oo)).n() == S.Zero assert (0+sin(oo)).n() == S.NaN assert (0-sin(oo)).n() == S.NaN assert (0+E**(oo)).n() == S.Infinity assert (0-E**(oo)).n() == S.NegativeInfinity assert (5*sin(oo)).n() == S.NaN assert (5/sin(oo)).n() == S.NaN assert (5*E**(oo)).n() == S.Infinity assert (5/E**(oo)).n() == S.Zero assert (5+sin(oo)).n() == S.NaN assert (5-sin(oo)).n() == S.NaN assert (5+E**(oo)).n() == S.Infinity assert (5-E**(oo)).n() == S.NegativeInfinity def test_evalf_integer_parts(): a = floor(log(8)/log(2) - exp(-1000), evaluate=False) b = floor(log(8)/log(2), evaluate=False) raises(PrecisionExhausted, lambda: a.evalf()) assert a.evalf(chop=True) == 3 assert a.evalf(maxn=500) == 2 assert b.evalf() == 3 # equals, as a fallback, can still fail but it might succeed as here assert ceiling(10*(sin(1)**2 + cos(1)**2)) == 10 assert int(floor(factorial(50)/E, evaluate=False).evalf(70)) == \ long(11188719610782480504630258070757734324011354208865721592720336800) assert int(ceiling(factorial(50)/E, evaluate=False).evalf(70)) == \ long(11188719610782480504630258070757734324011354208865721592720336801) assert int(floor((GoldenRatio**999 / sqrt(5) + Rational(1, 2))) .evalf(1000)) == fibonacci(999) assert int(floor((GoldenRatio**1000 / sqrt(5) + Rational(1, 2))) .evalf(1000)) == fibonacci(1000) def test_evalf_trig_zero_detection(): a = sin(160*pi, evaluate=False) t = a.evalf(maxn=100) assert abs(t) < 1e-100 assert t._prec < 2 assert a.evalf(chop=True) == 0 raises(PrecisionExhausted, lambda: a.evalf(strict=True)) def test_evalf_sum(): assert Sum(n,(n,1,2)).evalf() == 3. assert Sum(n,(n,1,2)).doit().evalf() == 3. 
# the next test should return instantly assert Sum(1/n,(n,1,2)).evalf() == 1.5 def test_evalf_divergent_series(): raises(ValueError, lambda: Sum(1/n, (n, 1, oo)).evalf()) raises(ValueError, lambda: Sum(n/(n**2 + 1), (n, 1, oo)).evalf()) raises(ValueError, lambda: Sum((-1)**n, (n, 1, oo)).evalf()) raises(ValueError, lambda: Sum((-1)**n, (n, 1, oo)).evalf()) raises(ValueError, lambda: Sum(n**2, (n, 1, oo)).evalf()) raises(ValueError, lambda: Sum(2**n, (n, 1, oo)).evalf()) raises(ValueError, lambda: Sum((-2)**n, (n, 1, oo)).evalf()) raises(ValueError, lambda: Sum((2*n + 3)/(3*n**2 + 4), (n, 0, oo)).evalf()) raises(ValueError, lambda: Sum((0.5*n**3)/(n**4 + 1), (n, 0, oo)).evalf()) def test_evalf_py_methods(): assert abs(float(pi + 1) - 4.1415926535897932) < 1e-10 assert abs(complex(pi + 1) - 4.1415926535897932) < 1e-10 assert abs( complex(pi + E*I) - (3.1415926535897931 + 2.7182818284590451j)) < 1e-10 raises(TypeError, lambda: float(pi + x)) def test_evalf_power_subs_bugs(): assert (x**2).evalf(subs={x: 0}) == 0 assert sqrt(x).evalf(subs={x: 0}) == 0 assert (x**Rational(2, 3)).evalf(subs={x: 0}) == 0 assert (x**x).evalf(subs={x: 0}) == 1 assert (3**x).evalf(subs={x: 0}) == 1 assert exp(x).evalf(subs={x: 0}) == 1 assert ((2 + I)**x).evalf(subs={x: 0}) == 1 assert (0**x).evalf(subs={x: 0}) == 1 def test_evalf_arguments(): raises(TypeError, lambda: pi.evalf(method="garbage")) def test_implemented_function_evalf(): from sympy.utilities.lambdify import implemented_function f = Function('f') f = implemented_function(f, lambda x: x + 1) assert str(f(x)) == "f(x)" assert str(f(2)) == "f(2)" assert f(2).evalf() == 3 assert f(x).evalf() == f(x) del f._imp_ # XXX: due to caching _imp_ would influence all other tests def test_evaluate_false(): for no in [0, False, None]: assert Add(3, 2, evaluate=no).is_Add assert Mul(3, 2, evaluate=no).is_Mul assert Pow(3, 2, evaluate=no).is_Pow assert Pow(y, 2, evaluate=True) - Pow(y, 2, evaluate=True) == 0 def test_evalf_relational(): assert 
Eq(x/5, y/10).evalf() == Eq(0.2*x, 0.1*y) def test_issue_2387(): assert not cos(sqrt(0.5 + I)).n().is_Function def test_issue_2387_bug(): from sympy import I, Expr assert abs(Expr._from_mpmath(I._to_mpmath(15), 15) - I) < 1.0e-15 def test_bugs(): from sympy import polar_lift, re assert abs(re((1 + I)**2)) < 1e-15 # anything that evalf's to 0 will do in place of polar_lift assert abs(polar_lift(0)).n() == 0 def test_subs_bugs(): from sympy import besseli assert NS('besseli(-x, y) - besseli(x, y)', subs={x: 3.5, y: 20.0}) == \ '-4.92535585957223e-10' assert NS('Piecewise((x, x>0)) + Piecewise((1-x, x>0))', subs={x: 0.1}) == \ '1.00000000000000' def test_issue_1857_2105(): # 1857 v = S('''(-27*12**(1/3)*sqrt(31)*I + 27*2**(2/3)*3**(1/3)*sqrt(31)*I)/(-2511*2**(2/3)*3**(1/3) + (29*18**(1/3) + 9*2**(1/3)*3**(2/3)*sqrt(31)*I + 87*2**(1/3)*3**(1/6)*I)**2)''') assert NS(v, 1) == '0.e-118 - 0.e-118*I' # 2105 v = S('''-(357587765856 + 18873261792*249**(1/2) + 56619785376*I*83**(1/2) + 108755765856*I*3**(1/2) + 41281887168*6**(1/3)*(1422 + 54*249**(1/2))**(1/3) - 1239810624*6**(1/3)*249**(1/2)*(1422 + 54*249**(1/2))**(1/3) - 3110400000*I*6**(1/3)*83**(1/2)*(1422 + 54*249**(1/2))**(1/3) + 13478400000*I*3**(1/2)*6**(1/3)*(1422 + 54*249**(1/2))**(1/3) + 1274950152*6**(2/3)*(1422 + 54*249**(1/2))**(2/3) + 32347944*6**(2/3)*249**(1/2)*(1422 + 54*249**(1/2))**(2/3) - 1758790152*I*3**(1/2)*6**(2/3)*(1422 + 54*249**(1/2))**(2/3) - 304403832*I*6**(2/3)*83**(1/2)*(1422 + 4*249**(1/2))**(2/3))/(175732658352 + (1106028 + 25596*249**(1/2) + 76788*I*83**(1/2))**2)''') assert NS(v, 5) == '0.077284 + 1.1104*I' assert NS(v, 1) == '0.08 + 1.*I' def test_old_docstring(): a = (E + pi*I)*(E - pi*I) assert NS(a) == '17.2586605000200' assert a.n() == 17.25866050002001 def test_issue_1707(): assert integrate(atan(x)**2, (x, -1, 1)).evalf().round(1) == 0.5 assert atan(0, evaluate=False).n() == 0 def test_evalf_mul(): # sympy should not try to expand this; it should be handled term-wise # in evalf 
through mpmath assert NS(product(1 + sqrt(n)*I, (n, 1, 500)), 1) == '5.e+567 + 2.e+568*I' def test_scaled_zero(): a, b = (([0], 1, 100, 1), -1) assert scaled_zero(100) == (a, b) assert scaled_zero(a) == (0, 1, 100, 1) a, b = (([1], 1, 100, 1), -1) assert scaled_zero(100, -1) == (a, b) assert scaled_zero(a) == (1, 1, 100, 1) raises(ValueError, lambda: scaled_zero(scaled_zero(100))) raises(ValueError, lambda: scaled_zero(100, 2)) raises(ValueError, lambda: scaled_zero(100, 0)) raises(ValueError, lambda: scaled_zero((1, 5, 1, 3))) def test_chop_value(): for i in range(-27, 28): assert (Pow(10, i)*2).n(chop=10**i) and not (Pow(10, i)).n(chop=10**i) def test_infinities(): assert oo.evalf(chop=True) == inf assert (-oo).evalf(chop=True) == ninf def test_to_mpmath(): assert sqrt(3)._to_mpmath(20)._mpf_ == (0, long(908093), -19, 20) assert S(3.2)._to_mpmath(20)._mpf_ == (0, long(838861), -18, 20) def test_issue_3533_evalf(): add = (-100000*sqrt(2500000001) + 5000000001) assert add.n() == 9.999999998e-11 assert (add*add).n() == 9.999999996e-21
bsd-3-clause
yaojingwu1992/XlsxWriter
xlsxwriter/test/comparison/test_chart_data_labels08.py
8
1708
############################################################################### # # Tests for XlsxWriter. # # Copyright (c), 2013-2015, John McNamara, jmcnamara@cpan.org # from ..excel_comparsion_test import ExcelComparisonTest from ...workbook import Workbook class TestCompareXLSXFiles(ExcelComparisonTest): """ Test file created by XlsxWriter against a file created by Excel. """ def setUp(self): self.maxDiff = None filename = 'chart_data_labels08.xlsx' test_dir = 'xlsxwriter/test/comparison/' self.got_filename = test_dir + '_test_' + filename self.exp_filename = test_dir + 'xlsx_files/' + filename self.ignore_files = [] self.ignore_elements = {} def test_create_file(self): """Test the creation of a simple XlsxWriter file.""" workbook = Workbook(self.got_filename) worksheet = workbook.add_worksheet() chart = workbook.add_chart({'type': 'scatter'}) chart.axis_ids = [45740416, 45705856] data = [ [1, 2, 3, 4, 5], [2, 4, 6, 8, 10], [3, 6, 9, 12, 15], ] worksheet.write_column('A1', data[0]) worksheet.write_column('B1', data[1]) worksheet.write_column('C1', data[2]) chart.add_series({ 'categories': '=Sheet1!$A$1:$A$5', 'values': '=Sheet1!$B$1:$B$5', 'data_labels': {'value': 1, 'position': 'right'}, }) chart.add_series({ 'categories': '=Sheet1!$A$1:$A$5', 'values': '=Sheet1!$C$1:$C$5', }) worksheet.insert_chart('E9', chart) workbook.close() self.assertExcelEqual()
bsd-2-clause
tombstone/models
official/nlp/modeling/losses/weighted_sparse_categorical_crossentropy_test.py
1
8816
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for masked LM loss."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
import tensorflow as tf

from tensorflow.python.keras import keras_parameterized  # pylint: disable=g-direct-tensorflow-import
from official.nlp.modeling import layers
from official.nlp.modeling import networks
from official.nlp.modeling.losses import weighted_sparse_categorical_crossentropy


@keras_parameterized.run_all_keras_modes
class ClassificationLossTest(keras_parameterized.TestCase):

  def create_lm_model(self,
                      vocab_size,
                      sequence_length,
                      hidden_size,
                      num_predictions,
                      output="predictions"):
    """Builds a Keras model that runs a MaskedLM head over dense inputs."""
    # First, create a transformer stack that we can use to get the LM's
    # vocabulary weight.
    xformer_stack = networks.TransformerEncoder(
        vocab_size=vocab_size,
        num_layers=1,
        sequence_length=sequence_length,
        hidden_size=hidden_size,
        num_attention_heads=4,
    )
    word_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    mask = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    type_ids = tf.keras.Input(shape=(sequence_length,), dtype=tf.int32)
    # Call the encoder once so that its embedding table gets created.
    _ = xformer_stack([word_ids, mask, type_ids])

    # Create a maskedLM from the transformer stack.
    test_layer = layers.MaskedLM(
        embedding_table=xformer_stack.get_embedding_table(),
        output=output)

    # Create a model from the masked LM layer.
    lm_input_tensor = tf.keras.Input(shape=(sequence_length, hidden_size))
    masked_lm_positions = tf.keras.Input(
        shape=(num_predictions,), dtype=tf.int32)
    output = test_layer(lm_input_tensor, masked_positions=masked_lm_positions)
    return tf.keras.Model([lm_input_tensor, masked_lm_positions], output)

  def test_loss_3d_input(self):
    """Test overall loss with a 3-dimensional input, from a masked LM."""
    vocab_size = 100
    sequence_length = 32
    hidden_size = 64
    num_predictions = 21
    model = self.create_lm_model(
        vocab_size=vocab_size,
        sequence_length=sequence_length,
        hidden_size=hidden_size,
        num_predictions=num_predictions)

    # Get the output of the masked LM.
    batch_size = 3
    lm_input_data = 10 * np.random.random_sample(
        (batch_size, sequence_length, hidden_size))
    masked_position_data = np.random.randint(
        2, size=(batch_size, num_predictions))
    output_data = model.predict([lm_input_data, masked_position_data])

    # Calculate loss.
    labels = np.random.randint(vocab_size, size=(batch_size, num_predictions))
    weights = np.random.randint(2, size=(batch_size, num_predictions))
    per_example_loss_data = weighted_sparse_categorical_crossentropy.loss(
        predictions=output_data, labels=labels, weights=weights)

    # Total loss data should have one value, and that value shouldn't be zero
    # in this case (as we're using random data).
    expected_shape = []  # Scalar
    self.assertEqual(expected_shape, per_example_loss_data.shape.as_list())
    self.assertNotAllClose(
        tf.zeros_like(per_example_loss_data), per_example_loss_data)

  def test_loss_weights_3d_input(self):
    """Test masked loss with a 3-dimensional input, from a masked LM."""
    vocab_size = 100
    sequence_length = 32
    hidden_size = 64
    num_predictions = 21
    model = self.create_lm_model(
        vocab_size=vocab_size,
        sequence_length=sequence_length,
        hidden_size=hidden_size,
        num_predictions=num_predictions)

    # Get the output of the masked LM.
    batch_size = 3
    lm_input_data = 10 * np.random.random_sample(
        (batch_size, sequence_length, hidden_size))
    masked_position_data = np.random.randint(
        2, size=(batch_size, num_predictions))
    output_data = model.predict([lm_input_data, masked_position_data])

    # Calculate a fully masked weight tensor. This should give a loss of zero.
    labels = np.random.randint(vocab_size, size=(batch_size, num_predictions))
    null_weights = np.zeros((batch_size, num_predictions))
    weighted_loss_data = weighted_sparse_categorical_crossentropy.loss(
        predictions=output_data, labels=labels, weights=null_weights)

    # Because the tensor is fully masked, the loss should be 0.
    self.assertAllClose(0, weighted_loss_data)

  def test_mismatched_predictions_and_labels_ranks_squeezes(self):
    """Test that the loss asserts when rank(predictions)-1 != rank(labels)."""
    batch_size = 3
    output_data = np.random.random_sample((batch_size, 10))
    labels = np.random.randint(10, size=(batch_size, 1))

    # All that this test tests is that the squeeze is successful.
    _ = weighted_sparse_categorical_crossentropy.loss(
        predictions=output_data, labels=labels)

  def test_mismatched_weights_and_labels_ranks_fail(self):
    """Test that the loss asserts when rank(weights) != rank(labels)."""
    # NOTE(review): docstring corrected — this test passes rank-1 weights
    # against rank-2 labels; the predictions/labels ranks are consistent.
    batch_size = 3
    output_data = np.random.random_sample((batch_size, 10, 15))
    labels = np.random.randint(10, size=(batch_size, 10))
    weights = np.random.randint(2, size=(batch_size))

    with self.assertRaisesRegex(RuntimeError, ".*of the same rank.*"):
      _ = weighted_sparse_categorical_crossentropy.loss(
          predictions=output_data, labels=labels, weights=weights)

  def test_tf_tensor_inputs(self):
    """Test that tf.Tensors can be used as inputs to the loss function."""
    batch_size = 3
    output_data = tf.convert_to_tensor(
        np.random.random_sample((batch_size, 10, 15)))
    labels = tf.convert_to_tensor(np.random.randint(10, size=(batch_size, 10)))
    weights = tf.convert_to_tensor(np.random.randint(2, size=(batch_size, 10)))

    # We're not trying to validate numerical correctness, just ensure that
    # we can in fact pass tensors to these functions without causing runtime
    # errors from the shape checking code.
    _ = weighted_sparse_categorical_crossentropy.loss(
        predictions=output_data, labels=labels, weights=weights)

  def test_legacy_lm_loss_compatibility(self):
    """Test to validate computational correctness during refactors."""
    # This is the empirical output of a masked LM with the following parameters:
    #   batch_size = 3
    #   vocab_size = 5
    #   sequence_length = 4
    #   num_predictions = 2
    output_data = np.array(
        [[[-2.5286622, -1.0963473, -1.4925185, -2.4451098, -1.2923571],
          [-2.7117882, -1.1205841, -4.02187, -0.9966936, -1.5119683]],
         [[-2.5379114, -0.82479054, -2.287932, -1.3747153, -2.053741],
          [-2.5379114, -0.82479054, -2.287932, -1.3747153, -2.053741]],
         [[-2.7760355, -1.8219438, -3.0924666, -1.0779881, -0.9407509],
          [-2.7760355, -1.8219438, -3.0924666, -1.0779881, -0.9407509]]])
    labels = np.array([[4, 0], [2, 2], [2, 1]])

    # Validate that overall loss calculations are the same.
    weights = np.array([[1, 0], [0, 0], [0, 0]])
    loss_data = weighted_sparse_categorical_crossentropy.loss(
        predictions=output_data,
        labels=labels,
        weights=weights,
        from_logits=True)
    expected_loss_data = 1.2923441
    self.assertAllClose(expected_loss_data, loss_data, rtol=1e-3)

  def test_legacy_classification_loss_compatibility(self):
    """Test to validate computational correctness during refactors."""
    # This is the empirical output of a classifier with the following params:
    #   batch_size = 2
    #   num_classes = 3
    output_data = np.array([[-1.6094601e-03, -1.0966038e+01, -6.4434357e+00],
                            [-1.6975292e-03, -6.4009643e+00, -1.0226612e+01]])
    labels = np.array([2, 1])

    # Validate that overall loss calculations are the same.
    weights = None
    loss_data = weighted_sparse_categorical_crossentropy.loss(
        predictions=output_data,
        labels=labels,
        weights=weights,
        from_logits=True)
    expected_loss_data = 6.4222
    self.assertAllClose(expected_loss_data, loss_data, rtol=1e-3)


if __name__ == "__main__":
  tf.test.main()
apache-2.0
lrowe/selenium
py/selenium/webdriver/firefox/extension_connection.py
66
2846
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements.  See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership.  The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License.  You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied.  See the License for the
# specific language governing permissions and limitations
# under the License.

import logging
import time

from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common import utils
from selenium.webdriver.remote.command import Command
from selenium.webdriver.remote.remote_connection import RemoteConnection
from selenium.webdriver.firefox.firefox_binary import FirefoxBinary

LOGGER = logging.getLogger(__name__)

# Module-level connection defaults; _URL is rebound by
# ExtensionConnection.__init__ once the real host and port are known.
PORT = 0
# HOST = None
_URL = ""


class ExtensionConnection(RemoteConnection):
    """Drives Firefox through the legacy WebDriver browser extension.

    Launches the given profile/binary and then speaks the remote protocol
    to the extension's local HTTP endpoint.
    """

    def __init__(self, host, firefox_profile, firefox_binary=None, timeout=30):
        # Fix: connect_and_quit() reads the module-level _URL.  Without this
        # ``global`` declaration the assignment below only created a function
        # local and the module global stayed "", producing a bogus quit URL.
        global _URL
        # NOTE(review): ``timeout`` is accepted for interface compatibility
        # but is unused here — confirm against callers before removing.
        self.profile = firefox_profile
        self.binary = firefox_binary
        HOST = host
        if self.binary is None:
            self.binary = FirefoxBinary()
        if HOST is None:
            HOST = "127.0.0.1"

        # Pick a free port for the extension to listen on and bake it into
        # the profile before launching the browser.
        PORT = utils.free_port()
        self.profile.port = PORT
        self.profile.update_preferences()

        self.profile.add_extension()

        self.binary.launch_browser(self.profile)
        _URL = "http://%s:%d/hub" % (HOST, PORT)
        RemoteConnection.__init__(
            self, _URL, keep_alive=True)

    def quit(self, sessionId=None):
        """Quits the browser session and waits for the extension port to close."""
        self.execute(Command.QUIT, {'sessionId': sessionId})
        while self.is_connectable():
            LOGGER.info("waiting to quit")
            time.sleep(1)

    def connect(self):
        """Connects to the extension and retrieves the session id."""
        return self.execute(Command.NEW_SESSION,
                            {'desiredCapabilities': DesiredCapabilities.FIREFOX})

    def connect_and_quit(self):
        """Connects to a running browser and quits immediately.

        Fix: this was decorated ``@classmethod`` while calling the
        instance-level ``_request`` helper, so it could never be invoked
        successfully; it is now a regular instance method.
        """
        self._request('%s/extensions/firefox/quit' % _URL)

    def is_connectable(self):
        """Tries to connect to the extension without retrieving a context.

        Fix: this was decorated ``@classmethod`` (yet read ``self.profile``)
        and discarded the result of ``utils.is_connectable``, which made the
        wait loop in ``quit()`` a no-op.  It is now an instance method that
        returns the connectability status.
        """
        return utils.is_connectable(self.profile.port)


class ExtensionConnectionError(Exception):
    """An internal error occurred in the extension.

    Might be caused by bad input or bugs in webdriver.
    """
    pass
apache-2.0
ryfeus/lambda-packs
Opencv_pil/source36/PIL/IcnsImagePlugin.py
2
11843
#
# The Python Imaging Library.
# $Id$
#
# macOS icns file decoder, based on icns.py by Bob Ippolito.
#
# history:
# 2004-10-09 fl  Turned into a PIL plugin; removed 2.3 dependencies.
#
# Copyright (c) 2004 by Bob Ippolito.
# Copyright (c) 2004 by Secret Labs.
# Copyright (c) 2004 by Fredrik Lundh.
# Copyright (c) 2014 by Alastair Houghton.
#
# See the README file for information on usage and redistribution.
#

from PIL import Image, ImageFile, PngImagePlugin
from PIL._binary import i8
import io
import os
import shutil
import struct
import sys
import tempfile

enable_jpeg2k = hasattr(Image.core, 'jp2klib_version')
if enable_jpeg2k:
    from PIL import Jpeg2KImagePlugin

# Every icns block starts with an 8-byte header: 4-byte type code + 32-bit
# big-endian length (length includes the header itself).
HEADERSIZE = 8


def nextheader(fobj):
    """Read the next (signature, blocksize) block header from the stream."""
    return struct.unpack('>4sI', fobj.read(HEADERSIZE))


def read_32t(fobj, start_length, size):
    # The 128x128 icon seems to have an extra header for some reason.
    (start, length) = start_length
    fobj.seek(start)
    sig = fobj.read(4)
    if sig != b'\x00\x00\x00\x00':
        raise SyntaxError('Unknown signature, expecting 0x00000000')
    return read_32(fobj, (start + 4, length - 4), size)


def read_32(fobj, start_length, size):
    """
    Read a 32bit RGB icon resource.  Seems to be either uncompressed or
    an RLE packbits-like scheme.
    """
    (start, length) = start_length
    fobj.seek(start)
    pixel_size = (size[0] * size[2], size[1] * size[2])
    sizesq = pixel_size[0] * pixel_size[1]
    if length == sizesq * 3:
        # uncompressed ("RGBRGBGB")
        indata = fobj.read(length)
        im = Image.frombuffer("RGB", pixel_size, indata, "raw", "RGB", 0, 1)
    else:
        # decode image
        im = Image.new("RGB", pixel_size, None)
        for band_ix in range(3):
            data = []
            bytesleft = sizesq
            while bytesleft > 0:
                byte = fobj.read(1)
                if not byte:
                    break
                byte = i8(byte)
                if byte & 0x80:
                    # High bit set: run of one repeated byte.
                    # run length = (byte - 0x80) + 3 == byte - 125
                    blocksize = byte - 125
                    byte = fobj.read(1)
                    for i in range(blocksize):
                        data.append(byte)
                else:
                    # High bit clear: literal run of byte+1 bytes.
                    blocksize = byte + 1
                    data.append(fobj.read(blocksize))
                bytesleft -= blocksize
                if bytesleft <= 0:
                    break
            if bytesleft != 0:
                raise SyntaxError(
                    "Error reading channel [%r left]" % bytesleft
                )
            band = Image.frombuffer(
                "L", pixel_size, b"".join(data), "raw", "L", 0, 1
            )
            im.im.putband(band.im, band_ix)
    return {"RGB": im}


def read_mk(fobj, start_length, size):
    # Alpha masks seem to be uncompressed
    start = start_length[0]
    fobj.seek(start)
    pixel_size = (size[0] * size[2], size[1] * size[2])
    sizesq = pixel_size[0] * pixel_size[1]
    band = Image.frombuffer(
        "L", pixel_size, fobj.read(sizesq), "raw", "L", 0, 1
    )
    return {"A": band}


def read_png_or_jpeg2000(fobj, start_length, size):
    """Read an icon stored as an embedded PNG or JPEG 2000 stream."""
    (start, length) = start_length
    fobj.seek(start)
    sig = fobj.read(12)
    if sig[:8] == b'\x89PNG\x0d\x0a\x1a\x0a':
        fobj.seek(start)
        im = PngImagePlugin.PngImageFile(fobj)
        return {"RGBA": im}
    elif sig[:4] == b'\xff\x4f\xff\x51' \
            or sig[:4] == b'\x0d\x0a\x87\x0a' \
            or sig == b'\x00\x00\x00\x0cjP  \x0d\x0a\x87\x0a':
        # Fix: the JP2 signature box is 'jP' followed by TWO 0x20 bytes
        # (b'jP\x20\x20'); a single space would never match a valid stream.
        if not enable_jpeg2k:
            raise ValueError('Unsupported icon subimage format (rebuild PIL '
                             'with JPEG 2000 support to fix this)')
        # j2k, jpc or j2c
        fobj.seek(start)
        jp2kstream = fobj.read(length)
        f = io.BytesIO(jp2kstream)
        im = Jpeg2KImagePlugin.Jpeg2KImageFile(f)
        if im.mode != 'RGBA':
            im = im.convert('RGBA')
        return {"RGBA": im}
    else:
        raise ValueError('Unsupported icon subimage format')


class IcnsFile(object):
    """Parses the block table of an icns resource and decodes icon sizes."""

    # Map (width, height, scale) -> ordered list of (type code, reader).
    SIZES = {
        (512, 512, 2): [
            (b'ic10', read_png_or_jpeg2000),
        ],
        (512, 512, 1): [
            (b'ic09', read_png_or_jpeg2000),
        ],
        (256, 256, 2): [
            (b'ic14', read_png_or_jpeg2000),
        ],
        (256, 256, 1): [
            (b'ic08', read_png_or_jpeg2000),
        ],
        (128, 128, 2): [
            (b'ic13', read_png_or_jpeg2000),
        ],
        (128, 128, 1): [
            (b'ic07', read_png_or_jpeg2000),
            (b'it32', read_32t),
            (b't8mk', read_mk),
        ],
        (64, 64, 1): [
            (b'icp6', read_png_or_jpeg2000),
        ],
        (32, 32, 2): [
            (b'ic12', read_png_or_jpeg2000),
        ],
        (48, 48, 1): [
            (b'ih32', read_32),
            (b'h8mk', read_mk),
        ],
        (32, 32, 1): [
            (b'icp5', read_png_or_jpeg2000),
            (b'il32', read_32),
            (b'l8mk', read_mk),
        ],
        (16, 16, 2): [
            (b'ic11', read_png_or_jpeg2000),
        ],
        (16, 16, 1): [
            (b'icp4', read_png_or_jpeg2000),
            (b'is32', read_32),
            (b's8mk', read_mk),
        ],
    }

    def __init__(self, fobj):
        """
        fobj is a file-like object as an icns resource
        """
        # signature : (start, length)
        self.dct = dct = {}
        self.fobj = fobj
        sig, filesize = nextheader(fobj)
        if sig != b'icns':
            raise SyntaxError('not an icns file')
        i = HEADERSIZE
        while i < filesize:
            sig, blocksize = nextheader(fobj)
            if blocksize <= 0:
                raise SyntaxError('invalid block header')
            i += HEADERSIZE
            blocksize -= HEADERSIZE
            # Record payload position/size, then skip over it to the
            # next block header.
            dct[sig] = (i, blocksize)
            fobj.seek(blocksize, 1)
            i += blocksize

    def itersizes(self):
        """Return the list of (w, h, scale) sizes present in this file."""
        sizes = []
        for size, fmts in self.SIZES.items():
            for (fmt, reader) in fmts:
                if fmt in self.dct:
                    sizes.append(size)
                    break
        return sizes

    def bestsize(self):
        """Return the largest available size, or raise if none found."""
        sizes = self.itersizes()
        if not sizes:
            raise SyntaxError("No 32bit icon resources found")
        return max(sizes)

    def dataforsize(self, size):
        """
        Get an icon resource as {channel: array}.  Note that
        the arrays are bottom-up like windows bitmaps and will likely
        need to be flipped or transposed in some way.
        """
        dct = {}
        for code, reader in self.SIZES[size]:
            desc = self.dct.get(code)
            if desc is not None:
                dct.update(reader(self.fobj, desc, size))
        return dct

    def getimage(self, size=None):
        """Decode and return one icon as a PIL image (RGBA where possible)."""
        if size is None:
            size = self.bestsize()
        if len(size) == 2:
            size = (size[0], size[1], 1)
        channels = self.dataforsize(size)

        im = channels.get('RGBA', None)
        if im:
            return im

        # No direct RGBA variant: combine the RGB data with the alpha
        # mask channel when one is present.
        im = channels.get("RGB").copy()
        try:
            im.putalpha(channels["A"])
        except KeyError:
            pass

        return im


##
# Image plugin for Mac OS icons.

class IcnsImageFile(ImageFile.ImageFile):
    """
    PIL image support for Mac OS .icns files.
    Chooses the best resolution, but will possibly load
    a different size image if you mutate the size attribute
    before calling 'load'.

    The info dictionary has a key 'sizes' that is a list
    of sizes that the icns file has.
    """

    format = "ICNS"
    format_description = "Mac OS icns resource"

    def _open(self):
        self.icns = IcnsFile(self.fp)
        self.mode = 'RGBA'
        self.info['sizes'] = self.icns.itersizes()
        self.best_size = self.icns.bestsize()
        self.size = (self.best_size[0] * self.best_size[2],
                     self.best_size[1] * self.best_size[2])

        # Just use this to see if it's loaded or not yet.
        self.tile = ('',)

    @property
    def size(self):
        return self._size

    @size.setter
    def size(self, value):
        # Accept (w, h), (w, h, scale), or a pixel size that corresponds
        # to one of the scaled variants; reject anything else.
        info_size = value
        if info_size not in self.info['sizes'] and len(info_size) == 2:
            info_size = (info_size[0], info_size[1], 1)
        if info_size not in self.info['sizes'] and len(info_size) == 3 and \
           info_size[2] == 1:
            simple_sizes = [(size[0] * size[2], size[1] * size[2])
                            for size in self.info['sizes']]
            if value in simple_sizes:
                info_size = self.info['sizes'][simple_sizes.index(value)]
        if info_size not in self.info['sizes']:
            raise ValueError(
                "This is not one of the allowed sizes of this image")
        self._size = value

    def load(self):
        if len(self.size) == 3:
            self.best_size = self.size
            self.size = (self.best_size[0] * self.best_size[2],
                         self.best_size[1] * self.best_size[2])

        Image.Image.load(self)
        if not self.tile:
            return
        self.load_prepare()
        # This is likely NOT the best way to do it, but whatever.
        im = self.icns.getimage(self.best_size)

        # If this is a PNG or JPEG 2000, it won't be loaded yet
        im.load()

        self.im = im.im
        self.mode = im.mode
        self.size = im.size
        if self._exclusive_fp:
            self.fp.close()
        self.fp = None
        self.icns = None
        self.tile = ()
        self.load_end()


def _save(im, fp, filename):
    """
    Saves the image as a series of PNG files,
    that are then converted to a .icns file
    using the macOS command line utility 'iconutil'.

    macOS only.
    """
    if hasattr(fp, "flush"):
        fp.flush()

    # create the temporary set of pngs
    iconset = tempfile.mkdtemp('.iconset')
    provided_images = {im.width: im
                       for im in im.encoderinfo.get("append_images", [])}
    last_w = None
    second_path = None
    for w in [16, 32, 128, 256, 512]:
        prefix = 'icon_{}x{}'.format(w, w)

        first_path = os.path.join(iconset, prefix + '.png')
        if last_w == w:
            # The @2x image of the previous size is identical to this size.
            shutil.copyfile(second_path, first_path)
        else:
            im_w = provided_images.get(w, im.resize((w, w), Image.LANCZOS))
            im_w.save(first_path)

        second_path = os.path.join(iconset, prefix + '@2x.png')
        im_w2 = provided_images.get(w * 2,
                                    im.resize((w * 2, w * 2), Image.LANCZOS))
        im_w2.save(second_path)
        last_w = w * 2

    # iconutil -c icns -o {} {}
    from subprocess import Popen, PIPE, CalledProcessError

    convert_cmd = ["iconutil", "-c", "icns", "-o", filename, iconset]
    with open(os.devnull, 'wb') as devnull:
        convert_proc = Popen(convert_cmd, stdout=PIPE, stderr=devnull)

    convert_proc.stdout.close()

    retcode = convert_proc.wait()

    # remove the temporary files
    shutil.rmtree(iconset)

    if retcode:
        raise CalledProcessError(retcode, convert_cmd)


Image.register_open(IcnsImageFile.format, IcnsImageFile,
                    lambda x: x[:4] == b'icns')
Image.register_extension(IcnsImageFile.format, '.icns')

if sys.platform == 'darwin':
    Image.register_save(IcnsImageFile.format, _save)

    Image.register_mime(IcnsImageFile.format, "image/icns")


if __name__ == '__main__':

    if len(sys.argv) < 2:
        print("Syntax: python IcnsImagePlugin.py [file]")
        sys.exit()

    imf = IcnsImageFile(open(sys.argv[1], 'rb'))
    for size in imf.info['sizes']:
        imf.size = size
        imf.load()
        im = imf.im
        im.save('out-%s-%s-%s.png' % size)
    im = Image.open(sys.argv[1])
    im.save("out.png")
    # Fix: sys.platform is 'win32' on Windows (even 64-bit); the original
    # compared against 'windows', which never matches, so the preview was
    # never opened.
    if sys.platform == 'win32':
        os.startfile("out.png")
mit
CodyKochmann/sync_lab
simple_notepad_server/cherrypy/test/webtest.py
20
20601
"""Extensions to unittest for web frameworks. Use the WebCase.getPage method to request a page from your HTTP server. Framework Integration ===================== If you have control over your server process, you can handle errors in the server-side of the HTTP conversation a bit better. You must run both the client (your WebCase tests) and the server in the same process (but in separate threads, obviously). When an error occurs in the framework, call server_error. It will print the traceback to stdout, and keep any assertions you have from running (the assumption is that, if the server errors, the page output will not be of further significance to your tests). """ import pprint import re import socket import sys import time import traceback import types from unittest import * from unittest import _TextTestResult from cherrypy._cpcompat import basestring, ntob, py3k, HTTPConnection from cherrypy._cpcompat import HTTPSConnection, unicodestr def interface(host): """Return an IP address for a client connection given the server host. If the server is listening on '0.0.0.0' (INADDR_ANY) or '::' (IN6ADDR_ANY), this will return the proper localhost.""" if host == '0.0.0.0': # INADDR_ANY, which should respond on localhost. return "127.0.0.1" if host == '::': # IN6ADDR_ANY, which should respond on localhost. return "::1" return host class TerseTestResult(_TextTestResult): def printErrors(self): # Overridden to avoid unnecessary empty line if self.errors or self.failures: if self.dots or self.showAll: self.stream.writeln() self.printErrorList('ERROR', self.errors) self.printErrorList('FAIL', self.failures) class TerseTestRunner(TextTestRunner): """A test runner class that displays results in textual form.""" def _makeResult(self): return TerseTestResult(self.stream, self.descriptions, self.verbosity) def run(self, test): "Run the given test case or test suite." 
# Overridden to remove unnecessary empty lines and separators result = self._makeResult() test(result) result.printErrors() if not result.wasSuccessful(): self.stream.write("FAILED (") failed, errored = list(map(len, (result.failures, result.errors))) if failed: self.stream.write("failures=%d" % failed) if errored: if failed: self.stream.write(", ") self.stream.write("errors=%d" % errored) self.stream.writeln(")") return result class ReloadingTestLoader(TestLoader): def loadTestsFromName(self, name, module=None): """Return a suite of all tests cases given a string specifier. The name may resolve either to a module, a test case class, a test method within a test case class, or a callable object which returns a TestCase or TestSuite instance. The method optionally resolves the names relative to a given module. """ parts = name.split('.') unused_parts = [] if module is None: if not parts: raise ValueError("incomplete test name: %s" % name) else: parts_copy = parts[:] while parts_copy: target = ".".join(parts_copy) if target in sys.modules: module = reload(sys.modules[target]) parts = unused_parts break else: try: module = __import__(target) parts = unused_parts break except ImportError: unused_parts.insert(0, parts_copy[-1]) del parts_copy[-1] if not parts_copy: raise parts = parts[1:] obj = module for part in parts: obj = getattr(obj, part) if isinstance(obj, types.ModuleType): return self.loadTestsFromModule(obj) elif (((py3k and isinstance(obj, type)) or isinstance(obj, (type, types.ClassType))) and issubclass(obj, TestCase)): return self.loadTestsFromTestCase(obj) elif isinstance(obj, types.UnboundMethodType): if py3k: return obj.__self__.__class__(obj.__name__) else: return obj.im_class(obj.__name__) elif hasattr(obj, '__call__'): test = obj() if not isinstance(test, TestCase) and \ not isinstance(test, TestSuite): raise ValueError("calling %s returned %s, " "not a test" % (obj, test)) return test else: raise ValueError("do not know how to make test from: %s" % 
obj) try: # Jython support if sys.platform[:4] == 'java': def getchar(): # Hopefully this is enough return sys.stdin.read(1) else: # On Windows, msvcrt.getch reads a single char without output. import msvcrt def getchar(): return msvcrt.getch() except ImportError: # Unix getchr import tty import termios def getchar(): fd = sys.stdin.fileno() old_settings = termios.tcgetattr(fd) try: tty.setraw(sys.stdin.fileno()) ch = sys.stdin.read(1) finally: termios.tcsetattr(fd, termios.TCSADRAIN, old_settings) return ch class WebCase(TestCase): HOST = "127.0.0.1" PORT = 8000 HTTP_CONN = HTTPConnection PROTOCOL = "HTTP/1.1" scheme = "http" url = None status = None headers = None body = None encoding = 'utf-8' time = None def get_conn(self, auto_open=False): """Return a connection to our HTTP server.""" if self.scheme == "https": cls = HTTPSConnection else: cls = HTTPConnection conn = cls(self.interface(), self.PORT) # Automatically re-connect? conn.auto_open = auto_open conn.connect() return conn def set_persistent(self, on=True, auto_open=False): """Make our HTTP_CONN persistent (or not). If the 'on' argument is True (the default), then self.HTTP_CONN will be set to an instance of HTTPConnection (or HTTPS if self.scheme is "https"). This will then persist across requests. We only allow for a single open connection, so if you call this and we currently have an open connection, it will be closed. """ try: self.HTTP_CONN.close() except (TypeError, AttributeError): pass if on: self.HTTP_CONN = self.get_conn(auto_open=auto_open) else: if self.scheme == "https": self.HTTP_CONN = HTTPSConnection else: self.HTTP_CONN = HTTPConnection def _get_persistent(self): return hasattr(self.HTTP_CONN, "__class__") def _set_persistent(self, on): self.set_persistent(on) persistent = property(_get_persistent, _set_persistent) def interface(self): """Return an IP address for a client connection. 
If the server is listening on '0.0.0.0' (INADDR_ANY) or '::' (IN6ADDR_ANY), this will return the proper localhost.""" return interface(self.HOST) def getPage(self, url, headers=None, method="GET", body=None, protocol=None): """Open the url with debugging support. Return status, headers, body. """ ServerError.on = False if isinstance(url, unicodestr): url = url.encode('utf-8') if isinstance(body, unicodestr): body = body.encode('utf-8') self.url = url self.time = None start = time.time() result = openURL(url, headers, method, body, self.HOST, self.PORT, self.HTTP_CONN, protocol or self.PROTOCOL) self.time = time.time() - start self.status, self.headers, self.body = result # Build a list of request cookies from the previous response cookies. self.cookies = [('Cookie', v) for k, v in self.headers if k.lower() == 'set-cookie'] if ServerError.on: raise ServerError() return result interactive = True console_height = 30 def _handlewebError(self, msg): print("") print(" ERROR: %s" % msg) if not self.interactive: raise self.failureException(msg) p = (" Show: " "[B]ody [H]eaders [S]tatus [U]RL; " "[I]gnore, [R]aise, or sys.e[X]it >> ") sys.stdout.write(p) sys.stdout.flush() while True: i = getchar().upper() if not isinstance(i, type("")): i = i.decode('ascii') if i not in "BHSUIRX": continue print(i.upper()) # Also prints new line if i == "B": for x, line in enumerate(self.body.splitlines()): if (x + 1) % self.console_height == 0: # The \r and comma should make the next line overwrite sys.stdout.write("<-- More -->\r") m = getchar().lower() # Erase our "More" prompt sys.stdout.write(" \r") if m == "q": break print(line) elif i == "H": pprint.pprint(self.headers) elif i == "S": print(self.status) elif i == "U": print(self.url) elif i == "I": # return without raising the normal exception return elif i == "R": raise self.failureException(msg) elif i == "X": self.exit() sys.stdout.write(p) sys.stdout.flush() def exit(self): sys.exit() def assertStatus(self, status, msg=None): 
"""Fail if self.status != status.""" if isinstance(status, basestring): if not self.status == status: if msg is None: msg = 'Status (%r) != %r' % (self.status, status) self._handlewebError(msg) elif isinstance(status, int): code = int(self.status[:3]) if code != status: if msg is None: msg = 'Status (%r) != %r' % (self.status, status) self._handlewebError(msg) else: # status is a tuple or list. match = False for s in status: if isinstance(s, basestring): if self.status == s: match = True break elif int(self.status[:3]) == s: match = True break if not match: if msg is None: msg = 'Status (%r) not in %r' % (self.status, status) self._handlewebError(msg) def assertHeader(self, key, value=None, msg=None): """Fail if (key, [value]) not in self.headers.""" lowkey = key.lower() for k, v in self.headers: if k.lower() == lowkey: if value is None or str(value) == v: return v if msg is None: if value is None: msg = '%r not in headers' % key else: msg = '%r:%r not in headers' % (key, value) self._handlewebError(msg) def assertHeaderIn(self, key, values, msg=None): """Fail if header indicated by key doesn't have one of the values.""" lowkey = key.lower() for k, v in self.headers: if k.lower() == lowkey: matches = [value for value in values if str(value) == v] if matches: return matches if msg is None: msg = '%(key)r not in %(values)r' % vars() self._handlewebError(msg) def assertHeaderItemValue(self, key, value, msg=None): """Fail if the header does not contain the specified value""" actual_value = self.assertHeader(key, msg=msg) header_values = map(str.strip, actual_value.split(',')) if value in header_values: return value if msg is None: msg = "%r not in %r" % (value, header_values) self._handlewebError(msg) def assertNoHeader(self, key, msg=None): """Fail if key in self.headers.""" lowkey = key.lower() matches = [k for k, v in self.headers if k.lower() == lowkey] if matches: if msg is None: msg = '%r in headers' % key self._handlewebError(msg) def assertBody(self, value, 
msg=None): """Fail if value != self.body.""" if isinstance(value, unicodestr): value = value.encode(self.encoding) if value != self.body: if msg is None: msg = 'expected body:\n%r\n\nactual body:\n%r' % ( value, self.body) self._handlewebError(msg) def assertInBody(self, value, msg=None): """Fail if value not in self.body.""" if isinstance(value, unicodestr): value = value.encode(self.encoding) if value not in self.body: if msg is None: msg = '%r not in body: %s' % (value, self.body) self._handlewebError(msg) def assertNotInBody(self, value, msg=None): """Fail if value in self.body.""" if isinstance(value, unicodestr): value = value.encode(self.encoding) if value in self.body: if msg is None: msg = '%r found in body' % value self._handlewebError(msg) def assertMatchesBody(self, pattern, msg=None, flags=0): """Fail if value (a regex pattern) is not in self.body.""" if isinstance(pattern, unicodestr): pattern = pattern.encode(self.encoding) if re.search(pattern, self.body, flags) is None: if msg is None: msg = 'No match for %r in body' % pattern self._handlewebError(msg) methods_with_bodies = ("POST", "PUT") def cleanHeaders(headers, method, body, host, port): """Return request headers, with required headers added (if missing).""" if headers is None: headers = [] # Add the required Host request header if not present. # [This specifies the host:port of the server, not the client.] 
found = False for k, v in headers: if k.lower() == 'host': found = True break if not found: if port == 80: headers.append(("Host", host)) else: headers.append(("Host", "%s:%s" % (host, port))) if method in methods_with_bodies: # Stick in default type and length headers if not present found = False for k, v in headers: if k.lower() == 'content-type': found = True break if not found: headers.append( ("Content-Type", "application/x-www-form-urlencoded")) headers.append(("Content-Length", str(len(body or "")))) return headers def shb(response): """Return status, headers, body the way we like from a response.""" if py3k: h = response.getheaders() else: h = [] key, value = None, None for line in response.msg.headers: if line: if line[0] in " \t": value += line.strip() else: if key and value: h.append((key, value)) key, value = line.split(":", 1) key = key.strip() value = value.strip() if key and value: h.append((key, value)) return "%s %s" % (response.status, response.reason), h, response.read() def openURL(url, headers=None, method="GET", body=None, host="127.0.0.1", port=8000, http_conn=HTTPConnection, protocol="HTTP/1.1"): """Open the given HTTP resource and return status, headers, and body.""" headers = cleanHeaders(headers, method, body, host, port) # Trying 10 times is simply in case of socket errors. # Normal case--it should run once. 
for trial in range(10): try: # Allow http_conn to be a class or an instance if hasattr(http_conn, "host"): conn = http_conn else: conn = http_conn(interface(host), port) conn._http_vsn_str = protocol conn._http_vsn = int("".join([x for x in protocol if x.isdigit()])) # skip_accept_encoding argument added in python version 2.4 if sys.version_info < (2, 4): def putheader(self, header, value): if header == 'Accept-Encoding' and value == 'identity': return self.__class__.putheader(self, header, value) import new conn.putheader = new.instancemethod( putheader, conn, conn.__class__) conn.putrequest(method.upper(), url, skip_host=True) elif not py3k: conn.putrequest(method.upper(), url, skip_host=True, skip_accept_encoding=True) else: import http.client # Replace the stdlib method, which only accepts ASCII url's def putrequest(self, method, url): if ( self._HTTPConnection__response and self._HTTPConnection__response.isclosed() ): self._HTTPConnection__response = None if self._HTTPConnection__state == http.client._CS_IDLE: self._HTTPConnection__state = ( http.client._CS_REQ_STARTED) else: raise http.client.CannotSendRequest() self._method = method if not url: url = ntob('/') request = ntob(' ').join( (method.encode("ASCII"), url, self._http_vsn_str.encode("ASCII"))) self._output(request) import types conn.putrequest = types.MethodType(putrequest, conn) conn.putrequest(method.upper(), url) for key, value in headers: conn.putheader(key, value.encode("Latin-1")) conn.endheaders() if body is not None: conn.send(body) # Handle response response = conn.getresponse() s, h, b = shb(response) if not hasattr(http_conn, "host"): # We made our own conn instance. Close it. conn.close() return s, h, b except socket.error: time.sleep(0.5) if trial == 9: raise # Add any exceptions which your web framework handles # normally (that you don't want server_error to trap). 
ignored_exceptions = [] # You'll want set this to True when you can't guarantee # that each response will immediately follow each request; # for example, when handling requests via multiple threads. ignore_all = False class ServerError(Exception): on = False def server_error(exc=None): """Server debug hook. Return True if exception handled, False if ignored. You probably want to wrap this, so you can still handle an error using your framework when it's ignored. """ if exc is None: exc = sys.exc_info() if ignore_all or exc[0] in ignored_exceptions: return False else: ServerError.on = True print("") print("".join(traceback.format_exception(*exc))) return True
mit
theflofly/tensorflow
tensorflow/python/keras/utils/io_utils.py
9
4877
# Copyright 2018 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== # pylint: disable=g-import-not-at-top """Utilities related to disk I/O.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function from collections import defaultdict import numpy as np import six from tensorflow.python.util.tf_export import keras_export try: import h5py except ImportError: h5py = None @keras_export('keras.utils.HDF5Matrix') class HDF5Matrix(object): """Representation of HDF5 dataset to be used instead of a Numpy array. Example: ```python x_data = HDF5Matrix('input/file.hdf5', 'data') model.predict(x_data) ``` Providing `start` and `end` allows use of a slice of the dataset. Optionally, a normalizer function (or lambda) can be given. This will be called on every slice of data retrieved. Arguments: datapath: string, path to a HDF5 file dataset: string, name of the HDF5 dataset in the file specified in datapath start: int, start of desired slice of the specified dataset end: int, end of desired slice of the specified dataset normalizer: function to be called on data when retrieved Returns: An array-like HDF5 dataset. 
""" refs = defaultdict(int) def __init__(self, datapath, dataset, start=0, end=None, normalizer=None): if h5py is None: raise ImportError('The use of HDF5Matrix requires ' 'HDF5 and h5py installed.') if datapath not in list(self.refs.keys()): f = h5py.File(datapath) self.refs[datapath] = f else: f = self.refs[datapath] self.data = f[dataset] self.start = start if end is None: self.end = self.data.shape[0] else: self.end = end self.normalizer = normalizer def __len__(self): return self.end - self.start def __getitem__(self, key): if isinstance(key, slice): start, stop = key.start, key.stop if start is None: start = 0 if stop is None: stop = self.shape[0] if stop + self.start <= self.end: idx = slice(start + self.start, stop + self.start) else: raise IndexError elif isinstance(key, (int, np.integer)): if key + self.start < self.end: idx = key + self.start else: raise IndexError elif isinstance(key, np.ndarray): if np.max(key) + self.start < self.end: idx = (self.start + key).tolist() else: raise IndexError else: # Assume list/iterable if max(key) + self.start < self.end: idx = [x + self.start for x in key] else: raise IndexError if self.normalizer is not None: return self.normalizer(self.data[idx]) else: return self.data[idx] @property def shape(self): """Gets a numpy-style shape tuple giving the dataset dimensions. Returns: A numpy-style shape tuple. """ return (self.end - self.start,) + self.data.shape[1:] @property def dtype(self): """Gets the datatype of the dataset. Returns: A numpy dtype string. """ return self.data.dtype @property def ndim(self): """Gets the number of dimensions (rank) of the dataset. Returns: An integer denoting the number of dimensions (rank) of the dataset. """ return self.data.ndim @property def size(self): """Gets the total dataset size (number of elements). Returns: An integer denoting the number of elements in the dataset. 
""" return np.prod(self.shape) def ask_to_proceed_with_overwrite(filepath): """Produces a prompt asking about overwriting a file. Arguments: filepath: the path to the file to be overwritten. Returns: True if we can proceed with overwrite, False otherwise. """ overwrite = six.moves.input('[WARNING] %s already exists - overwrite? ' '[y/n]' % (filepath)).strip().lower() while overwrite not in ('y', 'n'): overwrite = six.moves.input('Enter "y" (overwrite) or "n" ' '(cancel).').strip().lower() if overwrite == 'n': return False print('[TIP] Next time specify overwrite=True!') return True
apache-2.0
lordtangent/arsenalsuite
python/scripts/joberror_handler.py
10
2441
#!/usr/bin/python from PyQt4.QtCore import * from PyQt4.QtSql import * from blur.Stone import * from blur.Classes import * import blur.email, blur.jabber import sys, time, re, os from math import ceil import traceback if sys.argv.count('-daemonize'): from blur.daemonize import createDaemon createDaemon() app = QCoreApplication(sys.argv) initConfig( "/etc/joberror_handler.ini", "/var/log/ab/joberror_handler.log" ) # Read values from db.ini, but dont overwrite values from reaper.ini # This allows db.ini defaults to work even if reaper.ini is non-existent config().readFromFile( "/etc/db.ini", False ) blur.RedirectOutputToLog() classes_loader() VERBOSE_DEBUG = False if VERBOSE_DEBUG: Database.current().setEchoMode( Database.EchoUpdate | Database.EchoDelete )# | Database.EchoSelect ) Database.current().connection().reconnect() service = Service.ensureServiceExists('AB_JobErrorHandler') def perform_script_action( handler_script, error ): func = getCompiledFunction( 'handleError', handler_script, handler_script.script(), handler_script.name() ) if func and callable(func): func( error ) def handle_error( handler, error ): # Does the handler match the error? if error.message().contains( handler.errorRegEx() ): perform_script_action( handler.jobErrorHandlerScript(), error ) def joberror_handler(): # Config: managerDriveLetter, managerSpoolDir, assburnerErrorStep print "JobError Handler is starting up" while True: service.pulse() errorsToCheck = JobError.select( "checked=false" ) handlersByJobType = {} jobTypeByJob = {} for error in errorsToCheck: # Get the job key, but don't load the job(if we don't have to) jobKey = error.getValue('fkeyJob').toInt()[0] # Hmm, if we haven't looked at the job yet, then we have to load # it to figure out the jobtype. 
if not jobKey in jobTypeByJob: job = error.job() jobTypeByJob[job.key()] = job.jobType() # Get the jobtype jobType = jobTypeByJob[jobKey] if not jobType.isRecord(): continue # Load the handlers for this jobtype if not already loaded if not jobType in handlersByJobType: handlersByJobType[jobType] = jobType.jobErrorHandlers() # Run each handler for handler in handlersByJobType[jobType]: handle_error( handler, error ) errorsToCheck.setChecked( True ) errorsToCheck.commit() time.sleep(5) if __name__ == "__main__": joberror_handler()
gpl-2.0
alexis-roche/nipy
examples/labs/need_data/parcel_intra.py
4
1546
#!/usr/bin/env python # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: nil -*- # vi: set ft=python sts=4 ts=4 sw=4 et: from __future__ import print_function # Python 2/3 compatibility __doc__ = """ Example of script to parcellate the data from one subject, using various algorithms. Note that it can take some time. author: Bertrand Thirion, 2005-2009 """ print(__doc__) from os import mkdir, getcwd, path from numpy import array from nipy.labs.spatial_models.parcel_io import fixed_parcellation # Local import from get_data_light import DATA_DIR, get_second_level_dataset # ------------------------------------ # Get the data (mask+functional image) # take several experimental conditions # time courses could be used instead n_beta = [29] mask_image = path.join(DATA_DIR, 'mask.nii.gz') betas = [path.join(DATA_DIR, 'spmT_%04d.nii.gz' % n) for n in n_beta] missing_file = array([not path.exists(m) for m in [mask_image] + betas]).any() if missing_file: get_second_level_dataset() # set the parameters n_parcels = 500 mu = 10 nn = 6 verbose = 1 # write directory write_dir = path.join(getcwd(), 'results') if not path.exists(write_dir): mkdir(write_dir) lpa = fixed_parcellation(mask_image, betas, n_parcels, nn, 'gkm', write_dir, mu, verbose) lpa = fixed_parcellation(mask_image, betas, n_parcels, nn, 'ward', write_dir, mu, verbose) lpa = fixed_parcellation(mask_image, betas, n_parcels, nn, 'ward_and_gkm', write_dir, mu, verbose)
bsd-3-clause
andrey-malets/web-page-replay
rules/rule.py
30
1507
#!/usr/bin/env python # Copyright 2015 Google Inc. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. class Rule(object): """An optional base class for rule implementations. The rule_parser looks for the 'IsType' and 'ApplyRule' methods by name, so rules are not strictly required to extend this class. """ def IsType(self, rule_type_name): """Returns True if the name matches this rule.""" raise NotImplementedError def ApplyRule(self, return_value, request, response): """Invokes this rule with the given args. Args: return_value: the prior rule's return_value (if any). request: the httparchive ArchivedHttpRequest. response: the httparchive ArchivedHttpResponse, which may be None. Returns: A (should_stop, return_value) tuple. Typically the request and response are treated as immutable, so it's the caller's job to apply the return_value (e.g., set response fields). """ raise NotImplementedError
apache-2.0
memo/tensorflow
tensorflow/contrib/distributions/python/ops/vector_laplace_linear_operator.py
15
10591
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Vectorized Laplace distribution class, directly using LinearOpeartor.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import numpy as np from tensorflow.contrib import linalg from tensorflow.contrib.distributions.python.ops import bijectors from tensorflow.contrib.distributions.python.ops import distribution_util from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops.distributions import laplace from tensorflow.python.ops.distributions import transformed_distribution __all__ = [ "VectorLaplaceLinearOperator" ] _mvn_sample_note = """ `value` is a batch vector with compatible shape if `value` is a `Tensor` whose shape can be broadcast up to either: ```python self.batch_shape + self.event_shape ``` or ```python [M1, ..., Mm] + self.batch_shape + self.event_shape ``` """ class VectorLaplaceLinearOperator( transformed_distribution.TransformedDistribution): """The vectorization of the Laplace distribution on `R^k`. 
The vector laplace distribution is defined over `R^k`, and parameterized by a (batch of) length-`k` `loc` vector (the means) and a (batch of) `k x k` `scale` matrix: `covariance = 2 * scale @ scale.T`, where `@` denotes matrix-multiplication. #### Mathematical Details The probability density function (pdf) is, ```none pdf(x; loc, scale) = exp(-||y||_1) / Z, y = inv(scale) @ (x - loc), Z = 2**k |det(scale)|, ``` where: * `loc` is a vector in `R^k`, * `scale` is a linear operator in `R^{k x k}`, `cov = scale @ scale.T`, * `Z` denotes the normalization constant, and, * `||y||_1` denotes the `l1` norm of `y`, `sum_i |y_i|. The VectorLaplace distribution is a member of the [location-scale family](https://en.wikipedia.org/wiki/Location-scale_family), i.e., it can be constructed as, ```none X = (X_1, ..., X_k), each X_i ~ Laplace(loc=0, scale=1) Y = (Y_1, ...,Y_k) = scale @ X + loc ``` #### About `VectorLaplace` and `Vector` distributions in TensorFlow. The `VectorLaplace` is a non-standard distribution that has useful properties. The marginals `Y_1, ..., Y_k` are *not* Laplace random variables, due to the fact that the sum of Laplace random variables is not Laplace. Instead, `Y` is a vector whose components are linear combinations of Laplace random variables. Thus, `Y` lives in the vector space generated by `vectors` of Laplace distributions. This allows the user to decide the mean and covariance (by setting `loc` and `scale`), while preserving some properties of the Laplace distribution. In particular, the tails of `Y_i` will be (up to polynomial factors) exponentially decaying. To see this last statement, note that the pdf of `Y_i` is the convolution of the pdf of `k` independent Laplace random variables. One can then show by induction that distributions with exponential (up to polynomial factors) tails are closed under convolution. 
#### Examples ```python ds = tf.contrib.distributions la = tf.contrib.linalg # Initialize a single 3-variate VectorLaplace with some desired covariance. mu = [1., 2, 3] cov = [[ 0.36, 0.12, 0.06], [ 0.12, 0.29, -0.13], [ 0.06, -0.13, 0.26]] scale = tf.cholesky(cov) # ==> [[ 0.6, 0. , 0. ], # [ 0.2, 0.5, 0. ], # [ 0.1, -0.3, 0.4]]) # Divide scale by sqrt(2) so that the final covariance will be what we want. vla = ds.VectorLaplaceLinearOperator( loc=mu, scale=la.LinearOperatorTriL(scale / tf.sqrt(2))) # Covariance agrees with cholesky(cov) parameterization. vla.covariance().eval() # ==> [[ 0.36, 0.12, 0.06], # [ 0.12, 0.29, -0.13], # [ 0.06, -0.13, 0.26]] # Compute the pdf of an`R^3` observation; return a scalar. vla.prob([-1., 0, 1]).eval() # shape: [] # Initialize a 2-batch of 3-variate Vector Laplace's. mu = [[1., 2, 3], [11, 22, 33]] # shape: [2, 3] scale_diag = [[1., 2, 3], [0.5, 1, 1.5]] # shape: [2, 3] vla = ds.VectorLaplaceLinearOperator( loc=mu, scale=la.LinearOperatorDiag(scale_diag)) # Compute the pdf of two `R^3` observations; return a length-2 vector. x = [[-0.9, 0, 0.1], [-10, 0, 9]] # shape: [2, 3] vla.prob(x).eval() # shape: [2] ``` """ def __init__(self, loc=None, scale=None, validate_args=False, allow_nan_stats=True, name="VectorLaplaceLinearOperator"): """Construct Vector Laplace distribution on `R^k`. The `batch_shape` is the broadcast shape between `loc` and `scale` arguments. The `event_shape` is given by last dimension of the matrix implied by `scale`. The last dimension of `loc` (if provided) must broadcast with this. Recall that `covariance = 2 * scale @ scale.T`. Additional leading dimensions (if any) will index batches. Args: loc: Floating-point `Tensor`. If this is set to `None`, `loc` is implicitly `0`. When specified, may have shape `[B1, ..., Bb, k]` where `b >= 0` and `k` is the event size. scale: Instance of `LinearOperator` with same `dtype` as `loc` and shape `[B1, ..., Bb, k, k]`. validate_args: Python `bool`, default `False`. 
Whether to validate input with asserts. If `validate_args` is `False`, and the inputs are invalid, correct behavior is not guaranteed. allow_nan_stats: Python `bool`, default `True`. If `False`, raise an exception if a statistic (e.g. mean/mode/etc...) is undefined for any batch member If `True`, batch members with valid parameters leading to undefined statistics will return NaN for this statistic. name: The name to give Ops created by the initializer. Raises: ValueError: if `scale` is unspecified. TypeError: if not `scale.dtype.is_floating` """ parameters = locals() if scale is None: raise ValueError("Missing required `scale` parameter.") if not scale.dtype.is_floating: raise TypeError("`scale` parameter must have floating-point dtype.") with ops.name_scope(name, values=[loc] + scale.graph_parents): # Since expand_dims doesn't preserve constant-ness, we obtain the # non-dynamic value if possible. loc = ops.convert_to_tensor(loc, name="loc") if loc is not None else loc batch_shape, event_shape = distribution_util.shapes_from_loc_and_scale( loc, scale) super(VectorLaplaceLinearOperator, self).__init__( distribution=laplace.Laplace( loc=array_ops.zeros([], dtype=scale.dtype), scale=array_ops.ones([], dtype=scale.dtype)), bijector=bijectors.AffineLinearOperator( shift=loc, scale=scale, validate_args=validate_args), batch_shape=batch_shape, event_shape=event_shape, validate_args=validate_args, name=name) self._parameters = parameters @property def loc(self): """The `loc` `Tensor` in `Y = scale @ X + loc`.""" return self.bijector.shift @property def scale(self): """The `scale` `LinearOperator` in `Y = scale @ X + loc`.""" return self.bijector.scale @distribution_util.AppendDocstring(_mvn_sample_note) def _log_prob(self, x): return super(VectorLaplaceLinearOperator, self)._log_prob(x) @distribution_util.AppendDocstring(_mvn_sample_note) def _prob(self, x): return super(VectorLaplaceLinearOperator, self)._prob(x) def _mean(self): shape = 
self.batch_shape.concatenate(self.event_shape) has_static_shape = shape.is_fully_defined() if not has_static_shape: shape = array_ops.concat([ self.batch_shape_tensor(), self.event_shape_tensor(), ], 0) if self.loc is None: return array_ops.zeros(shape, self.dtype) if has_static_shape and shape == self.loc.get_shape(): return array_ops.identity(self.loc) # Add dummy tensor of zeros to broadcast. This is only necessary if shape # != self.loc.shape, but we could not determine if this is the case. return array_ops.identity(self.loc) + array_ops.zeros(shape, self.dtype) def _covariance(self): # Let # W = (w1,...,wk), with wj ~ iid Laplace(0, 1). # Then this distribution is # X = loc + LW, # and since E[X] = loc, # Cov(X) = E[LW W^T L^T] = L E[W W^T] L^T. # Since E[wi wj] = 0 if i != j, and 2 if i == j, we have # Cov(X) = 2 LL^T if distribution_util.is_diagonal_scale(self.scale): return 2. * array_ops.matrix_diag(math_ops.square(self.scale.diag_part())) else: return 2. * self.scale.matmul(self.scale.to_dense(), adjoint_arg=True) def _variance(self): if distribution_util.is_diagonal_scale(self.scale): return 2. * math_ops.square(self.scale.diag_part()) elif (isinstance(self.scale, linalg.LinearOperatorUDVHUpdate) and self.scale.is_self_adjoint): return array_ops.matrix_diag_part( 2. * self.scale.matmul(self.scale.to_dense())) else: return 2. * array_ops.matrix_diag_part( self.scale.matmul(self.scale.to_dense(), adjoint_arg=True)) def _stddev(self): if distribution_util.is_diagonal_scale(self.scale): return np.sqrt(2) * math_ops.abs(self.scale.diag_part()) elif (isinstance(self.scale, linalg.LinearOperatorUDVHUpdate) and self.scale.is_self_adjoint): return np.sqrt(2) * math_ops.sqrt(array_ops.matrix_diag_part( self.scale.matmul(self.scale.to_dense()))) else: return np.sqrt(2) * math_ops.sqrt(array_ops.matrix_diag_part( self.scale.matmul(self.scale.to_dense(), adjoint_arg=True))) def _mode(self): return self._mean()
apache-2.0
Bysmyyr/chromium-crosswalk
tools/telemetry/third_party/gsutilz/third_party/boto/boto/ec2/image.py
92
16222
# Copyright (c) 2006-2010 Mitch Garnaat http://garnaat.org/ # Copyright (c) 2010, Eucalyptus Systems, Inc. # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. 
from boto.ec2.ec2object import EC2Object, TaggedEC2Object
from boto.ec2.blockdevicemapping import BlockDeviceMapping


class ProductCodes(list):
    """Accumulates <productCode> values from an EC2 XML response.

    Instances are used as SAX-style element handlers: the connection's
    response parser calls ``startElement``/``endElement`` as it walks
    the document.
    """

    def startElement(self, name, attrs, connection):
        # No nested structure to descend into.
        pass

    def endElement(self, name, value, connection):
        if name == 'productCode':
            self.append(value)


class BillingProducts(list):
    """Accumulates <billingProduct> values from an EC2 XML response."""

    def startElement(self, name, attrs, connection):
        # No nested structure to descend into.
        pass

    def endElement(self, name, value, connection):
        if name == 'billingProduct':
            self.append(value)


class Image(TaggedEC2Object):
    """
    Represents an EC2 Image
    """

    def __init__(self, connection=None):
        super(Image, self).__init__(connection)
        self.id = None
        self.location = None
        self.state = None
        self.ownerId = None  # for backwards compatibility
        self.owner_id = None
        self.owner_alias = None
        self.is_public = False
        self.architecture = None
        self.platform = None
        self.type = None
        self.kernel_id = None
        self.ramdisk_id = None
        self.name = None
        self.description = None
        self.product_codes = ProductCodes()
        self.billing_products = BillingProducts()
        self.block_device_mapping = None
        self.root_device_type = None
        self.root_device_name = None
        self.virtualization_type = None
        self.hypervisor = None
        self.instance_lifecycle = None
        self.sriov_net_support = None

    def __repr__(self):
        return 'Image:%s' % self.id

    def startElement(self, name, attrs, connection):
        # Let the TaggedEC2Object base handle shared elements (e.g. tag
        # sets) first; only fall through to image-specific containers.
        retval = super(Image, self).startElement(name, attrs, connection)
        if retval is not None:
            return retval
        if name == 'blockDeviceMapping':
            self.block_device_mapping = BlockDeviceMapping()
            return self.block_device_mapping
        elif name == 'productCodes':
            return self.product_codes
        elif name == 'billingProducts':
            return self.billing_products
        else:
            return None

    def endElement(self, name, value, connection):
        # Map EC2 response element names onto instance attributes.
        if name == 'imageId':
            self.id = value
        elif name == 'imageLocation':
            self.location = value
        elif name == 'imageState':
            self.state = value
        elif name == 'imageOwnerId':
            self.ownerId = value  # for backwards compatibility
            self.owner_id = value
        elif name == 'isPublic':
            if value == 'false':
                self.is_public = False
            elif value == 'true':
                self.is_public = True
            else:
                # EC2 only documents 'true'/'false'; anything else means
                # the response is malformed.
                raise Exception(
                    'Unexpected value of isPublic %s for image %s' % (
                        value,
                        self.id
                    )
                )
        elif name == 'architecture':
            self.architecture = value
        elif name == 'imageType':
            self.type = value
        elif name == 'kernelId':
            self.kernel_id = value
        elif name == 'ramdiskId':
            self.ramdisk_id = value
        elif name == 'imageOwnerAlias':
            self.owner_alias = value
        elif name == 'platform':
            self.platform = value
        elif name == 'name':
            self.name = value
        elif name == 'description':
            self.description = value
        elif name == 'rootDeviceType':
            self.root_device_type = value
        elif name == 'rootDeviceName':
            self.root_device_name = value
        elif name == 'virtualizationType':
            self.virtualization_type = value
        elif name == 'hypervisor':
            self.hypervisor = value
        elif name == 'instanceLifecycle':
            self.instance_lifecycle = value
        elif name == 'sriovNetSupport':
            self.sriov_net_support = value
        else:
            # Unknown elements become attributes verbatim so new API
            # fields are not silently dropped.
            setattr(self, name, value)

    def _update(self, updated):
        # Copy all attributes from a freshly-fetched Image onto self.
        self.__dict__.update(updated.__dict__)

    def update(self, validate=False, dry_run=False):
        """
        Update the image's state information by making a call to fetch
        the current image attributes from the service.

        :type validate: bool
        :param validate: By default, if EC2 returns no data about the
                         image the update method returns quietly.  If
                         the validate param is True, however, it will
                         raise a ValueError exception if no data is
                         returned from EC2.
        """
        rs = self.connection.get_all_images([self.id], dry_run=dry_run)
        # NOTE(review): `validate` is only consulted when EC2 returns an
        # empty result set; a non-empty result with a mismatched id is
        # silently ignored.
        if len(rs) > 0:
            img = rs[0]
            if img.id == self.id:
                self._update(img)
        elif validate:
            raise ValueError('%s is not a valid Image ID' % self.id)
        return self.state

    def run(self, min_count=1, max_count=1, key_name=None,
            security_groups=None, user_data=None,
            addressing_type=None, instance_type='m1.small', placement=None,
            kernel_id=None, ramdisk_id=None,
            monitoring_enabled=False, subnet_id=None,
            block_device_map=None,
            disable_api_termination=False,
            instance_initiated_shutdown_behavior=None,
            private_ip_address=None,
            placement_group=None, security_group_ids=None,
            additional_info=None, instance_profile_name=None,
            instance_profile_arn=None, tenancy=None, dry_run=False):
        """
        Runs this instance.

        :type min_count: int
        :param min_count: The minimum number of instances to start

        :type max_count: int
        :param max_count: The maximum number of instances to start

        :type key_name: string
        :param key_name: The name of the key pair with which to
            launch instances.

        :type security_groups: list of strings
        :param security_groups: The names of the security groups with which
            to associate instances.

        :type user_data: string
        :param user_data: The Base64-encoded MIME user data to be made
            available to the instance(s) in this reservation.

        :type instance_type: string
        :param instance_type: The type of instance to run:

            * t1.micro
            * m1.small
            * m1.medium
            * m1.large
            * m1.xlarge
            * m3.medium
            * m3.large
            * m3.xlarge
            * m3.2xlarge
            * c1.medium
            * c1.xlarge
            * m2.xlarge
            * m2.2xlarge
            * m2.4xlarge
            * cr1.8xlarge
            * hi1.4xlarge
            * hs1.8xlarge
            * cc1.4xlarge
            * cg1.4xlarge
            * cc2.8xlarge
            * g2.2xlarge
            * c3.large
            * c3.xlarge
            * c3.2xlarge
            * c3.4xlarge
            * c3.8xlarge
            * i2.xlarge
            * i2.2xlarge
            * i2.4xlarge
            * i2.8xlarge
            * t2.micro
            * t2.small
            * t2.medium

        :type placement: string
        :param placement: The Availability Zone to launch the instance into.

        :type kernel_id: string
        :param kernel_id: The ID of the kernel with which to launch the
            instances.

        :type ramdisk_id: string
        :param ramdisk_id: The ID of the RAM disk with which to launch the
            instances.

        :type monitoring_enabled: bool
        :param monitoring_enabled: Enable CloudWatch monitoring on
            the instance.

        :type subnet_id: string
        :param subnet_id: The subnet ID within which to launch the instances
            for VPC.

        :type private_ip_address: string
        :param private_ip_address: If you're using VPC, you can
            optionally use this parameter to assign the instance a
            specific available IP address from the subnet (e.g., 10.0.0.25).

        :type block_device_map: :class:`boto.ec2.blockdevicemapping.BlockDeviceMapping`
        :param block_device_map: A BlockDeviceMapping data structure
            describing the EBS volumes associated with the Image.

        :type disable_api_termination: bool
        :param disable_api_termination: If True, the instances will be locked
            and will not be able to be terminated via the API.

        :type instance_initiated_shutdown_behavior: string
        :param instance_initiated_shutdown_behavior: Specifies whether the
            instance stops or terminates on instance-initiated shutdown.
            Valid values are:

            * stop
            * terminate

        :type placement_group: string
        :param placement_group: If specified, this is the name of the
            placement group in which the instance(s) will be launched.

        :type additional_info: string
        :param additional_info: Specifies additional information to make
            available to the instance(s).

        :type security_group_ids: list of strings
        :param security_group_ids: The ID of the VPC security groups with
            which to associate instances.

        :type instance_profile_name: string
        :param instance_profile_name: The name of
            the IAM Instance Profile (IIP) to associate with the instances.

        :type instance_profile_arn: string
        :param instance_profile_arn: The Amazon resource name (ARN) of
            the IAM Instance Profile (IIP) to associate with the instances.

        :type tenancy: string
        :param tenancy: The tenancy of the instance you want to launch. An
            instance with a tenancy of 'dedicated' runs on single-tenant
            hardware and can only be launched into a VPC. Valid values are:
            "default" or "dedicated". NOTE: To use dedicated tenancy you MUST
            specify a VPC subnet-ID as well.

        :rtype: Reservation
        :return: The :class:`boto.ec2.instance.Reservation` associated with
                 the request for machines
        """
        return self.connection.run_instances(self.id, min_count, max_count,
                                             key_name, security_groups,
                                             user_data, addressing_type,
                                             instance_type, placement,
                                             kernel_id, ramdisk_id,
                                             monitoring_enabled, subnet_id,
                                             block_device_map,
                                             disable_api_termination,
                                             instance_initiated_shutdown_behavior,
                                             private_ip_address,
                                             placement_group,
                                             security_group_ids=security_group_ids,
                                             additional_info=additional_info,
                                             instance_profile_name=instance_profile_name,
                                             instance_profile_arn=instance_profile_arn,
                                             tenancy=tenancy, dry_run=dry_run)

    def deregister(self, delete_snapshot=False, dry_run=False):
        """Deregister this image; optionally delete its root snapshot."""
        return self.connection.deregister_image(
            self.id,
            delete_snapshot,
            dry_run=dry_run
        )

    def get_launch_permissions(self, dry_run=False):
        """Return the image's launchPermission attribute as a dict."""
        img_attrs = self.connection.get_image_attribute(
            self.id,
            'launchPermission',
            dry_run=dry_run
        )
        return img_attrs.attrs

    def set_launch_permissions(self, user_ids=None, group_names=None,
                               dry_run=False):
        """Grant launch permission to the given users and/or groups."""
        return self.connection.modify_image_attribute(self.id,
                                                      'launchPermission',
                                                      'add',
                                                      user_ids,
                                                      group_names,
                                                      dry_run=dry_run)

    def remove_launch_permissions(self, user_ids=None, group_names=None,
                                  dry_run=False):
        """Revoke launch permission from the given users and/or groups."""
        return self.connection.modify_image_attribute(self.id,
                                                      'launchPermission',
                                                      'remove',
                                                      user_ids,
                                                      group_names,
                                                      dry_run=dry_run)

    def reset_launch_attributes(self, dry_run=False):
        """Reset launchPermission to its default value."""
        return self.connection.reset_image_attribute(
            self.id,
            'launchPermission',
            dry_run=dry_run
        )

    def get_kernel(self, dry_run=False):
        """Return the image's kernel attribute."""
        img_attrs = self.connection.get_image_attribute(
            self.id,
            'kernel',
            dry_run=dry_run
        )
        return img_attrs.kernel

    def get_ramdisk(self, dry_run=False):
        """Return the image's ramdisk attribute."""
        img_attrs = self.connection.get_image_attribute(
            self.id,
            'ramdisk',
            dry_run=dry_run
        )
        return img_attrs.ramdisk


class ImageAttribute(object):
    """Parses a DescribeImageAttribute response.

    Scalar attributes land on ``kernel``/``ramdisk``; list-valued
    permissions and product codes accumulate in the ``attrs`` dict.
    """

    def __init__(self, parent=None):
        self.name = None
        self.kernel = None
        self.ramdisk = None
        self.attrs = {}

    def startElement(self, name, attrs, connection):
        if name == 'blockDeviceMapping':
            self.attrs['block_device_mapping'] = BlockDeviceMapping()
            return self.attrs['block_device_mapping']
        else:
            return None

    def endElement(self, name, value, connection):
        if name == 'launchPermission':
            self.name = 'launch_permission'
        elif name == 'group':
            if 'groups' in self.attrs:
                self.attrs['groups'].append(value)
            else:
                self.attrs['groups'] = [value]
        elif name == 'userId':
            if 'user_ids' in self.attrs:
                self.attrs['user_ids'].append(value)
            else:
                self.attrs['user_ids'] = [value]
        elif name == 'productCode':
            if 'product_codes' in self.attrs:
                self.attrs['product_codes'].append(value)
            else:
                self.attrs['product_codes'] = [value]
        elif name == 'imageId':
            self.image_id = value
        elif name == 'kernel':
            self.kernel = value
        elif name == 'ramdisk':
            self.ramdisk = value
        else:
            setattr(self, name, value)


class CopyImage(object):
    """Parses a CopyImage response; holds the new image's id."""

    def __init__(self, parent=None):
        self._parent = parent
        self.image_id = None

    def startElement(self, name, attrs, connection):
        pass

    def endElement(self, name, value, connection):
        if name == 'imageId':
            self.image_id = value
bsd-3-clause
mgagne/nova
nova/tests/unit/api/openstack/compute/test_plugins/basic.py
46
1179
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

"""Basic Test Extension"""

from nova.api.openstack import extensions
from nova.api.openstack import wsgi

ALIAS = 'test-basic'


class BasicController(wsgi.Controller):
    """Minimal controller whose index action returns a fixed payload."""

    def index(self, req):
        # The request is unused; the response body is a constant mapping.
        return {'param': 'val'}


class Basic(extensions.V3APIExtensionBase):
    """Basic Test Extension."""

    name = "BasicTest"
    alias = ALIAS
    version = 1

    def get_resources(self):
        """Expose BasicController under the 'test' collection."""
        return [extensions.ResourceExtension('test', BasicController())]

    def get_controller_extensions(self):
        """This extension contributes no controller extensions."""
        return []
apache-2.0
googleapis/python-dialogflow
google/cloud/dialogflow_v2beta1/services/contexts/async_client.py
1
32318
# -*- coding: utf-8 -*- # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # from collections import OrderedDict import functools import re from typing import Dict, Sequence, Tuple, Type, Union import pkg_resources import google.api_core.client_options as ClientOptions # type: ignore from google.api_core import exceptions as core_exceptions # type: ignore from google.api_core import gapic_v1 # type: ignore from google.api_core import retry as retries # type: ignore from google.auth import credentials as ga_credentials # type: ignore from google.oauth2 import service_account # type: ignore from google.cloud.dialogflow_v2beta1.services.contexts import pagers from google.cloud.dialogflow_v2beta1.types import context from google.cloud.dialogflow_v2beta1.types import context as gcd_context from google.protobuf import field_mask_pb2 # type: ignore from google.protobuf import struct_pb2 # type: ignore from .transports.base import ContextsTransport, DEFAULT_CLIENT_INFO from .transports.grpc_asyncio import ContextsGrpcAsyncIOTransport from .client import ContextsClient class ContextsAsyncClient: """Service for managing [Contexts][google.cloud.dialogflow.v2beta1.Context]. 
""" _client: ContextsClient DEFAULT_ENDPOINT = ContextsClient.DEFAULT_ENDPOINT DEFAULT_MTLS_ENDPOINT = ContextsClient.DEFAULT_MTLS_ENDPOINT context_path = staticmethod(ContextsClient.context_path) parse_context_path = staticmethod(ContextsClient.parse_context_path) common_billing_account_path = staticmethod( ContextsClient.common_billing_account_path ) parse_common_billing_account_path = staticmethod( ContextsClient.parse_common_billing_account_path ) common_folder_path = staticmethod(ContextsClient.common_folder_path) parse_common_folder_path = staticmethod(ContextsClient.parse_common_folder_path) common_organization_path = staticmethod(ContextsClient.common_organization_path) parse_common_organization_path = staticmethod( ContextsClient.parse_common_organization_path ) common_project_path = staticmethod(ContextsClient.common_project_path) parse_common_project_path = staticmethod(ContextsClient.parse_common_project_path) common_location_path = staticmethod(ContextsClient.common_location_path) parse_common_location_path = staticmethod(ContextsClient.parse_common_location_path) @classmethod def from_service_account_info(cls, info: dict, *args, **kwargs): """Creates an instance of this client using the provided credentials info. Args: info (dict): The service account private key info. args: Additional arguments to pass to the constructor. kwargs: Additional arguments to pass to the constructor. Returns: ContextsAsyncClient: The constructed client. """ return ContextsClient.from_service_account_info.__func__(ContextsAsyncClient, info, *args, **kwargs) # type: ignore @classmethod def from_service_account_file(cls, filename: str, *args, **kwargs): """Creates an instance of this client using the provided credentials file. Args: filename (str): The path to the service account private key json file. args: Additional arguments to pass to the constructor. kwargs: Additional arguments to pass to the constructor. Returns: ContextsAsyncClient: The constructed client. 
""" return ContextsClient.from_service_account_file.__func__(ContextsAsyncClient, filename, *args, **kwargs) # type: ignore from_service_account_json = from_service_account_file @property def transport(self) -> ContextsTransport: """Returns the transport used by the client instance. Returns: ContextsTransport: The transport used by the client instance. """ return self._client.transport get_transport_class = functools.partial( type(ContextsClient).get_transport_class, type(ContextsClient) ) def __init__( self, *, credentials: ga_credentials.Credentials = None, transport: Union[str, ContextsTransport] = "grpc_asyncio", client_options: ClientOptions = None, client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO, ) -> None: """Instantiates the contexts client. Args: credentials (Optional[google.auth.credentials.Credentials]): The authorization credentials to attach to requests. These credentials identify the application to the service; if none are specified, the client will attempt to ascertain the credentials from the environment. transport (Union[str, ~.ContextsTransport]): The transport to use. If set to None, a transport is chosen automatically. client_options (ClientOptions): Custom options for the client. It won't take effect if a ``transport`` instance is provided. (1) The ``api_endpoint`` property can be used to override the default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT environment variable can also be used to override the endpoint: "always" (always use the default mTLS endpoint), "never" (always use the default regular endpoint) and "auto" (auto switch to the default mTLS endpoint if client certificate is present, this is the default value). However, the ``api_endpoint`` property takes precedence if provided. (2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable is "true", then the ``client_cert_source`` property can be used to provide client certificate for mutual TLS transport. 
If not provided, the default SSL client certificate will be used if present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not set, no client certificate will be used. Raises: google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport creation failed for any reason. """ self._client = ContextsClient( credentials=credentials, transport=transport, client_options=client_options, client_info=client_info, ) async def list_contexts( self, request: context.ListContextsRequest = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> pagers.ListContextsAsyncPager: r"""Returns the list of all contexts in the specified session. Args: request (:class:`google.cloud.dialogflow_v2beta1.types.ListContextsRequest`): The request object. The request message for [Contexts.ListContexts][google.cloud.dialogflow.v2beta1.Contexts.ListContexts]. parent (:class:`str`): Required. The session to list all contexts from. Supported formats: - \`projects//agent/sessions/, - ``projects/<Project ID>/locations/<Location ID>/agent/sessions/<Session ID>``, - ``projects/<Project ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session ID>``, - ``projects/<Project ID>/locations/<Location ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session ID>``, If ``Location ID`` is not specified we assume default 'us' location. If ``Environment ID`` is not specified, we assume default 'draft' environment. If ``User ID`` is not specified, we assume default '-' user. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. 
Returns: google.cloud.dialogflow_v2beta1.services.contexts.pagers.ListContextsAsyncPager: The response message for [Contexts.ListContexts][google.cloud.dialogflow.v2beta1.Contexts.ListContexts]. Iterating over this object will yield results and resolve additional pages automatically. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) request = context.ListContextsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.list_contexts, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # This method is paged; wrap the response in a pager, which provides # an `__aiter__` convenience method. response = pagers.ListContextsAsyncPager( method=rpc, request=request, response=response, metadata=metadata, ) # Done; return the response. return response async def get_context( self, request: context.GetContextRequest = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> context.Context: r"""Retrieves the specified context. Args: request (:class:`google.cloud.dialogflow_v2beta1.types.GetContextRequest`): The request object. 
The request message for [Contexts.GetContext][google.cloud.dialogflow.v2beta1.Contexts.GetContext]. name (:class:`str`): Required. The name of the context. Supported formats: - ``projects/<Project ID>/agent/sessions/<Session ID>/contexts/<Context ID>``, - ``projects/<Project ID>/locations/<Location ID>/agent/sessions/<Session ID>/contexts/<Context ID>``, - ``projects/<Project ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session ID>/contexts/<Context ID>``, - ``projects/<Project ID>/locations/<Location ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session ID>/contexts/<Context ID>``, If ``Location ID`` is not specified we assume default 'us' location. If ``Environment ID`` is not specified, we assume default 'draft' environment. If ``User ID`` is not specified, we assume default '-' user. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: google.cloud.dialogflow_v2beta1.types.Context: Dialogflow contexts are similar to natural language context. If a person says to you "they are orange", you need context in order to understand what "they" is referring to. Similarly, for Dialogflow to handle an end-user expression like that, it needs to be provided with context in order to correctly match an intent. Using contexts, you can control the flow of a conversation. You can configure contexts for an intent by setting input and output contexts, which are identified by string names. When an intent is matched, any configured output contexts for that intent become active. 
While any contexts are active, Dialogflow is more likely to match intents that are configured with input contexts that correspond to the currently active contexts. For more information about context, see the [Contexts guide](\ https://cloud.google.com/dialogflow/docs/contexts-overview). """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) request = context.GetContextRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.get_context, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response async def create_context( self, request: gcd_context.CreateContextRequest = None, *, parent: str = None, context: gcd_context.Context = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gcd_context.Context: r"""Creates a context. If the specified context already exists, overrides the context. Args: request (:class:`google.cloud.dialogflow_v2beta1.types.CreateContextRequest`): The request object. The request message for [Contexts.CreateContext][google.cloud.dialogflow.v2beta1.Contexts.CreateContext]. 
parent (:class:`str`): Required. The session to create a context for. Supported formats: - \`projects//agent/sessions/, - ``projects/<Project ID>/locations/<Location ID>/agent/sessions/<Session ID>``, - ``projects/<Project ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session ID>``, - ``projects/<Project ID>/locations/<Location ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session ID>``, If ``Location ID`` is not specified we assume default 'us' location. If ``Environment ID`` is not specified, we assume default 'draft' environment. If ``User ID`` is not specified, we assume default '-' user. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. context (:class:`google.cloud.dialogflow_v2beta1.types.Context`): Required. The context to create. This corresponds to the ``context`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: google.cloud.dialogflow_v2beta1.types.Context: Dialogflow contexts are similar to natural language context. If a person says to you "they are orange", you need context in order to understand what "they" is referring to. Similarly, for Dialogflow to handle an end-user expression like that, it needs to be provided with context in order to correctly match an intent. Using contexts, you can control the flow of a conversation. You can configure contexts for an intent by setting input and output contexts, which are identified by string names. When an intent is matched, any configured output contexts for that intent become active. 
While any contexts are active, Dialogflow is more likely to match intents that are configured with input contexts that correspond to the currently active contexts. For more information about context, see the [Contexts guide](\ https://cloud.google.com/dialogflow/docs/contexts-overview). """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([parent, context]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) request = gcd_context.CreateContextRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent if context is not None: request.context = context # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.create_context, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response async def update_context( self, request: gcd_context.UpdateContextRequest = None, *, context: gcd_context.Context = None, update_mask: field_mask_pb2.FieldMask = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> gcd_context.Context: r"""Updates the specified context. Args: request (:class:`google.cloud.dialogflow_v2beta1.types.UpdateContextRequest`): The request object. 
The request message for [Contexts.UpdateContext][google.cloud.dialogflow.v2beta1.Contexts.UpdateContext]. context (:class:`google.cloud.dialogflow_v2beta1.types.Context`): Required. The context to update. This corresponds to the ``context`` field on the ``request`` instance; if ``request`` is provided, this should not be set. update_mask (:class:`google.protobuf.field_mask_pb2.FieldMask`): Optional. The mask to control which fields get updated. This corresponds to the ``update_mask`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. Returns: google.cloud.dialogflow_v2beta1.types.Context: Dialogflow contexts are similar to natural language context. If a person says to you "they are orange", you need context in order to understand what "they" is referring to. Similarly, for Dialogflow to handle an end-user expression like that, it needs to be provided with context in order to correctly match an intent. Using contexts, you can control the flow of a conversation. You can configure contexts for an intent by setting input and output contexts, which are identified by string names. When an intent is matched, any configured output contexts for that intent become active. While any contexts are active, Dialogflow is more likely to match intents that are configured with input contexts that correspond to the currently active contexts. For more information about context, see the [Contexts guide](\ https://cloud.google.com/dialogflow/docs/contexts-overview). """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([context, update_mask]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) request = gcd_context.UpdateContextRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if context is not None: request.context = context if update_mask is not None: request.update_mask = update_mask # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.update_context, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata( (("context.name", request.context.name),) ), ) # Send the request. response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,) # Done; return the response. return response async def delete_context( self, request: context.DeleteContextRequest = None, *, name: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes the specified context. Args: request (:class:`google.cloud.dialogflow_v2beta1.types.DeleteContextRequest`): The request object. The request message for [Contexts.DeleteContext][google.cloud.dialogflow.v2beta1.Contexts.DeleteContext]. name (:class:`str`): Required. The name of the context to delete. 
Supported formats: - ``projects/<Project ID>/agent/sessions/<Session ID>/contexts/<Context ID>``, - ``projects/<Project ID>/locations/<Location ID>/agent/sessions/<Session ID>/contexts/<Context ID>``, - ``projects/<Project ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session ID>/contexts/<Context ID>``, - ``projects/<Project ID>/locations/<Location ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session ID>/contexts/<Context ID>``, If ``Location ID`` is not specified we assume default 'us' location. If ``Environment ID`` is not specified, we assume default 'draft' environment. If ``User ID`` is not specified, we assume default '-' user. This corresponds to the ``name`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. has_flattened_params = any([name]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) request = context.DeleteContextRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if name is not None: request.name = name # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_context, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. 
metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("name", request.name),)), ) # Send the request. await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) async def delete_all_contexts( self, request: context.DeleteAllContextsRequest = None, *, parent: str = None, retry: retries.Retry = gapic_v1.method.DEFAULT, timeout: float = None, metadata: Sequence[Tuple[str, str]] = (), ) -> None: r"""Deletes all active contexts in the specified session. Args: request (:class:`google.cloud.dialogflow_v2beta1.types.DeleteAllContextsRequest`): The request object. The request message for [Contexts.DeleteAllContexts][google.cloud.dialogflow.v2beta1.Contexts.DeleteAllContexts]. parent (:class:`str`): Required. The name of the session to delete all contexts from. Supported formats: - \`projects//agent/sessions/, - ``projects/<Project ID>/locations/<Location ID>/agent/sessions/<Session ID>``, - ``projects/<Project ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session ID>``, - ``projects/<Project ID>/locations/<Location ID>/agent/environments/<Environment ID>/users/<User ID>/sessions/<Session ID>``, If ``Location ID`` is not specified we assume default 'us' location. If ``Environment ID`` is not specified we assume default 'draft' environment. If ``User ID`` is not specified, we assume default '-' user. This corresponds to the ``parent`` field on the ``request`` instance; if ``request`` is provided, this should not be set. retry (google.api_core.retry.Retry): Designation of what errors, if any, should be retried. timeout (float): The timeout for this request. metadata (Sequence[Tuple[str, str]]): Strings which should be sent along with the request as metadata. """ # Create or coerce a protobuf request object. # Sanity check: If we got a request object, we should *not* have # gotten any keyword arguments that map to the request. 
has_flattened_params = any([parent]) if request is not None and has_flattened_params: raise ValueError( "If the `request` argument is set, then none of " "the individual field arguments should be set." ) request = context.DeleteAllContextsRequest(request) # If we have keyword arguments corresponding to fields on the # request, apply these. if parent is not None: request.parent = parent # Wrap the RPC method; this adds retry and timeout information, # and friendly error handling. rpc = gapic_v1.method_async.wrap_method( self._client._transport.delete_all_contexts, default_timeout=None, client_info=DEFAULT_CLIENT_INFO, ) # Certain fields should be provided within the metadata header; # add these here. metadata = tuple(metadata) + ( gapic_v1.routing_header.to_grpc_metadata((("parent", request.parent),)), ) # Send the request. await rpc( request, retry=retry, timeout=timeout, metadata=metadata, ) try: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo( gapic_version=pkg_resources.get_distribution( "google-cloud-dialogflow", ).version, ) except pkg_resources.DistributionNotFound: DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo() __all__ = ("ContextsAsyncClient",)
apache-2.0
b-me/django
django/db/models/base.py
39
70506
from __future__ import unicode_literals import copy import inspect import warnings from itertools import chain from django.apps import apps from django.conf import settings from django.core import checks from django.core.exceptions import ( NON_FIELD_ERRORS, FieldDoesNotExist, FieldError, MultipleObjectsReturned, ObjectDoesNotExist, ValidationError, ) from django.db import ( DEFAULT_DB_ALIAS, DJANGO_VERSION_PICKLE_KEY, DatabaseError, connections, router, transaction, ) from django.db.models import signals from django.db.models.constants import LOOKUP_SEP from django.db.models.deletion import CASCADE, Collector from django.db.models.fields import AutoField from django.db.models.fields.related import ( ForeignObjectRel, ManyToOneRel, OneToOneField, lazy_related_operation, resolve_relation, ) from django.db.models.manager import ensure_default_manager from django.db.models.options import Options from django.db.models.query import Q from django.db.models.query_utils import ( DeferredAttribute, deferred_class_factory, ) from django.db.models.utils import make_model_tuple from django.utils import six from django.utils.encoding import force_str, force_text from django.utils.functional import curry from django.utils.six.moves import zip from django.utils.text import capfirst, get_text_list from django.utils.translation import ugettext_lazy as _ from django.utils.version import get_version def subclass_exception(name, parents, module, attached_to=None): """ Create exception subclass. Used by ModelBase below. If 'attached_to' is supplied, the exception will be created in a way that allows it to be pickled, assuming the returned exception class will be added as an attribute to the 'attached_to' class. """ class_dict = {'__module__': module} if attached_to is not None: def __reduce__(self): # Exceptions are special - they've got state that isn't # in self.__dict__. We assume it is all in self.args. 
return (unpickle_inner_exception, (attached_to, name), self.args) def __setstate__(self, args): self.args = args class_dict['__reduce__'] = __reduce__ class_dict['__setstate__'] = __setstate__ return type(name, parents, class_dict) class ModelBase(type): """ Metaclass for all models. """ def __new__(cls, name, bases, attrs): super_new = super(ModelBase, cls).__new__ # Also ensure initialization is only performed for subclasses of Model # (excluding Model class itself). parents = [b for b in bases if isinstance(b, ModelBase)] if not parents: return super_new(cls, name, bases, attrs) # Create the class. module = attrs.pop('__module__') new_class = super_new(cls, name, bases, {'__module__': module}) attr_meta = attrs.pop('Meta', None) abstract = getattr(attr_meta, 'abstract', False) if not attr_meta: meta = getattr(new_class, 'Meta', None) else: meta = attr_meta base_meta = getattr(new_class, '_meta', None) app_label = None # Look for an application configuration to attach the model to. app_config = apps.get_containing_app_config(module) if getattr(meta, 'app_label', None) is None: if app_config is None: if not abstract: raise RuntimeError( "Model class %s.%s doesn't declare an explicit " "app_label and either isn't in an application in " "INSTALLED_APPS or else was imported before its " "application was loaded. 
" % (module, name)) else: app_label = app_config.label new_class.add_to_class('_meta', Options(meta, app_label)) if not abstract: new_class.add_to_class( 'DoesNotExist', subclass_exception( str('DoesNotExist'), tuple( x.DoesNotExist for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (ObjectDoesNotExist,), module, attached_to=new_class)) new_class.add_to_class( 'MultipleObjectsReturned', subclass_exception( str('MultipleObjectsReturned'), tuple( x.MultipleObjectsReturned for x in parents if hasattr(x, '_meta') and not x._meta.abstract ) or (MultipleObjectsReturned,), module, attached_to=new_class)) if base_meta and not base_meta.abstract: # Non-abstract child classes inherit some attributes from their # non-abstract parent (unless an ABC comes before it in the # method resolution order). if not hasattr(meta, 'ordering'): new_class._meta.ordering = base_meta.ordering if not hasattr(meta, 'get_latest_by'): new_class._meta.get_latest_by = base_meta.get_latest_by is_proxy = new_class._meta.proxy # If the model is a proxy, ensure that the base class # hasn't been swapped out. if is_proxy and base_meta and base_meta.swapped: raise TypeError("%s cannot proxy the swapped model '%s'." % (name, base_meta.swapped)) if getattr(new_class, '_default_manager', None): if not is_proxy: # Multi-table inheritance doesn't inherit default manager from # parents. new_class._default_manager = None new_class._base_manager = None else: # Proxy classes do inherit parent's default manager, if none is # set explicitly. new_class._default_manager = new_class._default_manager._copy_to_model(new_class) new_class._base_manager = new_class._base_manager._copy_to_model(new_class) # Add all attributes to the class. 
for obj_name, obj in attrs.items(): new_class.add_to_class(obj_name, obj) # All the fields of any type declared on this model new_fields = chain( new_class._meta.local_fields, new_class._meta.local_many_to_many, new_class._meta.virtual_fields ) field_names = {f.name for f in new_fields} # Basic setup for proxy models. if is_proxy: base = None for parent in [kls for kls in parents if hasattr(kls, '_meta')]: if parent._meta.abstract: if parent._meta.fields: raise TypeError( "Abstract base class containing model fields not " "permitted for proxy model '%s'." % name ) else: continue if base is not None: raise TypeError("Proxy model '%s' has more than one non-abstract model base class." % name) else: base = parent if base is None: raise TypeError("Proxy model '%s' has no non-abstract model base class." % name) new_class._meta.setup_proxy(base) new_class._meta.concrete_model = base._meta.concrete_model base._meta.concrete_model._meta.proxied_children.append(new_class._meta) else: new_class._meta.concrete_model = new_class # Collect the parent links for multi-table inheritance. parent_links = {} for base in reversed([new_class] + parents): # Conceptually equivalent to `if base is Model`. if not hasattr(base, '_meta'): continue # Skip concrete parent classes. if base != new_class and not base._meta.abstract: continue # Locate OneToOneField instances. for field in base._meta.local_fields: if isinstance(field, OneToOneField): related = resolve_relation(new_class, field.remote_field.model) parent_links[make_model_tuple(related)] = field # Do the appropriate setup for any model parents. for base in parents: original_base = base if not hasattr(base, '_meta'): # Things without _meta aren't functional models, so they're # uninteresting parents. continue parent_fields = base._meta.local_fields + base._meta.local_many_to_many # Check for clashes between locally declared fields and those # on the base classes (we cannot handle shadowed fields at the # moment). 
for field in parent_fields: if field.name in field_names: raise FieldError( 'Local field %r in class %r clashes ' 'with field of similar name from ' 'base class %r' % (field.name, name, base.__name__) ) if not base._meta.abstract: # Concrete classes... base = base._meta.concrete_model base_key = make_model_tuple(base) if base_key in parent_links: field = parent_links[base_key] elif not is_proxy: attr_name = '%s_ptr' % base._meta.model_name field = OneToOneField( base, on_delete=CASCADE, name=attr_name, auto_created=True, parent_link=True, ) # Only add the ptr field if it's not already present; # e.g. migrations will already have it specified if not hasattr(new_class, attr_name): new_class.add_to_class(attr_name, field) else: field = None new_class._meta.parents[base] = field else: # .. and abstract ones. for field in parent_fields: new_field = copy.deepcopy(field) new_class.add_to_class(field.name, new_field) # Pass any non-abstract parent classes onto child. new_class._meta.parents.update(base._meta.parents) # Inherit managers from the abstract base classes. new_class.copy_managers(base._meta.abstract_managers) # Proxy models inherit the non-abstract managers from their base, # unless they have redefined any of them. if is_proxy: new_class.copy_managers(original_base._meta.concrete_managers) # Inherit virtual fields (like GenericForeignKey) from the parent # class for field in base._meta.virtual_fields: if base._meta.abstract and field.name in field_names: raise FieldError( 'Local field %r in class %r clashes ' 'with field of similar name from ' 'abstract base class %r' % (field.name, name, base.__name__) ) new_class.add_to_class(field.name, copy.deepcopy(field)) if abstract: # Abstract base models can't be instantiated and don't appear in # the list of models for an app. We do the final setup for them a # little differently from normal models. 
attr_meta.abstract = False new_class.Meta = attr_meta return new_class new_class._prepare() new_class._meta.apps.register_model(new_class._meta.app_label, new_class) return new_class def copy_managers(cls, base_managers): # This is in-place sorting of an Options attribute, but that's fine. base_managers.sort() for _, mgr_name, manager in base_managers: # NOQA (redefinition of _) val = getattr(cls, mgr_name, None) if not val or val is manager: new_manager = manager._copy_to_model(cls) cls.add_to_class(mgr_name, new_manager) def add_to_class(cls, name, value): # We should call the contribute_to_class method only if it's bound if not inspect.isclass(value) and hasattr(value, 'contribute_to_class'): value.contribute_to_class(cls, name) else: setattr(cls, name, value) def _prepare(cls): """ Creates some methods once self._meta has been populated. """ opts = cls._meta opts._prepare(cls) if opts.order_with_respect_to: cls.get_next_in_order = curry(cls._get_next_or_previous_in_order, is_next=True) cls.get_previous_in_order = curry(cls._get_next_or_previous_in_order, is_next=False) # defer creating accessors on the foreign class until we are # certain it has been created def make_foreign_order_accessors(cls, model, field): setattr( field.remote_field.model, 'get_%s_order' % cls.__name__.lower(), curry(method_get_order, cls) ) setattr( field.remote_field.model, 'set_%s_order' % cls.__name__.lower(), curry(method_set_order, cls) ) wrt = opts.order_with_respect_to lazy_related_operation(make_foreign_order_accessors, cls, wrt.remote_field.model, field=wrt) # Give the class a docstring -- its definition. 
if cls.__doc__ is None: cls.__doc__ = "%s(%s)" % (cls.__name__, ", ".join(f.name for f in opts.fields)) get_absolute_url_override = settings.ABSOLUTE_URL_OVERRIDES.get(opts.label_lower) if get_absolute_url_override: setattr(cls, 'get_absolute_url', get_absolute_url_override) ensure_default_manager(cls) signals.class_prepared.send(sender=cls) class ModelState(object): """ A class for storing instance state """ def __init__(self, db=None): self.db = db # If true, uniqueness validation checks will consider this a new, as-yet-unsaved object. # Necessary for correct validation of new instances of objects with explicit (non-auto) PKs. # This impacts validation only; it has no effect on the actual save. self.adding = True class Model(six.with_metaclass(ModelBase)): _deferred = False def __init__(self, *args, **kwargs): signals.pre_init.send(sender=self.__class__, args=args, kwargs=kwargs) # Set up the storage for instance state self._state = ModelState() # There is a rather weird disparity here; if kwargs, it's set, then args # overrides it. It should be one or the other; don't duplicate the work # The reason for the kwargs check is that standard iterator passes in by # args, and instantiation for iteration is 33% faster. args_len = len(args) if args_len > len(self._meta.concrete_fields): # Daft, but matches old exception sans the err msg. raise IndexError("Number of args exceeds number of fields") if not kwargs: fields_iter = iter(self._meta.concrete_fields) # The ordering of the zip calls matter - zip throws StopIteration # when an iter throws it. So if the first iter throws it, the second # is *not* consumed. We rely on this, so don't change the order # without changing the logic. for val, field in zip(args, fields_iter): setattr(self, field.attname, val) else: # Slower, kwargs-ready version. 
fields_iter = iter(self._meta.fields) for val, field in zip(args, fields_iter): setattr(self, field.attname, val) kwargs.pop(field.name, None) # Maintain compatibility with existing calls. if isinstance(field.remote_field, ManyToOneRel): kwargs.pop(field.attname, None) # Now we're left with the unprocessed fields that *must* come from # keywords, or default. for field in fields_iter: is_related_object = False # This slightly odd construct is so that we can access any # data-descriptor object (DeferredAttribute) without triggering its # __get__ method. if (field.attname not in kwargs and (isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute) or field.column is None)): # This field will be populated on request. continue if kwargs: if isinstance(field.remote_field, ForeignObjectRel): try: # Assume object instance was passed in. rel_obj = kwargs.pop(field.name) is_related_object = True except KeyError: try: # Object instance wasn't passed in -- must be an ID. val = kwargs.pop(field.attname) except KeyError: val = field.get_default() else: # Object instance was passed in. Special case: You can # pass in "None" for related objects if it's allowed. if rel_obj is None and field.null: val = None else: try: val = kwargs.pop(field.attname) except KeyError: # This is done with an exception rather than the # default argument on pop because we don't want # get_default() to be evaluated, and then not used. # Refs #12057. val = field.get_default() else: val = field.get_default() if is_related_object: # If we are passed a related instance, set it using the # field.name instead of field.attname (e.g. "user" instead of # "user_id") so that the object gets properly cached (and type # checked) by the RelatedObjectDescriptor. 
setattr(self, field.name, rel_obj) else: setattr(self, field.attname, val) if kwargs: for prop in list(kwargs): try: if isinstance(getattr(self.__class__, prop), property): setattr(self, prop, kwargs.pop(prop)) except AttributeError: pass if kwargs: raise TypeError("'%s' is an invalid keyword argument for this function" % list(kwargs)[0]) super(Model, self).__init__() signals.post_init.send(sender=self.__class__, instance=self) @classmethod def from_db(cls, db, field_names, values): if cls._deferred: new = cls(**dict(zip(field_names, values))) else: new = cls(*values) new._state.adding = False new._state.db = db return new def __repr__(self): try: u = six.text_type(self) except (UnicodeEncodeError, UnicodeDecodeError): u = '[Bad Unicode data]' return force_str('<%s: %s>' % (self.__class__.__name__, u)) def __str__(self): if six.PY2 and hasattr(self, '__unicode__'): return force_text(self).encode('utf-8') return '%s object' % self.__class__.__name__ def __eq__(self, other): if not isinstance(other, Model): return False if self._meta.concrete_model != other._meta.concrete_model: return False my_pk = self._get_pk_val() if my_pk is None: return self is other return my_pk == other._get_pk_val() def __ne__(self, other): return not self.__eq__(other) def __hash__(self): if self._get_pk_val() is None: raise TypeError("Model instances without primary key value are unhashable") return hash(self._get_pk_val()) def __reduce__(self): """ Provides pickling support. Normally, this just dispatches to Python's standard handling. However, for models with deferred field loading, we need to do things manually, as they're dynamically created classes and only module-level classes can be pickled by the default path. 
""" data = self.__dict__ data[DJANGO_VERSION_PICKLE_KEY] = get_version() if not self._deferred: class_id = self._meta.app_label, self._meta.object_name return model_unpickle, (class_id, [], simple_class_factory), data defers = [] for field in self._meta.fields: if isinstance(self.__class__.__dict__.get(field.attname), DeferredAttribute): defers.append(field.attname) model = self._meta.proxy_for_model class_id = model._meta.app_label, model._meta.object_name return (model_unpickle, (class_id, defers, deferred_class_factory), data) def __setstate__(self, state): msg = None pickled_version = state.get(DJANGO_VERSION_PICKLE_KEY) if pickled_version: current_version = get_version() if current_version != pickled_version: msg = ("Pickled model instance's Django version %s does" " not match the current version %s." % (pickled_version, current_version)) else: msg = "Pickled model instance's Django version is not specified." if msg: warnings.warn(msg, RuntimeWarning, stacklevel=2) self.__dict__.update(state) def _get_pk_val(self, meta=None): if not meta: meta = self._meta return getattr(self, meta.pk.attname) def _set_pk_val(self, value): return setattr(self, self._meta.pk.attname, value) pk = property(_get_pk_val, _set_pk_val) def get_deferred_fields(self): """ Returns a set containing names of deferred fields for this instance. """ return { f.attname for f in self._meta.concrete_fields if isinstance(self.__class__.__dict__.get(f.attname), DeferredAttribute) } def refresh_from_db(self, using=None, fields=None, **kwargs): """ Reloads field values from the database. By default, the reloading happens from the database this instance was loaded from, or by the read router if this instance wasn't loaded from any database. The using parameter will override the default. Fields can be used to specify which fields to reload. The fields should be an iterable of field attnames. If fields is None, then all non-deferred fields are reloaded. 
When accessing deferred fields of an instance, the deferred loading of the field will call this method. """ if fields is not None: if len(fields) == 0: return if any(LOOKUP_SEP in f for f in fields): raise ValueError( 'Found "%s" in fields argument. Relations and transforms ' 'are not allowed in fields.' % LOOKUP_SEP) db = using if using is not None else self._state.db if self._deferred: non_deferred_model = self._meta.proxy_for_model else: non_deferred_model = self.__class__ db_instance_qs = non_deferred_model._default_manager.using(db).filter(pk=self.pk) # Use provided fields, if not set then reload all non-deferred fields. if fields is not None: fields = list(fields) db_instance_qs = db_instance_qs.only(*fields) elif self._deferred: deferred_fields = self.get_deferred_fields() fields = [f.attname for f in self._meta.concrete_fields if f.attname not in deferred_fields] db_instance_qs = db_instance_qs.only(*fields) db_instance = db_instance_qs.get() non_loaded_fields = db_instance.get_deferred_fields() for field in self._meta.concrete_fields: if field.attname in non_loaded_fields: # This field wasn't refreshed - skip ahead. continue setattr(self, field.attname, getattr(db_instance, field.attname)) # Throw away stale foreign key references. if field.is_relation and field.get_cache_name() in self.__dict__: rel_instance = getattr(self, field.get_cache_name()) local_val = getattr(db_instance, field.attname) related_val = None if rel_instance is None else getattr(rel_instance, field.target_field.attname) if local_val != related_val: del self.__dict__[field.get_cache_name()] self._state.db = db_instance._state.db def serializable_value(self, field_name): """ Returns the value of the field name for this instance. If the field is a foreign key, returns the id value, instead of the object. If there's no Field object with this name on the model, the model attribute's value is returned directly. 
Used to serialize a field's value (in the serializer, or form output, for example). Normally, you would just access the attribute directly and not use this method. """ try: field = self._meta.get_field(field_name) except FieldDoesNotExist: return getattr(self, field_name) return getattr(self, field.attname) def save(self, force_insert=False, force_update=False, using=None, update_fields=None): """ Saves the current instance. Override this in a subclass if you want to control the saving process. The 'force_insert' and 'force_update' parameters can be used to insist that the "save" must be an SQL insert or update (or equivalent for non-SQL backends), respectively. Normally, they should not be set. """ # Ensure that a model instance without a PK hasn't been assigned to # a ForeignKey or OneToOneField on this model. If the field is # nullable, allowing the save() would result in silent data loss. for field in self._meta.concrete_fields: if field.is_relation: # If the related field isn't cached, then an instance hasn't # been assigned and there's no need to worry about this check. try: getattr(self, field.get_cache_name()) except AttributeError: continue obj = getattr(self, field.name, None) # A pk may have been assigned manually to a model instance not # saved to the database (or auto-generated in a case like # UUIDField), but we allow the save to proceed and rely on the # database to raise an IntegrityError if applicable. If # constraints aren't supported by the database, there's the # unavoidable risk of data corruption. if obj and obj.pk is None: raise ValueError( "save() prohibited to prevent data loss due to " "unsaved related object '%s'." % field.name ) using = using or router.db_for_write(self.__class__, instance=self) if force_insert and (force_update or update_fields): raise ValueError("Cannot force both insert and updating in model saving.") if update_fields is not None: # If update_fields is empty, skip the save. 
We do also check for # no-op saves later on for inheritance cases. This bailout is # still needed for skipping signal sending. if len(update_fields) == 0: return update_fields = frozenset(update_fields) field_names = set() for field in self._meta.fields: if not field.primary_key: field_names.add(field.name) if field.name != field.attname: field_names.add(field.attname) non_model_fields = update_fields.difference(field_names) if non_model_fields: raise ValueError("The following fields do not exist in this " "model or are m2m fields: %s" % ', '.join(non_model_fields)) # If saving to the same database, and this model is deferred, then # automatically do a "update_fields" save on the loaded fields. elif not force_insert and self._deferred and using == self._state.db: field_names = set() for field in self._meta.concrete_fields: if not field.primary_key and not hasattr(field, 'through'): field_names.add(field.attname) deferred_fields = [ f.attname for f in self._meta.fields if (f.attname not in self.__dict__ and isinstance(self.__class__.__dict__[f.attname], DeferredAttribute)) ] loaded_fields = field_names.difference(deferred_fields) if loaded_fields: update_fields = frozenset(loaded_fields) self.save_base(using=using, force_insert=force_insert, force_update=force_update, update_fields=update_fields) save.alters_data = True def save_base(self, raw=False, force_insert=False, force_update=False, using=None, update_fields=None): """ Handles the parts of saving which should be done only once per save, yet need to be done in raw saves, too. This includes some sanity checks and signal sending. The 'raw' argument is telling save_base not to save any parent models and not to do any changes to the values before save. This is used by fixture loading. 
""" using = using or router.db_for_write(self.__class__, instance=self) assert not (force_insert and (force_update or update_fields)) assert update_fields is None or len(update_fields) > 0 cls = origin = self.__class__ # Skip proxies, but keep the origin as the proxy model. if cls._meta.proxy: cls = cls._meta.concrete_model meta = cls._meta if not meta.auto_created: signals.pre_save.send(sender=origin, instance=self, raw=raw, using=using, update_fields=update_fields) with transaction.atomic(using=using, savepoint=False): if not raw: self._save_parents(cls, using, update_fields) updated = self._save_table(raw, cls, force_insert, force_update, using, update_fields) # Store the database on which the object was saved self._state.db = using # Once saved, this is no longer a to-be-added instance. self._state.adding = False # Signal that the save is complete if not meta.auto_created: signals.post_save.send(sender=origin, instance=self, created=(not updated), update_fields=update_fields, raw=raw, using=using) save_base.alters_data = True def _save_parents(self, cls, using, update_fields): """ Saves all the parents of cls using values from self. """ meta = cls._meta for parent, field in meta.parents.items(): # Make sure the link fields are synced between parent and self. if (field and getattr(self, parent._meta.pk.attname) is None and getattr(self, field.attname) is not None): setattr(self, parent._meta.pk.attname, getattr(self, field.attname)) self._save_parents(cls=parent, using=using, update_fields=update_fields) self._save_table(cls=parent, using=using, update_fields=update_fields) # Set the parent's PK value to self. if field: setattr(self, field.attname, self._get_pk_val(parent._meta)) # Since we didn't have an instance of the parent handy set # attname directly, bypassing the descriptor. Invalidate # the related object cache, in case it's been accidentally # populated. A fresh instance will be re-built from the # database if necessary. 
cache_name = field.get_cache_name() if hasattr(self, cache_name): delattr(self, cache_name) def _save_table(self, raw=False, cls=None, force_insert=False, force_update=False, using=None, update_fields=None): """ Does the heavy-lifting involved in saving. Updates or inserts the data for a single table. """ meta = cls._meta non_pks = [f for f in meta.local_concrete_fields if not f.primary_key] if update_fields: non_pks = [f for f in non_pks if f.name in update_fields or f.attname in update_fields] pk_val = self._get_pk_val(meta) if pk_val is None: pk_val = meta.pk.get_pk_value_on_save(self) setattr(self, meta.pk.attname, pk_val) pk_set = pk_val is not None if not pk_set and (force_update or update_fields): raise ValueError("Cannot force an update in save() with no primary key.") updated = False # If possible, try an UPDATE. If that doesn't update anything, do an INSERT. if pk_set and not force_insert: base_qs = cls._base_manager.using(using) values = [(f, None, (getattr(self, f.attname) if raw else f.pre_save(self, False))) for f in non_pks] forced_update = update_fields or force_update updated = self._do_update(base_qs, using, pk_val, values, update_fields, forced_update) if force_update and not updated: raise DatabaseError("Forced update did not affect any rows.") if update_fields and not updated: raise DatabaseError("Save with update_fields did not affect any rows.") if not updated: if meta.order_with_respect_to: # If this is a model with an order_with_respect_to # autopopulate the _order field field = meta.order_with_respect_to order_value = cls._base_manager.using(using).filter( **{field.name: getattr(self, field.attname)}).count() self._order = order_value fields = meta.local_concrete_fields if not pk_set: fields = [f for f in fields if not isinstance(f, AutoField)] update_pk = bool(meta.has_auto_field and not pk_set) result = self._do_insert(cls._base_manager, using, fields, update_pk, raw) if update_pk: setattr(self, meta.pk.attname, result) return updated 
def _do_update(self, base_qs, using, pk_val, values, update_fields, forced_update): """ This method will try to update the model. If the model was updated (in the sense that an update query was done and a matching row was found from the DB) the method will return True. """ filtered = base_qs.filter(pk=pk_val) if not values: # We can end up here when saving a model in inheritance chain where # update_fields doesn't target any field in current model. In that # case we just say the update succeeded. Another case ending up here # is a model with just PK - in that case check that the PK still # exists. return update_fields is not None or filtered.exists() if self._meta.select_on_save and not forced_update: if filtered.exists(): # It may happen that the object is deleted from the DB right after # this check, causing the subsequent UPDATE to return zero matching # rows. The same result can occur in some rare cases when the # database returns zero despite the UPDATE being executed # successfully (a row is matched and updated). In order to # distinguish these two cases, the object's existence in the # database is again checked for if the UPDATE query returns 0. return filtered._update(values) > 0 or filtered.exists() else: return False return filtered._update(values) > 0 def _do_insert(self, manager, using, fields, update_pk, raw): """ Do an INSERT. If update_pk is defined then this method should return the new pk for the model. """ return manager._insert([self], fields=fields, return_id=update_pk, using=using, raw=raw) def delete(self, using=None, keep_parents=False): using = using or router.db_for_write(self.__class__, instance=self) assert self._get_pk_val() is not None, ( "%s object can't be deleted because its %s attribute is set to None." 
% (self._meta.object_name, self._meta.pk.attname) ) collector = Collector(using=using) collector.collect([self], keep_parents=keep_parents) return collector.delete() delete.alters_data = True def _get_FIELD_display(self, field): value = getattr(self, field.attname) return force_text(dict(field.flatchoices).get(value, value), strings_only=True) def _get_next_or_previous_by_FIELD(self, field, is_next, **kwargs): if not self.pk: raise ValueError("get_next/get_previous cannot be used on unsaved objects.") op = 'gt' if is_next else 'lt' order = '' if is_next else '-' param = force_text(getattr(self, field.attname)) q = Q(**{'%s__%s' % (field.name, op): param}) q = q | Q(**{field.name: param, 'pk__%s' % op: self.pk}) qs = self.__class__._default_manager.using(self._state.db).filter(**kwargs).filter(q).order_by( '%s%s' % (order, field.name), '%spk' % order ) try: return qs[0] except IndexError: raise self.DoesNotExist("%s matching query does not exist." % self.__class__._meta.object_name) def _get_next_or_previous_in_order(self, is_next): cachename = "__%s_order_cache" % is_next if not hasattr(self, cachename): op = 'gt' if is_next else 'lt' order = '_order' if is_next else '-_order' order_field = self._meta.order_with_respect_to obj = self._default_manager.filter(**{ order_field.name: getattr(self, order_field.attname) }).filter(**{ '_order__%s' % op: self._default_manager.values('_order').filter(**{ self._meta.pk.name: self.pk }) }).order_by(order)[:1].get() setattr(self, cachename, obj) return getattr(self, cachename) def prepare_database_save(self, field): if self.pk is None: raise ValueError("Unsaved model instance %r cannot be used in an ORM query." % self) return getattr(self, field.remote_field.get_related_field().attname) def clean(self): """ Hook for doing any extra model-wide validation after clean() has been called on every field by self.clean_fields. 
Any ValidationError raised by this method will not be associated with a particular field; it will have a special-case association with the field defined by NON_FIELD_ERRORS. """ pass def validate_unique(self, exclude=None): """ Checks unique constraints on the model and raises ``ValidationError`` if any failed. """ unique_checks, date_checks = self._get_unique_checks(exclude=exclude) errors = self._perform_unique_checks(unique_checks) date_errors = self._perform_date_checks(date_checks) for k, v in date_errors.items(): errors.setdefault(k, []).extend(v) if errors: raise ValidationError(errors) def _get_unique_checks(self, exclude=None): """ Gather a list of checks to perform. Since validate_unique could be called from a ModelForm, some fields may have been excluded; we can't perform a unique check on a model that is missing fields involved in that check. Fields that did not validate should also be excluded, but they need to be passed in via the exclude argument. """ if exclude is None: exclude = [] unique_checks = [] unique_togethers = [(self.__class__, self._meta.unique_together)] for parent_class in self._meta.get_parent_list(): if parent_class._meta.unique_together: unique_togethers.append((parent_class, parent_class._meta.unique_together)) for model_class, unique_together in unique_togethers: for check in unique_together: for name in check: # If this is an excluded field, don't add this check. if name in exclude: break else: unique_checks.append((model_class, tuple(check))) # These are checks for the unique_for_<date/year/month>. date_checks = [] # Gather a list of checks for fields declared as unique and add them to # the list of checks. 
fields_with_class = [(self.__class__, self._meta.local_fields)] for parent_class in self._meta.get_parent_list(): fields_with_class.append((parent_class, parent_class._meta.local_fields)) for model_class, fields in fields_with_class: for f in fields: name = f.name if name in exclude: continue if f.unique: unique_checks.append((model_class, (name,))) if f.unique_for_date and f.unique_for_date not in exclude: date_checks.append((model_class, 'date', name, f.unique_for_date)) if f.unique_for_year and f.unique_for_year not in exclude: date_checks.append((model_class, 'year', name, f.unique_for_year)) if f.unique_for_month and f.unique_for_month not in exclude: date_checks.append((model_class, 'month', name, f.unique_for_month)) return unique_checks, date_checks def _perform_unique_checks(self, unique_checks): errors = {} for model_class, unique_check in unique_checks: # Try to look up an existing object with the same values as this # object's values for all the unique field. lookup_kwargs = {} for field_name in unique_check: f = self._meta.get_field(field_name) lookup_value = getattr(self, f.attname) if lookup_value is None: # no value, skip the lookup continue if f.primary_key and not self._state.adding: # no need to check for unique primary key when editing continue lookup_kwargs[str(field_name)] = lookup_value # some fields were skipped, no reason to do the check if len(unique_check) != len(lookup_kwargs): continue qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) # Note that we need to use the pk as defined by model_class, not # self.pk. These can be different fields because model inheritance # allows single model to have effectively multiple primary keys. # Refs #17615. 
model_class_pk = self._get_pk_val(model_class._meta) if not self._state.adding and model_class_pk is not None: qs = qs.exclude(pk=model_class_pk) if qs.exists(): if len(unique_check) == 1: key = unique_check[0] else: key = NON_FIELD_ERRORS errors.setdefault(key, []).append(self.unique_error_message(model_class, unique_check)) return errors def _perform_date_checks(self, date_checks): errors = {} for model_class, lookup_type, field, unique_for in date_checks: lookup_kwargs = {} # there's a ticket to add a date lookup, we can remove this special # case if that makes it's way in date = getattr(self, unique_for) if date is None: continue if lookup_type == 'date': lookup_kwargs['%s__day' % unique_for] = date.day lookup_kwargs['%s__month' % unique_for] = date.month lookup_kwargs['%s__year' % unique_for] = date.year else: lookup_kwargs['%s__%s' % (unique_for, lookup_type)] = getattr(date, lookup_type) lookup_kwargs[field] = getattr(self, field) qs = model_class._default_manager.filter(**lookup_kwargs) # Exclude the current object from the query if we are editing an # instance (as opposed to creating a new one) if not self._state.adding and self.pk is not None: qs = qs.exclude(pk=self.pk) if qs.exists(): errors.setdefault(field, []).append( self.date_error_message(lookup_type, field, unique_for) ) return errors def date_error_message(self, lookup_type, field_name, unique_for): opts = self._meta field = opts.get_field(field_name) return ValidationError( message=field.error_messages['unique_for_date'], code='unique_for_date', params={ 'model': self, 'model_name': six.text_type(capfirst(opts.verbose_name)), 'lookup_type': lookup_type, 'field': field_name, 'field_label': six.text_type(capfirst(field.verbose_name)), 'date_field': unique_for, 'date_field_label': six.text_type(capfirst(opts.get_field(unique_for).verbose_name)), } ) def unique_error_message(self, model_class, unique_check): opts = model_class._meta params = { 'model': self, 'model_class': model_class, 
'model_name': six.text_type(capfirst(opts.verbose_name)), 'unique_check': unique_check, } # A unique field if len(unique_check) == 1: field = opts.get_field(unique_check[0]) params['field_label'] = six.text_type(capfirst(field.verbose_name)) return ValidationError( message=field.error_messages['unique'], code='unique', params=params, ) # unique_together else: field_labels = [capfirst(opts.get_field(f).verbose_name) for f in unique_check] params['field_labels'] = six.text_type(get_text_list(field_labels, _('and'))) return ValidationError( message=_("%(model_name)s with this %(field_labels)s already exists."), code='unique_together', params=params, ) def full_clean(self, exclude=None, validate_unique=True): """ Calls clean_fields, clean, and validate_unique, on the model, and raises a ``ValidationError`` for any errors that occurred. """ errors = {} if exclude is None: exclude = [] else: exclude = list(exclude) try: self.clean_fields(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) # Form.clean() is run even if other validation fails, so do the # same with Model.clean() for consistency. try: self.clean() except ValidationError as e: errors = e.update_error_dict(errors) # Run unique checks, but only for fields that passed validation. if validate_unique: for name in errors.keys(): if name != NON_FIELD_ERRORS and name not in exclude: exclude.append(name) try: self.validate_unique(exclude=exclude) except ValidationError as e: errors = e.update_error_dict(errors) if errors: raise ValidationError(errors) def clean_fields(self, exclude=None): """ Cleans all fields and raises a ValidationError containing a dict of all validation errors if any occur. """ if exclude is None: exclude = [] errors = {} for f in self._meta.fields: if f.name in exclude: continue # Skip validation for empty fields with blank=True. The developer # is responsible for making sure they have a valid value. 
raw_value = getattr(self, f.attname) if f.blank and raw_value in f.empty_values: continue try: setattr(self, f.attname, f.clean(raw_value, self)) except ValidationError as e: errors[f.name] = e.error_list if errors: raise ValidationError(errors) @classmethod def check(cls, **kwargs): errors = [] errors.extend(cls._check_swappable()) errors.extend(cls._check_model()) errors.extend(cls._check_managers(**kwargs)) if not cls._meta.swapped: errors.extend(cls._check_fields(**kwargs)) errors.extend(cls._check_m2m_through_same_relationship()) errors.extend(cls._check_long_column_names()) clash_errors = cls._check_id_field() + cls._check_field_name_clashes() errors.extend(clash_errors) # If there are field name clashes, hide consequent column name # clashes. if not clash_errors: errors.extend(cls._check_column_name_clashes()) errors.extend(cls._check_index_together()) errors.extend(cls._check_unique_together()) errors.extend(cls._check_ordering()) return errors @classmethod def _check_swappable(cls): """ Check if the swapped model exists. """ errors = [] if cls._meta.swapped: try: apps.get_model(cls._meta.swapped) except ValueError: errors.append( checks.Error( "'%s' is not of the form 'app_label.app_name'." % cls._meta.swappable, hint=None, obj=None, id='models.E001', ) ) except LookupError: app_label, model_name = cls._meta.swapped.split('.') errors.append( checks.Error( "'%s' references '%s.%s', which has not been " "installed, or is abstract." % ( cls._meta.swappable, app_label, model_name ), hint=None, obj=None, id='models.E002', ) ) return errors @classmethod def _check_model(cls): errors = [] if cls._meta.proxy: if cls._meta.local_fields or cls._meta.local_many_to_many: errors.append( checks.Error( "Proxy model '%s' contains model fields." % cls.__name__, hint=None, obj=None, id='models.E017', ) ) return errors @classmethod def _check_managers(cls, **kwargs): """ Perform all manager checks. 
""" errors = [] for __, manager, __ in cls._meta.managers: errors.extend(manager.check(**kwargs)) return errors @classmethod def _check_fields(cls, **kwargs): """ Perform all field checks. """ errors = [] for field in cls._meta.local_fields: errors.extend(field.check(**kwargs)) for field in cls._meta.local_many_to_many: errors.extend(field.check(from_model=cls, **kwargs)) return errors @classmethod def _check_m2m_through_same_relationship(cls): """ Check if no relationship model is used by more than one m2m field. """ errors = [] seen_intermediary_signatures = [] fields = cls._meta.local_many_to_many # Skip when the target model wasn't found. fields = (f for f in fields if isinstance(f.remote_field.model, ModelBase)) # Skip when the relationship model wasn't found. fields = (f for f in fields if isinstance(f.remote_field.through, ModelBase)) for f in fields: signature = (f.remote_field.model, cls, f.remote_field.through) if signature in seen_intermediary_signatures: errors.append( checks.Error( "The model has two many-to-many relations through " "the intermediate model '%s'." % f.remote_field.through._meta.label, hint=None, obj=cls, id='models.E003', ) ) else: seen_intermediary_signatures.append(signature) return errors @classmethod def _check_id_field(cls): """ Check if `id` field is a primary key. """ fields = list(f for f in cls._meta.local_fields if f.name == 'id' and f != cls._meta.pk) # fields is empty or consists of the invalid "id" field if fields and not fields[0].primary_key and cls._meta.pk.name == 'id': return [ checks.Error( "'id' can only be used as a field name if the field also " "sets 'primary_key=True'.", hint=None, obj=cls, id='models.E004', ) ] else: return [] @classmethod def _check_field_name_clashes(cls): """ Ref #17673. """ errors = [] used_fields = {} # name or attname -> field # Check that multi-inheritance doesn't cause field name shadowing. 
for parent in cls._meta.get_parent_list(): for f in parent._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None if clash: errors.append( checks.Error( "The field '%s' from parent model " "'%s' clashes with the field '%s' " "from parent model '%s'." % ( clash.name, clash.model._meta, f.name, f.model._meta ), hint=None, obj=cls, id='models.E005', ) ) used_fields[f.name] = f used_fields[f.attname] = f # Check that fields defined in the model don't clash with fields from # parents. for f in cls._meta.local_fields: clash = used_fields.get(f.name) or used_fields.get(f.attname) or None # Note that we may detect clash between user-defined non-unique # field "id" and automatically added unique field "id", both # defined at the same model. This special case is considered in # _check_id_field and here we ignore it. id_conflict = (f.name == "id" and clash and clash.name == "id" and clash.model == cls) if clash and not id_conflict: errors.append( checks.Error( "The field '%s' clashes with the field '%s' " "from model '%s'." % ( f.name, clash.name, clash.model._meta ), hint=None, obj=f, id='models.E006', ) ) used_fields[f.name] = f used_fields[f.attname] = f return errors @classmethod def _check_column_name_clashes(cls): # Store a list of column names which have already been used by other fields. used_column_names = [] errors = [] for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Ensure the column name is not already in use. if column_name and column_name in used_column_names: errors.append( checks.Error( "Field '%s' has column name '%s' that is used by " "another field." % (f.name, column_name), hint="Specify a 'db_column' for the field.", obj=cls, id='models.E007' ) ) else: used_column_names.append(column_name) return errors @classmethod def _check_index_together(cls): """ Check the value of "index_together" option. 
""" if not isinstance(cls._meta.index_together, (tuple, list)): return [ checks.Error( "'index_together' must be a list or tuple.", hint=None, obj=cls, id='models.E008', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.index_together): return [ checks.Error( "All 'index_together' elements must be lists or tuples.", hint=None, obj=cls, id='models.E009', ) ] else: errors = [] for fields in cls._meta.index_together: errors.extend(cls._check_local_fields(fields, "index_together")) return errors @classmethod def _check_unique_together(cls): """ Check the value of "unique_together" option. """ if not isinstance(cls._meta.unique_together, (tuple, list)): return [ checks.Error( "'unique_together' must be a list or tuple.", hint=None, obj=cls, id='models.E010', ) ] elif any(not isinstance(fields, (tuple, list)) for fields in cls._meta.unique_together): return [ checks.Error( "All 'unique_together' elements must be lists or tuples.", hint=None, obj=cls, id='models.E011', ) ] else: errors = [] for fields in cls._meta.unique_together: errors.extend(cls._check_local_fields(fields, "unique_together")) return errors @classmethod def _check_local_fields(cls, fields, option): from django.db import models # In order to avoid hitting the relation tree prematurely, we use our # own fields_map instead of using get_field() forward_fields_map = { field.name: field for field in cls._meta._get_fields(reverse=False) } errors = [] for field_name in fields: try: field = forward_fields_map[field_name] except KeyError: errors.append( checks.Error( "'%s' refers to the non-existent field '%s'." % ( option, field_name, ), hint=None, obj=cls, id='models.E012', ) ) else: if isinstance(field.remote_field, models.ManyToManyRel): errors.append( checks.Error( "'%s' refers to a ManyToManyField '%s', but " "ManyToManyFields are not permitted in '%s'." 
% ( option, field_name, option, ), hint=None, obj=cls, id='models.E013', ) ) elif field not in cls._meta.local_fields: errors.append( checks.Error( ("'%s' refers to field '%s' which is not local " "to model '%s'.") % ( option, field_name, cls._meta.object_name, ), hint=("This issue may be caused by multi-table " "inheritance."), obj=cls, id='models.E016', ) ) return errors @classmethod def _check_ordering(cls): """ Check "ordering" option -- is it a list of strings and do all fields exist? """ if cls._meta._ordering_clash: return [ checks.Error( "'ordering' and 'order_with_respect_to' cannot be used together.", hint=None, obj=cls, id='models.E021', ), ] if cls._meta.order_with_respect_to or not cls._meta.ordering: return [] if not isinstance(cls._meta.ordering, (list, tuple)): return [ checks.Error( ("'ordering' must be a tuple or list " "(even if you want to order by only one field)."), hint=None, obj=cls, id='models.E014', ) ] errors = [] fields = cls._meta.ordering # Skip '?' fields. fields = (f for f in fields if f != '?') # Convert "-field" to "field". fields = ((f[1:] if f.startswith('-') else f) for f in fields) # Skip ordering in the format field1__field2 (FIXME: checking # this format would be nice, but it's a little fiddly). fields = (f for f in fields if '__' not in f) # Skip ordering on pk. This is always a valid order_by field # but is an alias and therefore won't be found by opts.get_field. fields = {f for f in fields if f != 'pk'} # Check for invalid or non-existent fields in ordering. invalid_fields = [] # Any field name that is not present in field_names does not exist. # Also, ordering by m2m fields is not allowed. 
opts = cls._meta valid_fields = set(chain.from_iterable( (f.name, f.attname) if not (f.auto_created and not f.concrete) else (f.field.related_query_name(),) for f in chain(opts.fields, opts.related_objects) )) invalid_fields.extend(fields - valid_fields) for invalid_field in invalid_fields: errors.append( checks.Error( "'ordering' refers to the non-existent field '%s'." % invalid_field, hint=None, obj=cls, id='models.E015', ) ) return errors @classmethod def _check_long_column_names(cls): """ Check that any auto-generated column names are shorter than the limits for each database in which the model will be created. """ errors = [] allowed_len = None db_alias = None # Find the minimum max allowed length among all specified db_aliases. for db in settings.DATABASES.keys(): # skip databases where the model won't be created if not router.allow_migrate_model(db, cls): continue connection = connections[db] max_name_length = connection.ops.max_name_length() if max_name_length is None or connection.features.truncates_names: continue else: if allowed_len is None: allowed_len = max_name_length db_alias = db elif max_name_length < allowed_len: allowed_len = max_name_length db_alias = db if allowed_len is None: return errors for f in cls._meta.local_fields: _, column_name = f.get_attname_column() # Check if auto-generated name for the field is too long # for the database. if (f.db_column is None and column_name is not None and len(column_name) > allowed_len): errors.append( checks.Error( 'Autogenerated column name too long for field "%s". ' 'Maximum length is "%s" for database "%s".' % (column_name, allowed_len, db_alias), hint="Set the column name manually using 'db_column'.", obj=cls, id='models.E018', ) ) for f in cls._meta.local_many_to_many: # Check if auto-generated name for the M2M field is too long # for the database. 
for m2m in f.remote_field.through._meta.local_fields: _, rel_name = m2m.get_attname_column() if (m2m.db_column is None and rel_name is not None and len(rel_name) > allowed_len): errors.append( checks.Error( 'Autogenerated column name too long for M2M field ' '"%s". Maximum length is "%s" for database "%s".' % (rel_name, allowed_len, db_alias), hint=("Use 'through' to create a separate model " "for M2M and then set column_name using " "'db_column'."), obj=cls, id='models.E019', ) ) return errors ############################################ # HELPER FUNCTIONS (CURRIED MODEL METHODS) # ############################################ # ORDERING METHODS ######################### def method_set_order(ordered_obj, self, id_list, using=None): if using is None: using = DEFAULT_DB_ALIAS rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.remote_field.field_name) order_name = ordered_obj._meta.order_with_respect_to.name # FIXME: It would be nice if there was an "update many" version of update # for situations like this. with transaction.atomic(using=using, savepoint=False): for i, j in enumerate(id_list): ordered_obj.objects.filter(**{'pk': j, order_name: rel_val}).update(_order=i) def method_get_order(ordered_obj, self): rel_val = getattr(self, ordered_obj._meta.order_with_respect_to.remote_field.field_name) order_name = ordered_obj._meta.order_with_respect_to.name pk_name = ordered_obj._meta.pk.name return [r[pk_name] for r in ordered_obj.objects.filter(**{order_name: rel_val}).values(pk_name)] ######## # MISC # ######## def simple_class_factory(model, attrs): """ Needed for dynamic classes. """ return model def model_unpickle(model_id, attrs, factory): """ Used to unpickle Model subclasses with deferred fields. """ if isinstance(model_id, tuple): if not apps.ready: apps.populate(settings.INSTALLED_APPS) model = apps.get_model(*model_id) else: # Backwards compat - the model was cached directly in earlier versions. 
model = model_id cls = factory(model, attrs) return cls.__new__(cls) model_unpickle.__safe_for_unpickle__ = True def unpickle_inner_exception(klass, exception_name): # Get the exception class from the class it is attached to: exception = getattr(klass, exception_name) return exception.__new__(exception)
bsd-3-clause
xtenex/raft
extras/Raft2CaptureProcessor.py
11
9240
# # A urllib2 compatible processor module to generate RAFT capture files # # Copyright (c) 2011 by RAFT Team # # Permission is hereby granted, free of charge, to any person obtaining a copy # of this software and associated documentation files (the "Software"), to deal # in the Software without restriction, including without limitation the rights # to use, copy, modify, merge, publish, distribute, sublicense, and/or sell # copies of the Software, and to permit persons to whom the Software is # furnished to do so, subject to the following conditions: # # The above copyright notice and this permission notice shall be included in # all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR # IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, # FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE # AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER # LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN # THE SOFTWARE # import urllib2, StringIO, os, time, bz2, sys import re, string import threading import cStringIO from urllib2 import urlparse from xml.sax.saxutils import escape, quoteattr class RaftCaptureProcessor(urllib2.BaseHandler): class _wrapper(StringIO.StringIO): def __init__(self, parent, request, response): request = request self.response = response data = parent.write_capture(request, response) StringIO.StringIO.__init__(self, data) def __getattr__(self, name): return getattr(self.response,name) def __init__(self, directory, cut_count = 10000): self.lock = threading.Lock() self.directory = directory self.re_nonprintable = re.compile('[^%s]' % re.escape('0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ!"#$%&\'()*+,-./:;<=>?@[\\]^_`{|}~ \t\n\r')) self.cut_count = cut_count # TODO: add max size as well 
self.open_file() def open_file(self): now = time.time() self.filename = os.path.join(self.directory, 'RaftCapture-{0}.xml.bz2'.format(int(now*1000))) self.ofhandle = bz2.BZ2File(self.filename, 'wb') self.ofhandle.write('<raft version="1.0">\n') self.write_count = 0 def close(self): self.ofhandle.write('</raft>') self.ofhandle.close() def http_request(self, req): return req def http_response(self, req, response): return RaftCaptureProcessor._wrapper(self, req, response) def https_request(self, req): return req def https_response(self, req, response): return RaftCaptureProcessor._wrapper(self, req, response) def write_capture(self, request, response): acquired = False try: acquired = self.lock.acquire() return self.__write_capture(request, response) finally: if acquired: self.lock.release() def __write_capture(self, request, response): ohandle = cStringIO.StringIO() response_body = '' saved_exception = None try: ohandle.write('<capture>\n') ohandle.write('<request>\n') method = request.get_method() url = request.get_full_url() parsed = urlparse.urlsplit(url) relative_url = parsed.path if parsed.query: relative_url += '?' + parsed.query if parsed.fragment: # TODO: will this ever happen? relative_url += '#' + parsed.fragment ohandle.write('<method>%s</method>\n' % escape(method)) ohandle.write('<url>%s</url>\n' % escape(url)) ohandle.write('<host>%s</host>\n' % escape(request.get_host())) try: # ghetto addr = response.fp._sock.fp._sock.getpeername() if addr: ohandle.write('<hostip>%s</hostip>\n' % escape(addr[0])) except Exception as error: pass ohandle.write('<datetime>%s</datetime>\n' % escape(time.asctime(time.gmtime())+' GMT')) # TODO: can we calculate request time and elapsed? request_headers = '%s %s HTTP/1.1\r\n' % (method, relative_url) # TODO: is there access to the HTTP version? 
for item in request.header_items(): request_headers += item[0] + ': ' + '\r\n\t'.join(item[1:]) + '\r\n' if self.re_nonprintable.search(request_headers): ohandle.write('<headers encoding="base64">%s</headers>\n' % request_headers.encode('base64')) else: ohandle.write('<headers>%s</headers>\n' % escape(request_headers)) if request.has_data(): request_body = request.get_data() if self.re_nonprintable.search(request_body): ohandle.write('<body encoding="base64">%s</body>\n' % request_body.encode('base64')) else: ohandle.write('<body>%s</body>\n' % escape(request_body)) ohandle.write('</request>\n') ohandle.write('<response>\n') status = int(response.getcode()) ohandle.write('<status>%d</status>\n' % status) headers = response.info() if 'HEAD' == method or status < 200 or status in (204, 304,): response_body = '' else: try: response_body = response.read() except urllib2.IncompleteRead, e: saved_exception = e response_headers = 'HTTP/1.1 %d %s\r\n' % (status, response.msg) # TODO: is there access to the HTTP version? 
response_headers += ''.join(headers.headers) content_type = headers.getheader('Content-Type') content_length = headers.getheader('Content-Length') if content_type: ohandle.write('<content_type>%s</content_type>\n' % escape(content_type)) if content_length: ohandle.write('<content_length>%d</content_length>\n' % int(content_length)) if self.re_nonprintable.search(response_headers): ohandle.write('<headers encoding="base64">%s</headers>\n' % response_headers.encode('base64')) else: ohandle.write('<headers>%s</headers>\n' % escape(response_headers)) if response_body: if self.re_nonprintable.search(response_body): ohandle.write('<body encoding="base64">%s</body>\n' % response_body.encode('base64')) else: ohandle.write('<body>%s</body>\n' % escape(response_body)) ohandle.write('</response>\n') ohandle.write('</capture>\n') self.ofhandle.write(ohandle.getvalue()) ohandle.close() self.write_count += 1 if 0 == (self.write_count % self.cut_count): self.close() self.open_file() except Exception, e: sys.stderr.write('*** unhandled error in RaftCaptureProcessor: %s\n' % (e)) if saved_exception: raise(saved_exception) return response_body class IgnoreRedirect(urllib2.HTTPRedirectHandler): def http_error_301(self, req, fp, code, msg, hdrs): return fp def http_error_302(self, req, fp, code, msg, hdrs): return fp def http_error_303(self, req, fp, code, msg, hdrs): return fp def http_error_307(self, req, fp, code, msg, hdrs): return fp if '__main__' == __name__: # test and sample code from contextlib import closing if len(sys.argv) == 1: targets = ['www.bing.com'] else: count = 0 targets = [] for line in open(sys.argv[1], 'r'): hostname = line.rstrip() if ',' in hostname: hostname = hostname.split(',',1)[1] targets.append(hostname) count += 1 if count > 10: break with closing(RaftCaptureProcessor('.')) as raftCapture: # proxyHandler = urllib2.ProxyHandler({'http':'localhost:8080', 'https':'localhost:8080'}) opener = urllib2.build_opener(raftCapture, ) for target in targets: url = 
'http://'+target+'/' req = urllib2.Request(url) req.add_header('User-agent', 'Mozilla/5.0 (Windows NT 5.1; rv:2.0) Gecko/20100101 Firefox/4.0') try: response = opener.open(req, timeout=5) except urllib2.HTTPError, error: response = error except urllib2.URLError, error: sys.stdout.write('failed on %s: %s\n' % (url, error)) sys.stdout.flush() response = None if False and response: print('%d %s' % (response.getcode(), response.msg)) print(''.join(response.headers.headers)) print(response.read())
gpl-3.0
20tab/django-filer
filer/admin/permissionadmin.py
14
1644
#-*- coding: utf-8 -*- import inspect from django.contrib import admin from filer import settings from filer.fields import folder class PermissionAdmin(admin.ModelAdmin): fieldsets = ( (None, {'fields': (('type', 'folder',))}), (None, {'fields': (('user', 'group', 'everybody'),)}), (None, {'fields': ( ('can_edit', 'can_read', 'can_add_children') )} ), ) raw_id_fields = ('user', 'group',) list_filter = ['user'] list_display = ['__unicode__', 'folder', 'user'] def formfield_for_foreignkey(self, db_field, request, **kwargs): db = kwargs.get('using') if db_field.name == 'folder': if 'admin_site' in inspect.getargspec(folder.AdminFolderWidget.__init__)[0]: # Django 1.4 widget_instance = folder.AdminFolderWidget(db_field.rel, self.admin_site, using=db) else: # Django <= 1.3 widget_instance = folder.AdminFolderWidget(db_field.rel, using=db) kwargs['widget'] = widget_instance return super(PermissionAdmin, self).formfield_for_foreignkey(db_field, request, **kwargs) def get_model_perms(self, request): # don't display the permissions admin if permissions are disabled. # This method is easier for testing than not registering the admin at all at import time enable_permissions = settings.FILER_ENABLE_PERMISSIONS and request.user.has_perm('filer.add_folderpermission') return { 'add': enable_permissions, 'change': enable_permissions, 'delete': enable_permissions, }
bsd-3-clause
pymedusa/SickRage
ext/requests/sessions.py
14
29316
# -*- coding: utf-8 -*- """ requests.session ~~~~~~~~~~~~~~~~ This module provides a Session object to manage and persist settings across requests (cookies, auth, proxies). """ import os import sys import time from datetime import timedelta from collections import OrderedDict from .auth import _basic_auth_str from .compat import cookielib, is_py3, urljoin, urlparse, Mapping from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT from .hooks import default_hooks, dispatch_hook from ._internal_utils import to_native_string from .utils import to_key_val_list, default_headers, DEFAULT_PORTS from .exceptions import ( TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) from .structures import CaseInsensitiveDict from .adapters import HTTPAdapter from .utils import ( requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, get_auth_from_url, rewind_body ) from .status_codes import codes # formerly defined here, reexposed here for backward compatibility from .models import REDIRECT_STATI # Preferred clock, based on which one is more accurate on a given system. if sys.platform == 'win32': try: # Python 3.4+ preferred_clock = time.perf_counter except AttributeError: # Earlier than Python 3. preferred_clock = time.clock else: preferred_clock = time.time def merge_setting(request_setting, session_setting, dict_class=OrderedDict): """Determines appropriate setting for a given request, taking into account the explicit setting on that request, and the setting in the session. If a setting is a dictionary, they will be merged together using `dict_class` """ if session_setting is None: return request_setting if request_setting is None: return session_setting # Bypass if not a dictionary (e.g. 
verify) if not ( isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping) ): return request_setting merged_setting = dict_class(to_key_val_list(session_setting)) merged_setting.update(to_key_val_list(request_setting)) # Remove keys that are set to None. Extract keys first to avoid altering # the dictionary during iteration. none_keys = [k for (k, v) in merged_setting.items() if v is None] for key in none_keys: del merged_setting[key] return merged_setting def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): """Properly merges both requests and session hooks. This is necessary because when request_hooks == {'response': []}, the merge breaks Session hooks entirely. """ if session_hooks is None or session_hooks.get('response') == []: return request_hooks if request_hooks is None or request_hooks.get('response') == []: return session_hooks return merge_setting(request_hooks, session_hooks, dict_class) class SessionRedirectMixin(object): def get_redirect_target(self, resp): """Receives a Response. Returns a redirect URI or ``None``""" # Due to the nature of how requests processes redirects this method will # be called at least once upon the original response and at least twice # on each subsequent redirect response (if any). # If a custom mixin is used to handle this logic, it may be advantageous # to cache the redirect location onto the response object as a private # attribute. if resp.is_redirect: location = resp.headers['location'] # Currently the underlying http module on py3 decode headers # in latin1, but empirical evidence suggests that latin1 is very # rarely used with non-ASCII characters in HTTP headers. # It is more likely to get UTF8 header rather than latin1. # This causes incorrect handling of UTF8 encoded location headers. # To solve this, we re-encode the location in latin1. 
if is_py3: location = location.encode('latin1') return to_native_string(location, 'utf8') return None def should_strip_auth(self, old_url, new_url): """Decide whether Authorization header should be removed when redirecting""" old_parsed = urlparse(old_url) new_parsed = urlparse(new_url) if old_parsed.hostname != new_parsed.hostname: return True # Special case: allow http -> https redirect when using the standard # ports. This isn't specified by RFC 7235, but is kept to avoid # breaking backwards compatibility with older versions of requests # that allowed any redirects on the same host. if (old_parsed.scheme == 'http' and old_parsed.port in (80, None) and new_parsed.scheme == 'https' and new_parsed.port in (443, None)): return False # Handle default port usage corresponding to scheme. changed_port = old_parsed.port != new_parsed.port changed_scheme = old_parsed.scheme != new_parsed.scheme default_port = (DEFAULT_PORTS.get(old_parsed.scheme, None), None) if (not changed_scheme and old_parsed.port in default_port and new_parsed.port in default_port): return False # Standard case: root URI must match return changed_port or changed_scheme def resolve_redirects(self, resp, req, stream=False, timeout=None, verify=True, cert=None, proxies=None, yield_requests=False, **adapter_kwargs): """Receives a Response. Returns a generator of Responses or Requests.""" hist = [] # keep track of history url = self.get_redirect_target(resp) previous_fragment = urlparse(req.url).fragment while url: prepared_request = req.copy() # Update history and keep track of redirects. 
# resp.history must ignore the original request in this loop hist.append(resp) resp.history = hist[1:] try: resp.content # Consume socket so it can be released except (ChunkedEncodingError, ContentDecodingError, RuntimeError): resp.raw.read(decode_content=False) if len(resp.history) >= self.max_redirects: raise TooManyRedirects('Exceeded {} redirects.'.format(self.max_redirects), response=resp) # Release the connection back into the pool. resp.close() # Handle redirection without scheme (see: RFC 1808 Section 4) if url.startswith('//'): parsed_rurl = urlparse(resp.url) url = ':'.join([to_native_string(parsed_rurl.scheme), url]) # Normalize url case and attach previous fragment if needed (RFC 7231 7.1.2) parsed = urlparse(url) if parsed.fragment == '' and previous_fragment: parsed = parsed._replace(fragment=previous_fragment) elif parsed.fragment: previous_fragment = parsed.fragment url = parsed.geturl() # Facilitate relative 'location' headers, as allowed by RFC 7231. # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') # Compliant with RFC3986, we percent encode the url. if not parsed.netloc: url = urljoin(resp.url, requote_uri(url)) else: url = requote_uri(url) prepared_request.url = to_native_string(url) self.rebuild_method(prepared_request, resp) # https://github.com/psf/requests/issues/1084 if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): # https://github.com/psf/requests/issues/3490 purged_headers = ('Content-Length', 'Content-Type', 'Transfer-Encoding') for header in purged_headers: prepared_request.headers.pop(header, None) prepared_request.body = None headers = prepared_request.headers headers.pop('Cookie', None) # Extract any cookies sent on the response to the cookiejar # in the new request. Because we've mutated our copied prepared # request, use the old one that we haven't yet touched. 
extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) merge_cookies(prepared_request._cookies, self.cookies) prepared_request.prepare_cookies(prepared_request._cookies) # Rebuild auth and proxy information. proxies = self.rebuild_proxies(prepared_request, proxies) self.rebuild_auth(prepared_request, resp) # A failed tell() sets `_body_position` to `object()`. This non-None # value ensures `rewindable` will be True, allowing us to raise an # UnrewindableBodyError, instead of hanging the connection. rewindable = ( prepared_request._body_position is not None and ('Content-Length' in headers or 'Transfer-Encoding' in headers) ) # Attempt to rewind consumed file-like object. if rewindable: rewind_body(prepared_request) # Override the original request. req = prepared_request if yield_requests: yield req else: resp = self.send( req, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies, allow_redirects=False, **adapter_kwargs ) extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) # extract redirect url, if any, for the next loop url = self.get_redirect_target(resp) yield resp def rebuild_auth(self, prepared_request, response): """When being redirected we may want to strip authentication from the request to avoid leaking credentials. This method intelligently removes and reapplies authentication where possible to avoid credential loss. """ headers = prepared_request.headers url = prepared_request.url if 'Authorization' in headers and self.should_strip_auth(response.request.url, url): # If we get redirected to a new host, we should strip out any # authentication headers. del headers['Authorization'] # .netrc might have more auth for us on our new host. new_auth = get_netrc_auth(url) if self.trust_env else None if new_auth is not None: prepared_request.prepare_auth(new_auth) def rebuild_proxies(self, prepared_request, proxies): """This method re-evaluates the proxy configuration by considering the environment variables. 
If we are redirected to a URL covered by NO_PROXY, we strip the proxy configuration. Otherwise, we set missing proxy keys for this URL (in case they were stripped by a previous redirect). This method also replaces the Proxy-Authorization header where necessary. :rtype: dict """ proxies = proxies if proxies is not None else {} headers = prepared_request.headers url = prepared_request.url scheme = urlparse(url).scheme new_proxies = proxies.copy() no_proxy = proxies.get('no_proxy') bypass_proxy = should_bypass_proxies(url, no_proxy=no_proxy) if self.trust_env and not bypass_proxy: environ_proxies = get_environ_proxies(url, no_proxy=no_proxy) proxy = environ_proxies.get(scheme, environ_proxies.get('all')) if proxy: new_proxies.setdefault(scheme, proxy) if 'Proxy-Authorization' in headers: del headers['Proxy-Authorization'] try: username, password = get_auth_from_url(new_proxies[scheme]) except KeyError: username, password = None, None if username and password: headers['Proxy-Authorization'] = _basic_auth_str(username, password) return new_proxies def rebuild_method(self, prepared_request, response): """When being redirected we may want to change the method of the request based on certain specs or browser behavior. """ method = prepared_request.method # https://tools.ietf.org/html/rfc7231#section-6.4.4 if response.status_code == codes.see_other and method != 'HEAD': method = 'GET' # Do what the browsers do, despite standards... # First, turn 302s into GETs. if response.status_code == codes.found and method != 'HEAD': method = 'GET' # Second, if a POST is responded to with a 301, turn it into a GET. # This bizarre behaviour is explained in Issue 1704. if response.status_code == codes.moved and method == 'POST': method = 'GET' prepared_request.method = method class Session(SessionRedirectMixin): """A Requests session. Provides cookie persistence, connection-pooling, and configuration. 
Basic Usage:: >>> import requests >>> s = requests.Session() >>> s.get('https://httpbin.org/get') <Response [200]> Or as a context manager:: >>> with requests.Session() as s: ... s.get('https://httpbin.org/get') <Response [200]> """ __attrs__ = [ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', 'cert', 'adapters', 'stream', 'trust_env', 'max_redirects', ] def __init__(self): #: A case-insensitive dictionary of headers to be sent on each #: :class:`Request <Request>` sent from this #: :class:`Session <Session>`. self.headers = default_headers() #: Default Authentication tuple or object to attach to #: :class:`Request <Request>`. self.auth = None #: Dictionary mapping protocol or protocol and host to the URL of the proxy #: (e.g. {'http': 'foo.bar:3128', 'http://host.name': 'foo.bar:4012'}) to #: be used on each :class:`Request <Request>`. self.proxies = {} #: Event-handling hooks. self.hooks = default_hooks() #: Dictionary of querystring data to attach to each #: :class:`Request <Request>`. The dictionary values may be lists for #: representing multivalued query parameters. self.params = {} #: Stream response content default. self.stream = False #: SSL Verification default. self.verify = True #: SSL client certificate default, if String, path to ssl client #: cert file (.pem). If Tuple, ('cert', 'key') pair. self.cert = None #: Maximum number of redirects allowed. If the request exceeds this #: limit, a :class:`TooManyRedirects` exception is raised. #: This defaults to requests.models.DEFAULT_REDIRECT_LIMIT, which is #: 30. self.max_redirects = DEFAULT_REDIRECT_LIMIT #: Trust environment settings for proxy configuration, default #: authentication and similar. self.trust_env = True #: A CookieJar containing all currently outstanding cookies set on this #: session. By default it is a #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but #: may be any other ``cookielib.CookieJar`` compatible object. 
self.cookies = cookiejar_from_dict({}) # Default connection adapters. self.adapters = OrderedDict() self.mount('https://', HTTPAdapter()) self.mount('http://', HTTPAdapter()) def __enter__(self): return self def __exit__(self, *args): self.close() def prepare_request(self, request): """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it. The :class:`PreparedRequest` has settings merged from the :class:`Request <Request>` instance and those of the :class:`Session`. :param request: :class:`Request` instance to prepare with this session's settings. :rtype: requests.PreparedRequest """ cookies = request.cookies or {} # Bootstrap CookieJar. if not isinstance(cookies, cookielib.CookieJar): cookies = cookiejar_from_dict(cookies) # Merge with session cookies merged_cookies = merge_cookies( merge_cookies(RequestsCookieJar(), self.cookies), cookies) # Set environment's basic authentication if not explicitly set. auth = request.auth if self.trust_env and not auth and not self.auth: auth = get_netrc_auth(request.url) p = PreparedRequest() p.prepare( method=request.method.upper(), url=request.url, files=request.files, data=request.data, json=request.json, headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), params=merge_setting(request.params, self.params), auth=merge_setting(auth, self.auth), cookies=merged_cookies, hooks=merge_hooks(request.hooks, self.hooks), ) return p def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, timeout=None, allow_redirects=True, proxies=None, hooks=None, stream=None, verify=None, cert=None, json=None): """Constructs a :class:`Request <Request>`, prepares it and sends it. Returns :class:`Response <Response>` object. :param method: method for the new :class:`Request` object. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. 
:param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of ``'filename': file-like-objects`` for multipart encoding upload. :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a :ref:`(connect timeout, read timeout) <timeouts>` tuple. :type timeout: float or tuple :param allow_redirects: (optional) Set to True by default. :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol or protocol and hostname to the URL of the proxy. :param stream: (optional) whether to immediately download the response content. Defaults to ``False``. :param verify: (optional) Either a boolean, in which case it controls whether we verify the server's TLS certificate, or a string, in which case it must be a path to a CA bundle to use. Defaults to ``True``. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. :rtype: requests.Response """ # Create the Request. req = Request( method=method.upper(), url=url, headers=headers, files=files, data=data or {}, json=json, params=params or {}, auth=auth, cookies=cookies, hooks=hooks, ) prep = self.prepare_request(req) proxies = proxies or {} settings = self.merge_environment_settings( prep.url, proxies, stream, verify, cert ) # Send the request. send_kwargs = { 'timeout': timeout, 'allow_redirects': allow_redirects, } send_kwargs.update(settings) resp = self.send(prep, **send_kwargs) return resp def get(self, url, **kwargs): r"""Sends a GET request. Returns :class:`Response` object. 
:param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) return self.request('GET', url, **kwargs) def options(self, url, **kwargs): r"""Sends a OPTIONS request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ kwargs.setdefault('allow_redirects', True) return self.request('OPTIONS', url, **kwargs) def head(self, url, **kwargs): r"""Sends a HEAD request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ kwargs.setdefault('allow_redirects', False) return self.request('HEAD', url, **kwargs) def post(self, url, data=None, json=None, **kwargs): r"""Sends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ return self.request('POST', url, data=data, json=json, **kwargs) def put(self, url, data=None, **kwargs): r"""Sends a PUT request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ return self.request('PUT', url, data=data, **kwargs) def patch(self, url, data=None, **kwargs): r"""Sends a PATCH request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. 
:param data: (optional) Dictionary, list of tuples, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ return self.request('PATCH', url, data=data, **kwargs) def delete(self, url, **kwargs): r"""Sends a DELETE request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. :rtype: requests.Response """ return self.request('DELETE', url, **kwargs) def send(self, request, **kwargs): """Send a given PreparedRequest. :rtype: requests.Response """ # Set defaults that the hooks can utilize to ensure they always have # the correct parameters to reproduce the previous request. kwargs.setdefault('stream', self.stream) kwargs.setdefault('verify', self.verify) kwargs.setdefault('cert', self.cert) kwargs.setdefault('proxies', self.proxies) # It's possible that users might accidentally send a Request object. # Guard against that specific failure case. 
if isinstance(request, Request): raise ValueError('You can only send PreparedRequests.') # Set up variables needed for resolve_redirects and dispatching of hooks allow_redirects = kwargs.pop('allow_redirects', True) stream = kwargs.get('stream') hooks = request.hooks # Get the appropriate adapter to use adapter = self.get_adapter(url=request.url) # Start time (approximately) of the request start = preferred_clock() # Send the request r = adapter.send(request, **kwargs) # Total elapsed time of the request (approximately) elapsed = preferred_clock() - start r.elapsed = timedelta(seconds=elapsed) # Response manipulation hooks r = dispatch_hook('response', hooks, r, **kwargs) # Persist cookies if r.history: # If the hooks create history then we want those cookies too for resp in r.history: extract_cookies_to_jar(self.cookies, resp.request, resp.raw) extract_cookies_to_jar(self.cookies, request, r.raw) # Resolve redirects if allowed. if allow_redirects: # Redirect resolving generator. gen = self.resolve_redirects(r, request, **kwargs) history = [resp for resp in gen] else: history = [] # Shuffle things around if there's history. if history: # Insert the first (original) request at the start history.insert(0, r) # Get the last request made r = history.pop() r.history = history # If redirects aren't being followed, store the response on the Request for Response.next(). if not allow_redirects: try: r._next = next(self.resolve_redirects(r, request, yield_requests=True, **kwargs)) except StopIteration: pass if not stream: r.content return r def merge_environment_settings(self, url, proxies, stream, verify, cert): """ Check the environment and merge it with some settings. :rtype: dict """ # Gather clues from the surrounding environment. if self.trust_env: # Set environment's proxies. 
no_proxy = proxies.get('no_proxy') if proxies is not None else None env_proxies = get_environ_proxies(url, no_proxy=no_proxy) for (k, v) in env_proxies.items(): proxies.setdefault(k, v) # Look for requests environment configuration and be compatible # with cURL. if verify is True or verify is None: verify = (os.environ.get('REQUESTS_CA_BUNDLE') or os.environ.get('CURL_CA_BUNDLE')) # Merge all the kwargs. proxies = merge_setting(proxies, self.proxies) stream = merge_setting(stream, self.stream) verify = merge_setting(verify, self.verify) cert = merge_setting(cert, self.cert) return {'verify': verify, 'proxies': proxies, 'stream': stream, 'cert': cert} def get_adapter(self, url): """ Returns the appropriate connection adapter for the given URL. :rtype: requests.adapters.BaseAdapter """ for (prefix, adapter) in self.adapters.items(): if url.lower().startswith(prefix.lower()): return adapter # Nothing matches :-/ raise InvalidSchema("No connection adapters were found for {!r}".format(url)) def close(self): """Closes all adapters and as such the session""" for v in self.adapters.values(): v.close() def mount(self, prefix, adapter): """Registers a connection adapter to a prefix. Adapters are sorted in descending order by prefix length. """ self.adapters[prefix] = adapter keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] for key in keys_to_move: self.adapters[key] = self.adapters.pop(key) def __getstate__(self): state = {attr: getattr(self, attr, None) for attr in self.__attrs__} return state def __setstate__(self, state): for attr, value in state.items(): setattr(self, attr, value) def session(): """ Returns a :class:`Session` for context-management. .. deprecated:: 1.0.0 This method has been deprecated since version 1.0.0 and is only kept for backwards compatibility. New code should use :class:`~requests.sessions.Session` to create a session. This may be removed at a future date. :rtype: Session """ return Session()
gpl-3.0
mjkoster/HypermediaToolkit
MachineHypermediaToolkit/resource/HypermediaCollection.py
3
7316
""" Hypermedia Collection extends the Hypermedia Resource class by adding hypermedia based URI routing and senml modeling of items and subresources. Routing uses link relations rel=grp, rel=sub and rel=item routing is done by identifying subresources that match the next uri segment to be routed until the last segment in the request uri is identified for resource selection. Each collection routes requests in a self contained way, using only knowledge of it's own uri path and it's direct subresources. group links labeled grp will be processed by forwarding the request to the href uri of the group link items are senml modeled values in the local context of the collection, stored in an array of objects. items are processed locally using the SenmlItems class in the same way an instance of the Links class is used to store the collection's links. subresources are sub-collections in the context of the collection. requests to subresources are routed to the subresources selected items and subresources are selected by either matching the resource uri with the request uri, or matching the collection uri with the request uri and seleting the resource(s) using query filtering on the link attributes, or by selecting the collection uri and matching resource names with senml names "n" in the update body. routing and group forwarding is done in this class, and resource processing e.g. GET, POST, is done in the respective Content Handlers. resource endpoints are subresource collections with a single item, the item being referenced using the name of the collection e.g. /a/b/c is the URI of a resource endpoint. 
The resource c is a collection with a single item that is referenced by the
base name and returns representations like this:

senml
{ "bn": "/a/b/c", "e": {"sv": "test"} }

collection+senml
{ "l": {"href": "", "rel": "item"}, "bn": "/a/b/c", "e": {"sv": "test"} }

Items and subresources are created by POSTing representations containing items
and optionally links in the senml+collection content-format. The location of
the created resource will consist of the collection uri and the resource name
specified in the "href" link attribute or the "n" attribute of the
corresponding senml element. Links that fail to select a resource are returned
with a status code of 404 Not Found
"""
import MachineHypermediaToolkit.terms as v
from HypermediaResource import HypermediaResource
from copy import deepcopy
from Items import SenmlItems
from PlainTextHandler import PlainTextHandler
from SenmlHandler import SenmlHandler
from SenmlCollectionHandler import SenmlCollectionHandler


class HypermediaCollection(HypermediaResource):
    # A hypermedia collection resource addressed by a URI path. The root
    # collection (uriPath == ["/"]) acts as its own root; nested collections
    # keep a reference to the shared root for absolute-path routing.

    def __init__(self, rootResource=None, uriPath=["/"], resourceLink=None, resourceItem=None):
        # NOTE(review): mutable default argument uriPath=["/"] is shared across
        # calls; callers appear to always pass a fresh list — confirm.
        HypermediaResource.__init__(self)
        self._uriPath = uriPath
        # Build the printable path string, e.g. ["/", "a", "b"] -> "/a/b/"
        self._pathString = "/"
        for pathElement in uriPath[1:]:
            self._pathString += (pathElement + "/")
        self._pathLen = len(self._uriPath)
        # The root collection routes absolute paths; every other collection
        # delegates to the root passed in by its parent.
        if ["/"] == uriPath:
            self._rootResource = self
        else:
            self._rootResource = rootResource
        self._unrouted = 0          # path segments not yet consumed by routing
        self._itemArray = SenmlItems()
        self._subresources = {}     # name -> HypermediaCollection
        """ merge the constructor rt link attribute values into the self link """
        if None != resourceLink:
            if v._rt in resourceLink:
                self._linkArray.selectMerge({v._rel: v._self}, {v._rt: resourceLink[v._rt]})
        """ if there is an item in the constructor, null the resource name and add it to items """
        if None != resourceItem:
            resourceItem[v._n] = v._null
            self._itemArray.add(resourceItem)
            self._linkArray.selectMerge({v._href: v._null}, {v._rel: v._item})
        # Register content-format handlers for this resource (handlers attach
        # themselves to the resource passed to their constructor).
        PlainTextHandler(self)
        SenmlHandler(self)
        SenmlCollectionHandler(self)

    """ Route requests using hyperlinks. Link relations "item" and "sub" are used to
    identify local items in the collection and sub resources, respectively."""
    def routeRequest(self, request):
        """Route a request down the resource tree or handle it locally.

        Sets request[v.response][v.status] to NotFound when no link selects
        a resource.
        """
        self._request = request
        self._unrouted = len(request[v.uriPath]) - self._pathLen
        if 0 == self._unrouted:
            """ this resource is selected, process content-format """
            self._processGroup(self._request)
            self.handleRequest(self._request)
        else:
            # Next unconsumed path segment names either a subresource or an item.
            self._resourceName = self._request[v.uriPath][self._pathLen]
            # if there is both sub and item, route sub and ignore item
            if [] != self._linkArray.get({v._href: self._resourceName, v._rel: v._sub}):
                """ route request to subresource item"""
                self._subresources[self._resourceName].routeRequest(self._request)
            elif 1 == self._unrouted and [] != self._linkArray.get({v._href: self._resourceName, v._rel: v._item}):
                """ item in the local collection is selected, handle content-format in this context"""
                self.handleRequest(self._request)
            else:
                """ nothing to route or process """
                self._request[v.response][v.status] = v.NotFound

    def _processGroup(self, request):
        """invoke a proxy or promise to forward the request to each resource marked with rel=grp """
        self._groupLinks = self._linkArray.get({v._rel: v._grp})
        if [] != self._groupLinks:
            # Aggregate responses: payload and code become lists, one entry
            # per group member.
            request[v.response][v.payload] = []
            request[v.response][v.code] = []
            """ make request instances """
            self._requests = []
            for self._link in self._groupLinks:
                print "group link: ", self._link
                self._requests.append(deepcopy(request))
            for self._request in self._requests:
                """ overwrite uriPath """
                self._request[v.uriPath] = ["/"]
                # NOTE(review): popleft() consumes one group link per cloned
                # request — assumes _linkArray.get() returns a deque; confirm.
                for self._pathElement in self._groupLinks.popleft()[v._href].split("/"):
                    if len(self._pathElement) > 0:
                        self._request[v.uriPath].append(self._pathElement)
                """ route request to root resource if path starts with / """
                if "/" == self._request[v.uriPath][0]:
                    self._rootResource.routeRequest(self._request)
                else:
                    self.routeRequest(self._request)
            """ collect the results """
            for self._request in self._requests:
                request[v.response][v.payload].append(self._request[v.response][v.payload])
                # Any member failing with a non-Success/non-Created status
                # marks the aggregate request as BadRequest.
                if v.Success != self._request[v.response][v.status] and \
                   v.Created != self._request[v.response][v.status]:
                    request[v.response][v.status] = v.BadRequest
                request[v.response][v.code].append(self._request[v.response][v.code])
            return self._requests
        else:
            return None

    def _createSubresource(self, resourceLink, resourceItem=None):
        """Create and register a nested collection named by the link's href."""
        resourceName = resourceLink[v._href]
        self._subresources[resourceName] = \
            HypermediaCollection(self._rootResource,
                                 self._uriPath + [resourceName],
                                 resourceLink,
                                 resourceItem)
        return self._subresources[resourceName]
apache-2.0
DShokes/ArcREST
src/arcrest/packages/ntlm3/HTTPNtlmAuthHandler.py
6
6364
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.

# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
# Lesser General Public License for more details.

# You should have received a copy of the GNU Lesser General Public
# License along with this library.  If not, see <http://www.gnu.org/licenses/> or <http://www.gnu.org/licenses/lgpl.txt>.

from ..six.moves import urllib
from ..six.moves.http_client import HTTPConnection, HTTPSConnection
import socket
import re

from . import ntlm


class AbstractNtlmAuthHandler:
    """Shared NTLM handshake logic for urllib auth handlers.

    Subclasses set ``auth_header`` ('Authorization' or 'Proxy-authorization')
    and wire the appropriate http_error_40x hook.
    """

    def __init__(self, password_mgr=None, debuglevel=0):
        if password_mgr is None:
            password_mgr = urllib.request.HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        self._debuglevel = level

    def http_error_authentication_required(self, auth_header_field, req, fp, headers):
        """Kick off the NTLM handshake if the server advertised NTLM."""
        auth_header_value = headers.get(auth_header_field, None)
        if auth_header_field:
            if auth_header_value is not None and 'ntlm' in auth_header_value.lower():
                fp.close()
                return self.retry_using_http_NTLM_auth(req, auth_header_field, None, headers)

    def retry_using_http_NTLM_auth(self, req, auth_header_field, realm, headers):
        """Perform the 3-message NTLM handshake on a single kept-alive connection.

        Returns an addinfourl wrapping the authenticated response, or None if
        no credentials are known or the negotiate message was already sent.
        """
        user, pw = self.passwd.find_user_password(realm, req.get_full_url())
        if pw is not None:
            # Split DOMAIN\\user; a bare user name disables the OEM-domain flag.
            user_parts = user.split('\\', 1)
            if len(user_parts) == 1:
                UserName = user_parts[0]
                DomainName = ''
                type1_flags = ntlm.NTLM_TYPE1_FLAGS & ~ntlm.NTLM_NegotiateOemDomainSupplied
            else:
                DomainName = user_parts[0].upper()
                UserName = user_parts[1]
                type1_flags = ntlm.NTLM_TYPE1_FLAGS
            # ntlm secures a socket, so we must use the same socket for the complete handshake
            headers = dict(req.headers)
            headers.update(req.unredirected_hdrs)
            auth = 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(user, type1_flags)
            if req.headers.get(self.auth_header, None) == auth:
                # Negotiate message already sent once: bail out instead of looping.
                return None
            headers[self.auth_header] = auth
            host = req.host
            if not host:
                # BUGFIX: URLError lives in six.moves.urllib.error, not
                # six.moves.urllib.request (the old reference raised AttributeError).
                raise urllib.error.URLError('no host given')
            h = None
            if req.get_full_url().startswith('https://'):
                h = HTTPSConnection(host)  # will parse host:port
            else:
                h = HTTPConnection(host)  # will parse host:port
            h.set_debuglevel(self._debuglevel)
            # we must keep the connection because NTLM authenticates the connection, not single requests
            headers["Connection"] = "Keep-Alive"
            headers = dict((name.title(), val) for name, val in headers.items())

            # For some reason, six doesn't do this translation correctly
            # TODO rsanders low - find bug in six & fix it
            try:
                selector = req.selector
            except AttributeError:
                selector = req.get_selector()

            h.request(req.get_method(), selector, req.data, headers)
            r = h.getresponse()
            r.begin()
            # Drain the 401 body so the connection can be reused.
            r._safe_read(int(r.getheader('content-length')))
            if r.getheader('set-cookie'):
                # this is important for some web applications that store
                # authentication-related info in cookies (it took a long time to figure out)
                headers['Cookie'] = r.getheader('set-cookie')
            r.fp = None  # remove the reference to the socket, so that it can not be closed by the response object (we want to keep the socket open)
            auth_header_value = r.getheader(auth_header_field, None)

            # some Exchange servers send two WWW-Authenticate headers, one with the NTLM challenge
            # and another with the 'Negotiate' keyword - make sure we operate on the right one
            m = re.match(r'(NTLM [A-Za-z0-9+\-/=]+)', auth_header_value)
            if m:
                auth_header_value, = m.groups()

            (ServerChallenge, NegotiateFlags) = ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value[5:])
            auth = 'NTLM %s' % ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge, UserName, DomainName,
                                                                     pw, NegotiateFlags)
            headers[self.auth_header] = auth
            headers["Connection"] = "Close"
            headers = dict((name.title(), val) for name, val in headers.items())
            try:
                h.request(req.get_method(), selector, req.data, headers)
                # none of the configured handlers are triggered, for example
                # redirect-responses are not handled!
                response = h.getresponse()

                def notimplemented():
                    raise NotImplementedError

                response.readline = notimplemented
                infourl = urllib.response.addinfourl(response, response.msg, req.get_full_url())
                infourl.code = response.status
                infourl.msg = response.reason
                return infourl
            except socket.error as err:
                # BUGFIX: was urllib.URLError, which does not exist under
                # six.moves.urllib; URLError is in the urllib.error submodule.
                raise urllib.error.URLError(err)
        else:
            return None


class HTTPNtlmAuthHandler(AbstractNtlmAuthHandler, urllib.request.BaseHandler):
    """NTLM handler for 401 (server) authentication."""

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        return self.http_error_authentication_required('www-authenticate', req, fp, headers)


class ProxyNtlmAuthHandler(AbstractNtlmAuthHandler, urllib.request.BaseHandler):
    """
        CAUTION: this class has NOT been tested at all!!!
        use at your own risk
    """
    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        return self.http_error_authentication_required('proxy-authenticate', req, fp, headers)
apache-2.0
aiyyoi/DevFest-MaxBond
MaxBond/env/lib/python2.7/site-packages/pip/_vendor/requests/cookies.py
821
16686
# -*- coding: utf-8 -*-

"""
Compatibility code to be able to use `cookielib.CookieJar` with requests.

requests.utils imports from here, so be careful with imports.
"""

import time
import collections
from .compat import cookielib, urlparse, urlunparse, Morsel

try:
    import threading
    # grr, pyflakes: this fixes "redefinition of unused 'threading'"
    threading
except ImportError:
    import dummy_threading as threading


class MockRequest(object):
    """Wraps a `requests.Request` to mimic a `urllib2.Request`.

    The code in `cookielib.CookieJar` expects this interface in order to correctly
    manage cookie policies, i.e., determine whether a cookie can be set, given the
    domains of the request and the cookie.

    The original request object is read-only. The client is responsible for collecting
    the new headers via `get_new_headers()` and interpreting them appropriately. You
    probably want `get_cookie_header`, defined below.
    """

    def __init__(self, request):
        self._r = request
        self._new_headers = {}
        self.type = urlparse(self._r.url).scheme

    def get_type(self):
        return self.type

    def get_host(self):
        return urlparse(self._r.url).netloc

    def get_origin_req_host(self):
        return self.get_host()

    def get_full_url(self):
        # Only return the response's URL if the user hadn't set the Host
        # header
        if not self._r.headers.get('Host'):
            return self._r.url
        # If they did set it, retrieve it and reconstruct the expected domain
        host = self._r.headers['Host']
        parsed = urlparse(self._r.url)
        # Reconstruct the URL as we expect it
        return urlunparse([
            parsed.scheme, host, parsed.path, parsed.params, parsed.query,
            parsed.fragment
        ])

    def is_unverifiable(self):
        return True

    def has_header(self, name):
        return name in self._r.headers or name in self._new_headers

    def get_header(self, name, default=None):
        return self._r.headers.get(name, self._new_headers.get(name, default))

    def add_header(self, key, val):
        """cookielib has no legitimate use for this method; add it back if you find one."""
        raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")

    def add_unredirected_header(self, name, value):
        self._new_headers[name] = value

    def get_new_headers(self):
        return self._new_headers

    @property
    def unverifiable(self):
        return self.is_unverifiable()

    @property
    def origin_req_host(self):
        return self.get_origin_req_host()

    @property
    def host(self):
        return self.get_host()


class MockResponse(object):
    """Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.

    ...what? Basically, expose the parsed HTTP headers from the server response
    the way `cookielib` expects to see them.
    """

    def __init__(self, headers):
        """Make a MockResponse for `cookielib` to read.

        :param headers: a httplib.HTTPMessage or analogous carrying the headers
        """
        self._headers = headers

    def info(self):
        return self._headers

    def getheaders(self, name):
        # BUGFIX: the result was computed but never returned, so this method
        # always yielded None.
        return self._headers.getheaders(name)


def extract_cookies_to_jar(jar, request, response):
    """Extract the cookies from the response into a CookieJar.

    :param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
    :param request: our own requests.Request object
    :param response: urllib3.HTTPResponse object
    """
    if not (hasattr(response, '_original_response') and
            response._original_response):
        return
    # the _original_response field is the wrapped httplib.HTTPResponse object,
    req = MockRequest(request)
    # pull out the HTTPMessage with the headers and put it in the mock:
    res = MockResponse(response._original_response.msg)
    jar.extract_cookies(res, req)


def get_cookie_header(jar, request):
    """Produce an appropriate Cookie header string to be sent with `request`, or None."""
    r = MockRequest(request)
    jar.add_cookie_header(r)
    return r.get_new_headers().get('Cookie')


def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
    """Unsets a cookie by name, by default over all domains and paths.

    Wraps CookieJar.clear(), is O(n).
    """
    clearables = []
    for cookie in cookiejar:
        if cookie.name == name:
            if domain is None or domain == cookie.domain:
                if path is None or path == cookie.path:
                    clearables.append((cookie.domain, cookie.path, cookie.name))

    for domain, path, name in clearables:
        cookiejar.clear(domain, path, name)


class CookieConflictError(RuntimeError):
    """There are two cookies that meet the criteria specified in the cookie jar.
    Use .get and .set and include domain and path args in order to be more specific."""


class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
    """Compatibility class; is a cookielib.CookieJar, but exposes a dict
    interface.

    This is the CookieJar we create by default for requests and sessions that
    don't specify one, since some clients may expect response.cookies and
    session.cookies to support dict operations.

    Don't use the dict interface internally; it's just for compatibility with
    external client code. All `requests` code should work out of the box with
    externally provided instances of CookieJar, e.g., LWPCookieJar and
    FileCookieJar.

    Caution: dictionary operations that are normally O(1) may be O(n).

    Unlike a regular CookieJar, this class is pickleable.
    """

    def get(self, name, default=None, domain=None, path=None):
        """Dict-like get() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains.

        Caution: operation is O(n), not O(1)."""
        try:
            return self._find_no_duplicates(name, domain, path)
        except KeyError:
            return default

    def set(self, name, value, **kwargs):
        """Dict-like set() that also supports optional domain and path args in
        order to resolve naming collisions from using one cookie jar over
        multiple domains."""
        # support client code that unsets cookies by assignment of a None value:
        if value is None:
            remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
            return

        if isinstance(value, Morsel):
            c = morsel_to_cookie(value)
        else:
            c = create_cookie(name, value, **kwargs)
        self.set_cookie(c)
        return c

    def iterkeys(self):
        """Dict-like iterkeys() that returns an iterator of names of cookies from the jar.
        See itervalues() and iteritems()."""
        for cookie in iter(self):
            yield cookie.name

    def keys(self):
        """Dict-like keys() that returns a list of names of cookies from the jar.
        See values() and items()."""
        return list(self.iterkeys())

    def itervalues(self):
        """Dict-like itervalues() that returns an iterator of values of cookies from the jar.
        See iterkeys() and iteritems()."""
        for cookie in iter(self):
            yield cookie.value

    def values(self):
        """Dict-like values() that returns a list of values of cookies from the jar.
        See keys() and items()."""
        return list(self.itervalues())

    def iteritems(self):
        """Dict-like iteritems() that returns an iterator of name-value tuples from the jar.
        See iterkeys() and itervalues()."""
        for cookie in iter(self):
            yield cookie.name, cookie.value

    def items(self):
        """Dict-like items() that returns a list of name-value tuples from the jar.
        See keys() and values(). Allows client-code to call "dict(RequestsCookieJar)
        and get a vanilla python dict of key value pairs."""
        return list(self.iteritems())

    def list_domains(self):
        """Utility method to list all the domains in the jar."""
        domains = []
        for cookie in iter(self):
            if cookie.domain not in domains:
                domains.append(cookie.domain)
        return domains

    def list_paths(self):
        """Utility method to list all the paths in the jar."""
        paths = []
        for cookie in iter(self):
            if cookie.path not in paths:
                paths.append(cookie.path)
        return paths

    def multiple_domains(self):
        """Returns True if there are multiple domains in the jar.
        Returns False otherwise."""
        domains = []
        for cookie in iter(self):
            if cookie.domain is not None and cookie.domain in domains:
                return True
            domains.append(cookie.domain)
        return False  # there is only one domain in jar

    def get_dict(self, domain=None, path=None):
        """Takes as an argument an optional domain and path and returns a plain old
        Python dict of name-value pairs of cookies that meet the requirements."""
        dictionary = {}
        for cookie in iter(self):
            if (domain is None or cookie.domain == domain) and (path is None
                                                or cookie.path == path):
                dictionary[cookie.name] = cookie.value
        return dictionary

    def __getitem__(self, name):
        """Dict-like __getitem__() for compatibility with client code. Throws exception
        if there are more than one cookie with name. In that case, use the more
        explicit get() method instead. Caution: operation is O(n), not O(1)."""
        return self._find_no_duplicates(name)

    def __setitem__(self, name, value):
        """Dict-like __setitem__ for compatibility with client code. Throws exception
        if there is already a cookie of that name in the jar. In that case, use the more
        explicit set() method instead."""
        self.set(name, value)

    def __delitem__(self, name):
        """Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name()."""
        remove_cookie_by_name(self, name)

    def set_cookie(self, cookie, *args, **kwargs):
        # Strip escaped double-quotes from quoted cookie values before storing.
        if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
            cookie.value = cookie.value.replace('\\"', '')
        return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)

    def update(self, other):
        """Updates this jar with cookies from another CookieJar or dict-like"""
        if isinstance(other, cookielib.CookieJar):
            for cookie in other:
                self.set_cookie(cookie)
        else:
            super(RequestsCookieJar, self).update(other)

    def _find(self, name, domain=None, path=None):
        """Requests uses this method internally to get cookie values. Takes as args name
        and optional domain and path. Returns a cookie.value. If there are conflicting cookies,
        _find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown
        if there are conflicting cookies."""
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        return cookie.value

        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def _find_no_duplicates(self, name, domain=None, path=None):
        """__getitem__ and get call _find_no_duplicates -- never used in Requests internally.
        Takes as args name and optional domain and path. Returns a cookie.value.
        Throws KeyError if cookie is not found and CookieConflictError if there are
        multiple cookies that match name and optionally domain and path."""
        toReturn = None
        for cookie in iter(self):
            if cookie.name == name:
                if domain is None or cookie.domain == domain:
                    if path is None or cookie.path == path:
                        if toReturn is not None:  # if there are multiple cookies that meet passed in criteria
                            raise CookieConflictError('There are multiple cookies with name, %r' % (name))
                        toReturn = cookie.value  # we will eventually return this as long as no cookie conflict

        if toReturn:
            return toReturn
        raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))

    def __getstate__(self):
        """Unlike a normal CookieJar, this class is pickleable."""
        state = self.__dict__.copy()
        # remove the unpickleable RLock object
        state.pop('_cookies_lock')
        return state

    def __setstate__(self, state):
        """Unlike a normal CookieJar, this class is pickleable."""
        self.__dict__.update(state)
        if '_cookies_lock' not in self.__dict__:
            self._cookies_lock = threading.RLock()

    def copy(self):
        """Return a copy of this RequestsCookieJar."""
        new_cj = RequestsCookieJar()
        new_cj.update(self)
        return new_cj


def create_cookie(name, value, **kwargs):
    """Make a cookie from underspecified parameters.

    By default, the pair of `name` and `value` will be set for the domain ''
    and sent on every request (this is sometimes called a "supercookie").
    """
    result = dict(
        version=0,
        name=name,
        value=value,
        port=None,
        domain='',
        path='/',
        secure=False,
        expires=None,
        discard=True,
        comment=None,
        comment_url=None,
        rest={'HttpOnly': None},
        rfc2109=False,)

    badargs = set(kwargs) - set(result)
    if badargs:
        err = 'create_cookie() got unexpected keyword arguments: %s'
        raise TypeError(err % list(badargs))

    result.update(kwargs)
    result['port_specified'] = bool(result['port'])
    result['domain_specified'] = bool(result['domain'])
    result['domain_initial_dot'] = result['domain'].startswith('.')
    result['path_specified'] = bool(result['path'])

    return cookielib.Cookie(**result)


def morsel_to_cookie(morsel):
    """Convert a Morsel object into a Cookie containing the one k/v pair."""
    expires = None
    if morsel['max-age']:
        expires = time.time() + morsel['max-age']
    elif morsel['expires']:
        time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
        expires = time.mktime(
            time.strptime(morsel['expires'], time_template)) - time.timezone
    return create_cookie(
        comment=morsel['comment'],
        comment_url=bool(morsel['comment']),
        discard=False,
        domain=morsel['domain'],
        expires=expires,
        name=morsel.key,
        path=morsel['path'],
        port=None,
        rest={'HttpOnly': morsel['httponly']},
        rfc2109=False,
        secure=bool(morsel['secure']),
        value=morsel.value,
        version=morsel['version'] or 0,
    )


def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
    """Returns a CookieJar from a key/value dictionary.

    :param cookie_dict: Dict of key/values to insert into CookieJar.
    :param cookiejar: (optional) A cookiejar to add the cookies to.
    :param overwrite: (optional) If False, will not replace cookies
        already in the jar with new ones.
    """
    if cookiejar is None:
        cookiejar = RequestsCookieJar()

    if cookie_dict is not None:
        names_from_jar = [cookie.name for cookie in cookiejar]
        for name in cookie_dict:
            if overwrite or (name not in names_from_jar):
                cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))

    return cookiejar


def merge_cookies(cookiejar, cookies):
    """Add cookies to cookiejar and returns a merged CookieJar.

    :param cookiejar: CookieJar object to add the cookies to.
    :param cookies: Dictionary or CookieJar object to be added.
    """
    if not isinstance(cookiejar, cookielib.CookieJar):
        raise ValueError('You can only merge into CookieJar')

    if isinstance(cookies, dict):
        cookiejar = cookiejar_from_dict(
            cookies, cookiejar=cookiejar, overwrite=False)
    elif isinstance(cookies, cookielib.CookieJar):
        try:
            cookiejar.update(cookies)
        except AttributeError:
            for cookie_in_jar in cookies:
                cookiejar.set_cookie(cookie_in_jar)

    return cookiejar
mit
JeoffScott/dyplom
vendor/doctrine/orm/docs/en/conf.py
2448
6497
# -*- coding: utf-8 -*- # # Doctrine 2 ORM documentation build configuration file, created by # sphinx-quickstart on Fri Dec 3 18:10:24 2010. # # This file is execfile()d with the current directory set to its containing dir. # # Note that not all possible configuration values are present in this # autogenerated file. # # All configuration values have a default; values that are commented out # serve to show the default. import sys, os # If extensions (or modules to document with autodoc) are in another directory, # add these directories to sys.path here. If the directory is relative to the # documentation root, use os.path.abspath to make it absolute, like shown here. sys.path.append(os.path.abspath('_exts')) # -- General configuration ----------------------------------------------------- # Add any Sphinx extension module names here, as strings. They can be extensions # coming with Sphinx (named 'sphinx.ext.*') or your custom ones. extensions = ['configurationblock'] # Add any paths that contain templates here, relative to this directory. templates_path = ['_templates'] # The suffix of source filenames. source_suffix = '.rst' # The encoding of source files. #source_encoding = 'utf-8' # The master toctree document. master_doc = 'index' # General information about the project. project = u'Doctrine 2 ORM' copyright = u'2010-12, Doctrine Project Team' # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the # built documents. # # The short X.Y version. version = '2' # The full version, including alpha/beta/rc tags. release = '2' # The language for content autogenerated by Sphinx. Refer to documentation # for a list of supported languages. language = 'en' # There are two options for replacing |today|: either, you set today to some # non-false value, then it is used: #today = '' # Else, today_fmt is used as the format for a strftime call. 
#today_fmt = '%B %d, %Y' # List of documents that shouldn't be included in the build. #unused_docs = [] # List of directories, relative to source directory, that shouldn't be searched # for source files. exclude_trees = ['_build'] # The reST default role (used for this markup: `text`) to use for all documents. #default_role = None # If true, '()' will be appended to :func: etc. cross-reference text. #add_function_parentheses = True # If true, the current module name will be prepended to all description # unit titles (such as .. function::). #add_module_names = True # If true, sectionauthor and moduleauthor directives will be shown in the # output. They are ignored by default. show_authors = True # The name of the Pygments (syntax highlighting) style to use. pygments_style = 'sphinx' # A list of ignored prefixes for module index sorting. #modindex_common_prefix = [] # -- Options for HTML output --------------------------------------------------- # The theme to use for HTML and HTML Help pages. Major themes that come with # Sphinx are currently 'default' and 'sphinxdoc'. html_theme = 'doctrine' # Theme options are theme-specific and customize the look and feel of a theme # further. For a list of options available for each theme, see the # documentation. #html_theme_options = {} # Add any paths that contain custom themes here, relative to this directory. html_theme_path = ['_theme'] # The name for this set of Sphinx documents. If None, it defaults to # "<project> v<release> documentation". #html_title = None # A shorter title for the navigation bar. Default is the same as html_title. #html_short_title = None # The name of an image file (relative to this directory) to place at the top # of the sidebar. #html_logo = None # The name of an image file (within the static path) to use as favicon of the # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32 # pixels large. 
#html_favicon = None # Add any paths that contain custom static files (such as style sheets) here, # relative to this directory. They are copied after the builtin static files, # so a file named "default.css" will overwrite the builtin "default.css". html_static_path = ['_static'] # If not '', a 'Last updated on:' timestamp is inserted at every page bottom, # using the given strftime format. #html_last_updated_fmt = '%b %d, %Y' # If true, SmartyPants will be used to convert quotes and dashes to # typographically correct entities. #html_use_smartypants = True # Custom sidebar templates, maps document names to template names. #html_sidebars = {} # Additional templates that should be rendered to pages, maps page names to # template names. #html_additional_pages = {} # If false, no module index is generated. #html_use_modindex = True # If false, no index is generated. #html_use_index = True # If true, the index is split into individual pages for each letter. #html_split_index = False # If true, links to the reST sources are added to the pages. #html_show_sourcelink = True # If true, an OpenSearch description file will be output, and all pages will # contain a <link> tag referring to it. The value of this option must be the # base URL from which the finished HTML is served. #html_use_opensearch = '' # If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml"). #html_file_suffix = '' # Output file base name for HTML help builder. htmlhelp_basename = 'Doctrine2ORMdoc' # -- Options for LaTeX output -------------------------------------------------- # The paper size ('letter' or 'a4'). #latex_paper_size = 'letter' # The font size ('10pt', '11pt' or '12pt'). #latex_font_size = '10pt' # Grouping the document tree into LaTeX files. List of tuples # (source start file, target name, title, author, documentclass [howto/manual]). 
latex_documents = [ ('index', 'Doctrine2ORM.tex', u'Doctrine 2 ORM Documentation', u'Doctrine Project Team', 'manual'), ] # The name of an image file (relative to this directory) to place at the top of # the title page. #latex_logo = None # For "manual" documents, if this is true, then toplevel headings are parts, # not chapters. #latex_use_parts = False # Additional stuff for the LaTeX preamble. #latex_preamble = '' # Documents to append as an appendix to all manuals. #latex_appendices = [] # If false, no module index is generated. #latex_use_modindex = True primary_domain = "dcorm" def linkcode_resolve(domain, info): if domain == 'dcorm': return 'http://' return None
bsd-3-clause
390910131/Misago
misago/conf/dbsettings.py
8
3143
from misago.core import threadstore

CACHE_KEY = 'misago_db_settings'

# Private sentinel distinguishing "cache miss" from a legitimately cached
# value.  The original code compared against the string 'nada' with ``==``,
# which would misbehave if that exact string were ever stored in the cache.
_NOT_CACHED = object()


class DBSettings(object):
    """Read-through cache of the ``Setting`` table.

    Settings are loaded once (from the Django cache, falling back to the
    database) and exposed as attributes.  Lazy settings store only a truthy
    placeholder in the cache; their real value is fetched on first access
    via ``get_lazy_setting()``.
    """

    def __init__(self):
        self._settings = self._read_cache()
        # Original values of settings changed via override_setting(),
        # used by reset_settings() to restore them.
        self._overrides = {}

    def _read_cache(self):
        """Return settings from the Django cache, populating it on a miss."""
        from misago.core.cache import cache
        data = cache.get(CACHE_KEY, _NOT_CACHED)
        if data is _NOT_CACHED:
            data = self._read_db()
            cache.set(CACHE_KEY, data)
        return data

    def _read_db(self):
        """Build the settings dict straight from the database."""
        from misago.conf.models import Setting
        data = {}
        for setting in Setting.objects.iterator():
            if setting.is_lazy:
                # Lazy settings cache only a truthy marker; the real value
                # is loaded on demand by get_lazy_setting().
                value = True if setting.value else None
            else:
                value = setting.value
            data[setting.setting] = {
                'value': value,
                'is_lazy': setting.is_lazy,
                'is_public': setting.is_public,
            }
        return data

    def get_public_settings(self):
        """Return a dict of settings flagged as safe to expose publicly."""
        public_settings = {}
        for name, setting in self._settings.items():
            if setting['is_public']:
                public_settings[name] = setting['value']
        return public_settings

    def get_lazy_setting(self, setting):
        """Return the real value of a lazy setting, fetching it once.

        Raises ValueError for non-lazy settings and AttributeError for
        unknown ones (or when the row has vanished from the database).
        """
        from misago.conf.models import Setting
        try:
            if self._settings[setting]['is_lazy']:
                # NOTE(review): a falsy real value is re-fetched on every
                # call because of the truthiness test below; preserved as-is.
                if not self._settings[setting].get('real_value'):
                    real_value = Setting.objects.get(setting=setting).value
                    self._settings[setting]['real_value'] = real_value
                return self._settings[setting]['real_value']
            else:
                raise ValueError("Setting %s is not lazy" % setting)
        except (KeyError, Setting.DoesNotExist):
            raise AttributeError("Setting %s is undefined" % setting)

    def flush_cache(self):
        """Drop the cached settings so the next access re-reads the DB."""
        from misago.core.cache import cache
        cache.delete(CACHE_KEY)

    def __getattr__(self, attr):
        try:
            return self._settings[attr]['value']
        except KeyError:
            raise AttributeError("Setting %s is undefined" % attr)

    def override_setting(self, setting, new_value):
        """Temporarily replace a setting's value (e.g. for tests)."""
        if setting not in self._overrides:
            # Remember the original value only on the first override so
            # reset_settings() restores the true pre-override state.
            self._overrides[setting] = self._settings[setting]['value']
        self._settings[setting]['value'] = new_value
        self._settings[setting]['real_value'] = new_value
        return new_value

    def reset_settings(self):
        """Undo every override_setting() call made on this instance."""
        for setting, original_value in self._overrides.items():
            self._settings[setting]['value'] = original_value
            self._settings[setting].pop('real_value', None)


class _DBSettingsGateway(object):
    """Thread-local lazy proxy that defers DBSettings construction."""

    def get_db_settings(self):
        dbsettings = threadstore.get(CACHE_KEY)
        if not dbsettings:
            dbsettings = DBSettings()
            threadstore.set(CACHE_KEY, dbsettings)
        return dbsettings

    def __getattr__(self, attr):
        return getattr(self.get_db_settings(), attr)


db_settings = _DBSettingsGateway()
gpl-2.0
CiscoSystems/vespa
neutron/plugins/nicira/extensions/nvp_networkgw.py
9
6480
# vim: tabstop=4 shiftwidth=4 softtabstop=4 # # Copyright 2013 VMware. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # # @author: Salvatore Orlando, VMware from abc import abstractmethod from oslo.config import cfg from neutron.api import extensions from neutron.api.v2 import attributes from neutron.api.v2 import base from neutron import manager from neutron import quota RESOURCE_NAME = "network_gateway" # Use dash for alias and collection name EXT_ALIAS = RESOURCE_NAME.replace('_', '-') COLLECTION_NAME = "%ss" % EXT_ALIAS DEVICE_ID_ATTR = 'id' IFACE_NAME_ATTR = 'interface_name' # Attribute Map for Network Gateway Resource # TODO(salvatore-orlando): add admin state as other neutron resources RESOURCE_ATTRIBUTE_MAP = { COLLECTION_NAME: { 'id': {'allow_post': False, 'allow_put': False, 'is_visible': True}, 'name': {'allow_post': True, 'allow_put': True, 'validate': {'type:string': None}, 'is_visible': True, 'default': ''}, 'default': {'allow_post': False, 'allow_put': False, 'is_visible': True}, 'devices': {'allow_post': True, 'allow_put': False, 'validate': {'type:device_list': None}, 'is_visible': True}, 'ports': {'allow_post': False, 'allow_put': False, 'default': [], 'is_visible': True}, 'tenant_id': {'allow_post': True, 'allow_put': False, 'validate': {'type:string': None}, 'required_by_policy': True, 'is_visible': True} } } def _validate_device_list(data, valid_values=None): """Validate the list of service definitions.""" if not data: # Devices must be 
provided msg = _("Cannot create a gateway with an empty device list") return msg try: for device in data: key_specs = {DEVICE_ID_ATTR: {'type:regex': attributes.UUID_PATTERN, 'required': True}, IFACE_NAME_ATTR: {'type:string': None, 'required': False}} err_msg = attributes._validate_dict( device, key_specs=key_specs) if err_msg: return err_msg unexpected_keys = [key for key in device if key not in key_specs] if unexpected_keys: err_msg = (_("Unexpected keys found in device description:%s") % ",".join(unexpected_keys)) return err_msg except TypeError: return (_("%s: provided data are not iterable") % _validate_device_list.__name__) nw_gw_quota_opts = [ cfg.IntOpt('quota_network_gateway', default=5, help=_('Number of network gateways allowed per tenant, ' '-1 for unlimited')) ] cfg.CONF.register_opts(nw_gw_quota_opts, 'QUOTAS') attributes.validators['type:device_list'] = _validate_device_list class Nvp_networkgw(object): """API extension for Layer-2 Gateway support. The Layer-2 gateway feature allows for connecting neutron networks with external networks at the layer-2 level. No assumption is made on the location of the external network, which might not even be directly reachable from the hosts where the VMs are deployed. This is achieved by instantiating 'network gateways', and then connecting Neutron network to them. 
""" @classmethod def get_name(cls): return "Neutron-NVP Network Gateway" @classmethod def get_alias(cls): return EXT_ALIAS @classmethod def get_description(cls): return "Connects Neutron networks with external networks at layer 2" @classmethod def get_namespace(cls): return "http://docs.openstack.org/ext/neutron/network-gateway/api/v1.0" @classmethod def get_updated(cls): return "2012-11-30T10:00:00-00:00" @classmethod def get_resources(cls): """Returns Ext Resources.""" plugin = manager.NeutronManager.get_plugin() params = RESOURCE_ATTRIBUTE_MAP.get(COLLECTION_NAME, dict()) member_actions = {'connect_network': 'PUT', 'disconnect_network': 'PUT'} # register quotas for network gateways quota.QUOTAS.register_resource_by_name(RESOURCE_NAME) collection_name = COLLECTION_NAME.replace('_', '-') controller = base.create_resource(collection_name, RESOURCE_NAME, plugin, params, member_actions=member_actions) return [extensions.ResourceExtension(COLLECTION_NAME, controller, member_actions=member_actions)] def get_extended_resources(self, version): if version == "2.0": return RESOURCE_ATTRIBUTE_MAP else: return {} class NetworkGatewayPluginBase(object): @abstractmethod def create_network_gateway(self, context, network_gateway): pass @abstractmethod def update_network_gateway(self, context, id, network_gateway): pass @abstractmethod def get_network_gateway(self, context, id, fields=None): pass @abstractmethod def delete_network_gateway(self, context, id): pass @abstractmethod def get_network_gateways(self, context, filters=None, fields=None): pass @abstractmethod def connect_network(self, context, network_gateway_id, network_mapping_info): pass @abstractmethod def disconnect_network(self, context, network_gateway_id, network_mapping_info): pass
apache-2.0
DarkFenX/Pyfa
graphs/data/base/getter.py
2
3453
# =============================================================================
# Copyright (C) 2010 Diego Duclos
#
# This file is part of pyfa.
#
# pyfa is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# pyfa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with pyfa. If not, see <http://www.gnu.org/licenses/>.
# =============================================================================


import math
from abc import ABCMeta, abstractmethod


class PointGetter(metaclass=ABCMeta):
    """Base class for objects that compute graph data points."""

    def __init__(self, graph):
        self.graph = graph

    @abstractmethod
    def getRange(self, xRange, miscParams, src, tgt):
        """Return (xs, ys) lists of points over the given x range."""
        raise NotImplementedError

    @abstractmethod
    def getPoint(self, x, miscParams, src, tgt):
        """Return the y value for a single x."""
        raise NotImplementedError


class SmoothPointGetter(PointGetter, metaclass=ABCMeta):
    """Point getter which samples linearly across the x range and
    recursively densifies segments whose y values differ, up to
    ``_extraDepth`` subdivision levels.
    """

    # Amount of intervals between sampled points across the x range
    _baseResolution = 200
    # Maximum recursive subdivision depth between adjacent samples
    _extraDepth = 0

    def getRange(self, xRange, miscParams, src, tgt):
        xs = []
        ys = []
        commonData = self._getCommonData(miscParams=miscParams, src=src, tgt=tgt)

        def addExtraPoints(x1, y1, x2, y2, depth):
            # Stop when depth is exhausted or the segment is flat
            if depth <= 0 or y1 == y2:
                return
            newX = (x1 + x2) / 2
            newY = self._calculatePoint(x=newX, miscParams=miscParams, src=src, tgt=tgt, commonData=commonData)
            # BUGFIX: recurse on this segment's own left endpoint (x1/y1).
            # The original passed the enclosing loop's prevX/prevY, which
            # is only correct for the outermost call; deeper recursion
            # then computed midpoints against the wrong interval and
            # appended out-of-order x values.
            addExtraPoints(x1=x1, y1=y1, x2=newX, y2=newY, depth=depth - 1)
            xs.append(newX)
            ys.append(newY)
            addExtraPoints(x1=newX, y1=newY, x2=x2, y2=y2, depth=depth - 1)

        prevX = None
        prevY = None
        # Go through X points defined by our resolution setting
        for x in self._xIterLinear(xRange):
            y = self._calculatePoint(x=x, miscParams=miscParams, src=src, tgt=tgt, commonData=commonData)
            if prevX is not None and prevY is not None:
                # If Y values of adjacent data points are not equal, add
                # extra points depending on the extra depth setting
                addExtraPoints(x1=prevX, y1=prevY, x2=x, y2=y, depth=self._extraDepth)
            prevX = x
            prevY = y
            xs.append(x)
            ys.append(y)
        return xs, ys

    def getPoint(self, x, miscParams, src, tgt):
        commonData = self._getCommonData(miscParams=miscParams, src=src, tgt=tgt)
        return self._calculatePoint(x=x, miscParams=miscParams, src=src, tgt=tgt, commonData=commonData)

    def _xIterLinear(self, xRange):
        """Yield _baseResolution + 1 evenly spaced x values over xRange."""
        xLow = min(xRange)
        xHigh = max(xRange)
        # Resolution defines amount of ranges between points here,
        # not amount of points
        step = (xHigh - xLow) / self._baseResolution
        if step == 0 or math.isnan(step):
            yield xLow
        else:
            for i in range(self._baseResolution + 1):
                yield xLow + step * i

    def _getCommonData(self, miscParams, src, tgt):
        # Hook for subclasses to precompute data shared by all points
        return {}

    @abstractmethod
    def _calculatePoint(self, x, miscParams, src, tgt, commonData):
        raise NotImplementedError
gpl-3.0
amir-qayyum-khan/edx-platform
common/djangoapps/track/tests/test_util.py
239
1203
"""Tests for the tracking-log ``DateTimeJSONEncoder``."""

from datetime import datetime
import json

from pytz import UTC

from django.test import TestCase

from track.utils import DateTimeJSONEncoder


class TestDateTimeJSONEncoder(TestCase):
    """Verify datetimes, tz-aware datetimes and dates encode to ISO-8601."""

    def test_datetime_encoding(self):
        # BUGFIX: the original literals had leading zeros (05, 07), which
        # is invalid syntax in Python 3 (octal needs an 0o prefix); the
        # plain decimals below are numerically identical.
        a_naive_datetime = datetime(2012, 5, 1, 7, 27, 10, 20000)
        a_tz_datetime = datetime(2012, 5, 1, 7, 27, 10, 20000, tzinfo=UTC)
        a_date = a_naive_datetime.date()
        an_iso_datetime = '2012-05-01T07:27:10.020000+00:00'
        an_iso_date = '2012-05-01'

        obj = {
            'number': 100,
            'string': 'hello',
            'object': {'a': 1},
            'a_datetime': a_naive_datetime,
            'a_tz_datetime': a_tz_datetime,
            'a_date': a_date,
        }
        to_json = json.dumps(obj, cls=DateTimeJSONEncoder)
        from_json = json.loads(to_json)

        # Non-datetime values must round-trip unchanged.
        self.assertEqual(from_json['number'], 100)
        self.assertEqual(from_json['string'], 'hello')
        self.assertEqual(from_json['object'], {'a': 1})

        # Both the naive and the UTC-aware datetime are expected to
        # serialize to the same UTC ISO string; dates to a plain ISO date.
        self.assertEqual(from_json['a_datetime'], an_iso_datetime)
        self.assertEqual(from_json['a_tz_datetime'], an_iso_datetime)
        self.assertEqual(from_json['a_date'], an_iso_date)
agpl-3.0
kmolab/kmolab.github.io
data/Brython-3.3.4/Lib/unittest/suite.py
748
9715
"""TestSuite""" import sys from . import case from . import util __unittest = True def _call_if_exists(parent, attr): func = getattr(parent, attr, lambda: None) func() class BaseTestSuite(object): """A simple test suite that doesn't provide class or module shared fixtures. """ def __init__(self, tests=()): self._tests = [] self.addTests(tests) def __repr__(self): return "<%s tests=%s>" % (util.strclass(self.__class__), list(self)) def __eq__(self, other): if not isinstance(other, self.__class__): return NotImplemented return list(self) == list(other) def __ne__(self, other): return not self == other def __iter__(self): return iter(self._tests) def countTestCases(self): cases = 0 for test in self: cases += test.countTestCases() return cases def addTest(self, test): # sanity checks if not callable(test): raise TypeError("{} is not callable".format(repr(test))) if isinstance(test, type) and issubclass(test, (case.TestCase, TestSuite)): raise TypeError("TestCases and TestSuites must be instantiated " "before passing them to addTest()") self._tests.append(test) def addTests(self, tests): if isinstance(tests, str): raise TypeError("tests must be an iterable of tests, not a string") for test in tests: self.addTest(test) def run(self, result): for test in self: if result.shouldStop: break test(result) return result def __call__(self, *args, **kwds): return self.run(*args, **kwds) def debug(self): """Run the tests without collecting errors in a TestResult""" for test in self: test.debug() class TestSuite(BaseTestSuite): """A test suite is a composite test consisting of a number of TestCases. For use, create an instance of TestSuite, then add test case instances. When all tests have been added, the suite can be passed to a test runner, such as TextTestRunner. It will run the individual test cases in the order in which they were added, aggregating the results. When subclassing, do not forget to call the base class constructor. 
""" def run(self, result, debug=False): topLevel = False if getattr(result, '_testRunEntered', False) is False: result._testRunEntered = topLevel = True for test in self: if result.shouldStop: break if _isnotsuite(test): self._tearDownPreviousClass(test, result) self._handleModuleFixture(test, result) self._handleClassSetUp(test, result) result._previousTestClass = test.__class__ if (getattr(test.__class__, '_classSetupFailed', False) or getattr(result, '_moduleSetUpFailed', False)): continue if not debug: test(result) else: test.debug() if topLevel: self._tearDownPreviousClass(None, result) self._handleModuleTearDown(result) result._testRunEntered = False return result def debug(self): """Run the tests without collecting errors in a TestResult""" debug = _DebugResult() self.run(debug, True) ################################ def _handleClassSetUp(self, test, result): previousClass = getattr(result, '_previousTestClass', None) currentClass = test.__class__ if currentClass == previousClass: return if result._moduleSetUpFailed: return if getattr(currentClass, "__unittest_skip__", False): return try: currentClass._classSetupFailed = False except TypeError: # test may actually be a function # so its class will be a builtin-type pass setUpClass = getattr(currentClass, 'setUpClass', None) if setUpClass is not None: _call_if_exists(result, '_setupStdout') try: setUpClass() except Exception as e: if isinstance(result, _DebugResult): raise currentClass._classSetupFailed = True className = util.strclass(currentClass) errorName = 'setUpClass (%s)' % className self._addClassOrModuleLevelException(result, e, errorName) finally: _call_if_exists(result, '_restoreStdout') def _get_previous_module(self, result): previousModule = None previousClass = getattr(result, '_previousTestClass', None) if previousClass is not None: previousModule = previousClass.__module__ return previousModule def _handleModuleFixture(self, test, result): previousModule = self._get_previous_module(result) 
currentModule = test.__class__.__module__ if currentModule == previousModule: return self._handleModuleTearDown(result) result._moduleSetUpFailed = False try: module = sys.modules[currentModule] except KeyError: return setUpModule = getattr(module, 'setUpModule', None) if setUpModule is not None: _call_if_exists(result, '_setupStdout') try: setUpModule() except Exception as e: if isinstance(result, _DebugResult): raise result._moduleSetUpFailed = True errorName = 'setUpModule (%s)' % currentModule self._addClassOrModuleLevelException(result, e, errorName) finally: _call_if_exists(result, '_restoreStdout') def _addClassOrModuleLevelException(self, result, exception, errorName): error = _ErrorHolder(errorName) addSkip = getattr(result, 'addSkip', None) if addSkip is not None and isinstance(exception, case.SkipTest): addSkip(error, str(exception)) else: result.addError(error, sys.exc_info()) def _handleModuleTearDown(self, result): previousModule = self._get_previous_module(result) if previousModule is None: return if result._moduleSetUpFailed: return try: module = sys.modules[previousModule] except KeyError: return tearDownModule = getattr(module, 'tearDownModule', None) if tearDownModule is not None: _call_if_exists(result, '_setupStdout') try: tearDownModule() except Exception as e: if isinstance(result, _DebugResult): raise errorName = 'tearDownModule (%s)' % previousModule self._addClassOrModuleLevelException(result, e, errorName) finally: _call_if_exists(result, '_restoreStdout') def _tearDownPreviousClass(self, test, result): previousClass = getattr(result, '_previousTestClass', None) currentClass = test.__class__ if currentClass == previousClass: return if getattr(previousClass, '_classSetupFailed', False): return if getattr(result, '_moduleSetUpFailed', False): return if getattr(previousClass, "__unittest_skip__", False): return tearDownClass = getattr(previousClass, 'tearDownClass', None) if tearDownClass is not None: _call_if_exists(result, '_setupStdout') 
try: tearDownClass() except Exception as e: if isinstance(result, _DebugResult): raise className = util.strclass(previousClass) errorName = 'tearDownClass (%s)' % className self._addClassOrModuleLevelException(result, e, errorName) finally: _call_if_exists(result, '_restoreStdout') class _ErrorHolder(object): """ Placeholder for a TestCase inside a result. As far as a TestResult is concerned, this looks exactly like a unit test. Used to insert arbitrary errors into a test suite run. """ # Inspired by the ErrorHolder from Twisted: # http://twistedmatrix.com/trac/browser/trunk/twisted/trial/runner.py # attribute used by TestResult._exc_info_to_string failureException = None def __init__(self, description): self.description = description def id(self): return self.description def shortDescription(self): return None def __repr__(self): return "<ErrorHolder description=%r>" % (self.description,) def __str__(self): return self.id() def run(self, result): # could call result.addError(...) - but this test-like object # shouldn't be run anyway pass def __call__(self, result): return self.run(result) def countTestCases(self): return 0 def _isnotsuite(test): "A crude way to tell apart testcases and suites with duck-typing" try: iter(test) except TypeError: return True return False class _DebugResult(object): "Used by the TestSuite to hold previous class when running in debug." _previousTestClass = None _moduleSetUpFailed = False shouldStop = False
agpl-3.0
ankanch/tieba-zhuaqu
DSV-user-application-plugin-dev-kit/default-plugins/multiwords/main.py
3
1524
from tkinter import * # 导入 Tkinter 库 import multiwords root = Tk() root.resizable(False,False) root.title("统计对比多个词语") #KCC基本分析组件 #该组件用于列举出包含指定词语的帖子/回帖 ##插件信息定义 KCC_PLUGIN_NAME="multiwordS" KCC_PLUGIN_DESCRIPTION="用来统计多个词语的频率并以条形统计图显示" KCC_PLUGIN_COPYRIGHT="kanch" ##定义结束 def btnclick(): root.update() wordlist = wordentry.get() print("word=",wordlist) multiwords.compareMultiWords(wordlist) def centerWindow(rt): rt.update() # update window ,must do curWidth = rt.winfo_reqwidth() # get current width curHeight = rt.winfo_height() # get current height scnWidth,scnHeight = rt.maxsize() # get screen width and height tmpcnf = '%dx%d+%d+%d'%(curWidth,curHeight, (scnWidth-curWidth)/2,(scnHeight-curHeight)/2) rt.geometry(tmpcnf) return rt data = StringVar(root) scale = IntVar(root) Label(root,text="KCC数据分析模块 - 基本分析套件\n该模块用于显示指定词语的时间频率关系图",width=35,height=5).pack() Label(root,text="请输入要分析的词语(用空格隔开):",width=25,height=2).pack() wordentry = Entry(root,text="请输入内容",width=25,textvariable=data) wordentry.pack(ipadx=4,ipady=4) Button(root, text="显示结果", width=15,relief=GROOVE,command=btnclick).pack(pady=16,ipadx=8,ipady=8) root = centerWindow(root) root.mainloop() # 进入消息循环
gpl-3.0
abenzbiria/clients_odoo
addons/account/report/account_balance.py
183
6162
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import time from openerp.osv import osv from openerp.report import report_sxw from common_report_header import common_report_header class account_balance(report_sxw.rml_parse, common_report_header): _name = 'report.account.account.balance' def __init__(self, cr, uid, name, context=None): super(account_balance, self).__init__(cr, uid, name, context=context) self.sum_debit = 0.00 self.sum_credit = 0.00 self.date_lst = [] self.date_lst_string = '' self.result_acc = [] self.localcontext.update({ 'time': time, 'lines': self.lines, 'sum_debit': self._sum_debit, 'sum_credit': self._sum_credit, 'get_fiscalyear':self._get_fiscalyear, 'get_filter': self._get_filter, 'get_start_period': self.get_start_period, 'get_end_period': self.get_end_period , 'get_account': self._get_account, 'get_journal': self._get_journal, 'get_start_date':self._get_start_date, 'get_end_date':self._get_end_date, 'get_target_move': self._get_target_move, }) self.context = context def set_context(self, objects, data, ids, report_type=None): new_ids = ids if (data['model'] == 
'ir.ui.menu'): new_ids = 'chart_account_id' in data['form'] and [data['form']['chart_account_id']] or [] objects = self.pool.get('account.account').browse(self.cr, self.uid, new_ids) return super(account_balance, self).set_context(objects, data, new_ids, report_type=report_type) def _get_account(self, data): if data['model']=='account.account': return self.pool.get('account.account').browse(self.cr, self.uid, data['form']['id']).company_id.name return super(account_balance ,self)._get_account(data) def lines(self, form, ids=None, done=None): def _process_child(accounts, disp_acc, parent): account_rec = [acct for acct in accounts if acct['id']==parent][0] currency_obj = self.pool.get('res.currency') acc_id = self.pool.get('account.account').browse(self.cr, self.uid, account_rec['id']) currency = acc_id.currency_id and acc_id.currency_id or acc_id.company_id.currency_id res = { 'id': account_rec['id'], 'type': account_rec['type'], 'code': account_rec['code'], 'name': account_rec['name'], 'level': account_rec['level'], 'debit': account_rec['debit'], 'credit': account_rec['credit'], 'balance': account_rec['balance'], 'parent_id': account_rec['parent_id'], 'bal_type': '', } self.sum_debit += account_rec['debit'] self.sum_credit += account_rec['credit'] if disp_acc == 'movement': if not currency_obj.is_zero(self.cr, self.uid, currency, res['credit']) or not currency_obj.is_zero(self.cr, self.uid, currency, res['debit']) or not currency_obj.is_zero(self.cr, self.uid, currency, res['balance']): self.result_acc.append(res) elif disp_acc == 'not_zero': if not currency_obj.is_zero(self.cr, self.uid, currency, res['balance']): self.result_acc.append(res) else: self.result_acc.append(res) if account_rec['child_id']: for child in account_rec['child_id']: _process_child(accounts,disp_acc,child) obj_account = self.pool.get('account.account') if not ids: ids = self.ids if not ids: return [] if not done: done={} ctx = self.context.copy() ctx['fiscalyear'] = form['fiscalyear_id'] if 
form['filter'] == 'filter_period': ctx['period_from'] = form['period_from'] ctx['period_to'] = form['period_to'] elif form['filter'] == 'filter_date': ctx['date_from'] = form['date_from'] ctx['date_to'] = form['date_to'] ctx['state'] = form['target_move'] parents = ids child_ids = obj_account._get_children_and_consol(self.cr, self.uid, ids, ctx) if child_ids: ids = child_ids accounts = obj_account.read(self.cr, self.uid, ids, ['type','code','name','debit','credit','balance','parent_id','level','child_id'], ctx) for parent in parents: if parent in done: continue done[parent] = 1 _process_child(accounts,form['display_account'],parent) return self.result_acc class report_trialbalance(osv.AbstractModel): _name = 'report.account.report_trialbalance' _inherit = 'report.abstract_report' _template = 'account.report_trialbalance' _wrapped_report_class = account_balance # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
LamCiuLoeng/gap
gapproject/controllers/order.py
1
32750
# -*- coding: utf-8 -*- from datetime import datetime as dt import json import os import random import subprocess import traceback import urllib2 import zipfile import zlib # turbogears imports from tg import expose, redirect, validate, flash, request, response, override_template, config from tg.decorators import paginate # third party imports #from pylons.i18n import ugettext as _ from repoze.what import predicates, authorize from repoze.what.predicates import not_anonymous, in_group, has_permission from sqlalchemy.sql import * import transaction from reportlab.pdfgen import canvas # project specific imports from gapproject.lib.base import BaseController from gapproject.model import * from gapproject.util.common import * from gapproject.widgets import * from gapproject.widgets.order import * from gapproject.util.gap_const import * class OrderViewController(BaseController): @expose('gapproject.templates.order.order_form_view_export') @tabFocus(tab_type="main") def view(self, **kw): (flag, id)=rpacDecrypt(kw.get("code", "")) if not flag: flash("Please don't access the resource illegally!") redirect('/order/index') order_header=getOr404(OrderInfoHeader, id) duration = dt.now() - order_header.orderDate def got_value(order_detail): return order_detail.price.price * order_detail.qty total_value = reduce(lambda x, y: x + y, map(got_value, order_header.order_details)) if duration.days > 1: can_update = False else: can_update = True if len(order_header.order_details)<1 : flash("There's no order related to this PO!", "warn") redirect('/order/index') return {"order_header": order_header, "order_details": order_header.order_details, 'total_value': total_value, "can_update": can_update, 'return_url': '/', } class OrderController(BaseController): #Uncomment this line if your controller requires an authenticated user allow_only = authorize.not_anonymous() @expose('gapproject.templates.order.index') @paginate('collections', items_per_page = 30) @tabFocus(tab_type="main") def 
index(self, **kw): region_select_form = regionSearchFormInstance if kw: redirect("/order/placeOrder", regionID = kw.get("regionID", False)) else: return dict(search_form = region_select_form, title = 'Please select region') @expose('gapproject.templates.order.order_form') @tabFocus(tab_type="main") def placeOrder(self, **kw): try: region = Region.get_region(kw.get("regionID", False)) categorys = Category.get_categorys() first_order = OrderInfoHeader.first_order(request.identity["user"].user_id) return {"region": region, "categorys": categorys, 'first_order': first_order, "return_url": '/', } except: file=open('log.txt', 'w') traceback.print_exc(None, file) file.close() flash("The service is not avaiable now!", "warn") redirect('/') @expose() def save(self, **kw): DBSession.begin(subtransactions=True) order_detail_inputs = {} new_orders = [] for k in kw: if k.endswith("_ext"): name, index, ext = k.split("_") if index not in order_detail_inputs: if kw[k] and len(kw[k]) > 0: order_detail_inputs[index] = {name: kw[k]} else: flash("the order information is not complete!", "warn") redirect("/order/index") else: if kw[k] and len(kw[k]) > 0: order_detail_inputs[index][name] = kw[k] else: flash("the order infromation is not complete!", "warn") redirect("/order/index") try: latest_order = OrderInfoHeader.latest_order() today = dt.today() if latest_order is not None and latest_order.issuedDate.month == today.month and latest_order.issuedDate.day == today.day: dailySequence = latest_order.dailySequence + 1 else: dailySequence = 1 if 'x' in order_detail_inputs.keys(): order_detail_inputs.pop('x') order_header_params = {'orderNO': ''.join(['GAP', dt.now().strftime("%Y%m%d"), '%05d' % dailySequence ]), 'buyerPO': None, 'vendorPO': None, 'billCompany': None, 'billAddress': None, 'billAttn': None, 'billTel': None, 'billFax': None, 'billEmail': None, 'shipCompany': None, 'shipAddress': None, 'shipAttn': None, 'shipTel': None, 'shipFax': None, 'shipEmail': None, 'contact': None, 
'remark': None, 'dailySequence': dailySequence, 'shipInstruction': None, 'orderDate': None, 'issuedDate': dt.now(), 'issuedBy': request.identity["user"], 'lastModifyTime': dt.now(), 'lastModifyBy': request.identity['user'], } for key in order_header_params.iterkeys(): if key in kw and kw[key]: order_header_params[key] = kw[key] region = Region.get_region(kw['region']) order_header_params['region'] = region if 'otherInstruction' in kw and kw['otherInstruction']: order_header_params['shipInstruction'] = kw['otherInstruction'] order_header = OrderInfoHeader(**order_header_params) new_orders.append(order_header) srt_order_details = sorted(order_detail_inputs.items(), key = lambda x: x[0]) for str_order_detail in srt_order_details: item_no = str_order_detail[1]['item'] item = Item.get_item_by_name(item_no.strip()) price = Price.get_price(int(region.id), item.id) order_detail = OrderInfoDetail(header = order_header, item = item, price = price, qty = int(str_order_detail[1]['quantity']) ) new_orders.append(order_detail) DBSession.add_all(new_orders) sendTo = [request.identity["user"].email_address, region.regionMailAddress] ccTo = config.gap_email_cc.split(";") for contact in region.region_contacts: if contact.is_active(): ccTo.append(contact.email) self._sendNotifyEmail(sendTo, ccTo, order_header.orderNO, order_header.id) DBSession.commit() except: DBSession.rollback() file = open('log.txt', 'w') traceback.print_exc(None, file) file.close() flash("The service is not avaiable now!", "warn") redirect('/index') # else: # # Reserve Item # DBSession.flush() # try: # warehouse = region.region_warehouse[0] # reserveItemQty = 0 # details = order_header.order_details # for d in details: # availableQty = d.item.availableQtyByWarehouse(warehouse.id) # if availableQty >= d.qty: # reserveItem = ReserveItem() # reserveItem.qty = d.qty # reserveItem.item = d.item # reserveItem.warehouse = warehouse # reserveItem.orderID = order_header.id # reserveItem.orderDetail = d # 
reserveItem.issuedBy = request.identity["user"] # reserveItem.lastModifyBy = request.identity["user"] # DBSession.add(reserveItem) # DBSession.flush() # reserveItemQty += 1 # if reserveItemQty <= 0: # order_header.status = RESERVED_FAIL # elif reserveItemQty > 0 and reserveItemQty < len(details): # order_header.status = PARTIAL_RESERVED_SUCCESS # elif reserveItemQty > 0 and reserveItemQty == len(details): # order_header.status = ALL_RESERVED_SUCCESS # except: # transaction.doom() # traceback.print_exc() flash("The manual order has been confirmed successfully!") redirect("/order/view?code=%s"%(rpacEncrypt(order_header.id))) def _sendNotifyEmail(self, sendTo, ccTo, customerPO, hederID, content=None, attach=[], title=None): sendFrom="r-pac-GAP-ordering-system" if title: subject=title else: subject="Order[%s] has been confirmed successfully!"%customerPO if content : text=content else: text="\n".join([ "Thank you for your confirmation!", "You could view the order's detail information via the link below:", "%s/order/view?code=%s"%(config.website_url, rpacEncrypt(hederID)), "\n\n************************************************************************************", "This e-mail is sent by the r-pac GAP ordering system automatically.", "Please don't reply this e-mail directly!", "************************************************************************************" ]) sendEmail(sendFrom, sendTo, subject, text, ccTo, attach) @expose('gapproject.templates.order.order_form_view') @tabFocus(tab_type="main") def view(self, **kw): (flag, id)=rpacDecrypt(kw.get("code", "")) if not flag: flash("Please don't access the resource illegally!") redirect('/order/index') order_header=getOr404(OrderInfoHeader, id) duration = dt.now() - order_header.orderDate def got_value(order_detail): return order_detail.price.price * order_detail.qty total_value = reduce(lambda x, y: x + y, map(got_value, order_header.order_details)) if duration.days > 1: can_update = False else: can_update = True if 
len(order_header.order_details)<1 : flash("There's no order related to this PO!", "warn") redirect('/order/index') shipItems = DBSession.query(ShipItemHeader).filter(and_(ShipItemHeader.active == 0, ShipItemHeader.orderID == order_header.id)).order_by(ShipItemHeader.id).all() return {"order_header": order_header, "order_details": order_header.order_details, "can_update": can_update, "total_value": total_value, 'return_url': '/', 'shipItems': shipItems } @expose('gapproject.templates.order.search') @paginate('collections', items_per_page=25) @tabFocus(tab_type="view") def search(self, **kw): try: search_form = orderSearchFormInstance if kw: result = self._query_result(kw) return dict(search_form = search_form, collections = result, values = kw, return_url = '/', ) else: return dict(search_form = search_form, collections = [], values = {}, return_url = '/', ) except: flash("The service is not avaiable now,please try it later.", status="warn") traceback.print_exc() redirect('/') def _query_result(self, kw): try: conditions = [] if kw.get("orderNO", False): conditions.append(OrderInfoHeader.orderNO == kw.get("orderNO", "")) if kw.get("orderDate", False): b_date = dt.strptime(kw.get("orderDate", '2009-12-1200:00:00') + "00:00:00", "%Y-%m-%d%H:%M:%S") conditions.append(OrderInfoHeader.orderDate >= b_date) if kw.get("vendorPO", False): conditions.append(OrderInfoHeader.vendorPO.op("ILIKE")("%%%s%%" % kw.get("vendorPO", "").strip())) if kw.get("item_no", False): item = DBSession.query(Item)\ .filter(Item.item_number == kw.get("item_no", "").strip())\ .first() dtlHeaderIDs = DBSession.query(OrderInfoDetail.headerID).filter(OrderInfoDetail.itemID == item.id).all() conditions.append(OrderInfoHeader.id.in_([id[0] for id in dtlHeaderIDs])) if len(conditions): order = DBSession.query(OrderInfoHeader).filter(OrderInfoHeader.status != 0) for condition in conditions: order = order.filter(condition) # if in_group("AE") or in_group("Admin"): if 
has_permission("MAIN_INVENTORY_ENQUIRY_BY_WAREHOUSE"): result = order.order_by(desc(OrderInfoHeader.orderDate)).all() else: result = order.filter(OrderInfoHeader.issuedBy == request.identity['user'])\ .order_by(desc(OrderInfoHeader.orderDate)).all() else: # if in_group("AE") or in_group("Admin"): if has_permission("MAIN_INVENTORY_ENQUIRY_BY_WAREHOUSE"): result=DBSession.query(OrderInfoHeader)\ .filter(OrderInfoHeader.status != 0)\ .order_by(desc(OrderInfoHeader.orderDate))\ .all() else: result=DBSession.query(OrderInfoHeader)\ .filter(OrderInfoHeader.status != 0)\ .filter(OrderInfoHeader.issuedBy == request.identity['user'])\ .order_by(desc(OrderInfoHeader.orderDate))\ .all() return result except: traceback.print_exc() @expose('gapproject.templates.order.order_form_update') @tabFocus(tab_type="main") def updateOrder(self, **kw): (flag, id)=rpacDecrypt(kw.get("code", "")) if not flag: flash("Please don't access the resource illegally!") redirect('/') order = getOr404(OrderInfoHeader, id) if len(order.order_details) < 1: flash("There's no order related to this PO!", "warn") redirect('/') return {"order_header": order, "order_details": order.order_details, 'return_url': '/', } @expose() def saveUpdate(self, **kw): DBSession.begin(subtransactions=True) try: new_orders = [] order = DBSession.query(OrderInfoHeader).get(kw['order_id']) order_header_fields = ['invoiceNO', 'invoiceTotal', 'shippedDate', ] order_header_params = {'lastModifyTime': dt.now(), 'lastModifyBy': request.identity['user'], 'status': 1 # order updated } for key in order_header_fields: if key in kw and kw[key]: order_header_params[key] = kw[key] for key, val in order_header_params.iteritems(): if key in dir(order): setattr(order, key, val) if order.invoiceNO is not None and len(order.invoiceNO) > 0: order.status = 2 # order completed DBSession.add(order) sendTo = [request.identity["user"].email_address, order.region.regionMailAddress] ccTo = config.gap_email_cc.split(";") for contact in 
order.region.region_contacts: ccTo.append(contact.email) self._sendNotifyEmail(sendTo, ccTo, order.orderNO, order.id, title = "Order[%s] has been completed successfully!" % order.orderNO) DBSession.commit() except: DBSession.rollback() file = open('log.txt', 'w') traceback.print_exc(None, file) file.close() flash("The service is not avaiable now!", "warn") raise else: flash("The manual order has been confirmed successfully!") redirect("/order/view?code=%s"%(rpacEncrypt(order.id))) @expose('gapproject.templates.order.order_form_update_vendor') @tabFocus(tab_type="main") def vendorUpdate(self, **kw): (flag, id)=rpacDecrypt(kw.get("code", "")) if not flag: flash("Please don't access the resource illegally!") redirect('/') order = getOr404(OrderInfoHeader, id) duration = dt.now() - order.orderDate if order.status >= SHIPPED_PART: # @20120528 flash("The order is shipping!") redirect('/') if duration.days > 1: flash("The order was created more than 24 hours and can not be modified!") redirect('/') if len(order.order_details) < 1: flash("There's no order related to this PO!", "warn") redirect('/') return {"order": order, "order_details": order.order_details, 'return_url': '/', } @expose() def saveVendorUpdate(self, **kw): DBSession.begin(subtransactions=True) try: new_orders = [] order_detail_inputs = {} order = DBSession.query(OrderInfoHeader).get(kw['order_id']) order_header_params = {'buyerPO': None, 'vendorPO': None, 'billCompany': None, 'billAddress': None, 'billAttn': None, 'billTel': None, 'billFax': None, 'billEmail': None, 'shipCompany': None, 'shipAddress': None, 'shipAttn': None, 'shipTel': None, 'shipFax': None, 'shipEmail': None, 'contact': None, 'remark': None, 'shipInstruction': None, 'orderDate': None, 'lastModifyTime': dt.now(), 'lastModifyBy': request.identity['user'], } for key in order_header_params.iterkeys(): if key in kw and kw[key]: order_header_params[key] = kw[key] if 'otherInstruction' in kw and kw['otherInstruction']: 
order_header_params['shipInstruction'] = kw['otherInstruction'] for key, val in order_header_params.iteritems(): if key in dir(order): setattr(order, key, val) new_orders.append(order) for k in kw: if k.startswith("quantity_"): order_detail = OrderInfoDetail.get_detail(int(k.split("_")[1])) if order_detail.qty != int(kw[k]): order_detail.qty = int(kw[k]) new_orders.append(order_detail) for k in kw: if k.endswith("_ext"): name, index, ext = k.split("_") if index not in order_detail_inputs: if kw[k] and len(kw[k]) > 0: order_detail_inputs[index] = {name: kw[k]} else: flash("the order information is not complete!", "warn") raise redirect("/order/index") else: if kw[k] and len(kw[k]) > 0: order_detail_inputs[index][name] = kw[k] else: flash("the order infromation is not complete!", "warn") raise redirect("/order/index") for detail in order.order_details: if str(detail.id) in order_detail_inputs.keys(): if detail.item.item_number == order_detail_inputs[str(detail.id)]['item']: if str(detail.qty) != order_detail_inputs[str(detail.id)]['quantity']: detail.qty = int(order_detail_inputs[str(detail.id)]['quantity']) new_orders.append(detail) else: detail.item = OrderInfoDetail.get_item(order_detail_inputs[str(detail.id)]['item']) detail.price = Price.get_price(oder.region.id, detail.item.id) new_orders.append(detail) # for key_val in order_detail_inputs.keys(): # if int(key_val) not in [detail.id for detail in order.order_details]: # print '*' * 20, '\n', key_val # item = OrderInfoDetail.get_item(order_detail_inputs[key_val]['item']) # price = Price.get_price(order.region.id, # OrderInfoDetail.get_item(order_detail_inputs[key_val]['item']).id) # detail = OrderInfoDetail(header = order, # item = item, # price = price, # qty = int(order_detail_inputs[key_val]['quantity']) # ) # # new_orders.append(detail) DBSession.add_all(new_orders) sendTo = [request.identity["user"].email_address, order.region.regionMailAddress] ccTo = config.gap_email_cc.split(";") for contact in 
order.region.region_contacts: ccTo.append(contact.email) self._sendNotifyEmail(sendTo, ccTo, order.orderNO, order.id, title = "Order[%s] has been revised successfully!" % order.orderNO) DBSession.commit() except: DBSession.rollback() file = open('log.txt', 'w') traceback.print_exc(None, file) file.close() flash("The service is not avaiable now!", "warn") raise # else: # # Reserve Item # DBSession.flush() # try: # warehouse = order.region.region_warehouse[0] # reserveItemQty = 0 # details = order.order_details # for d in details: # availableQty = d.item.availableQtyByWarehouse(warehouse.id) # DBSession.query(ReserveItem).filter(and_(ReserveItem.orderDetailID == d.id, # ReserveItem.active == 0)).update({ReserveItem.active: 1}) # DBSession.flush() # if availableQty >= d.qty: # reserveItem = ReserveItem() # reserveItem.qty = d.qty # reserveItem.item = d.item # reserveItem.warehouse = warehouse # reserveItem.orderID = order.id # reserveItem.orderDetail = d # reserveItem.issuedBy = request.identity["user"] # reserveItem.lastModifyBy = request.identity["user"] # DBSession.add(reserveItem) # DBSession.flush() # reserveItemQty += 1 # if reserveItemQty <= 0: # order.status = RESERVED_FAIL # elif reserveItemQty > 0 and reserveItemQty < len(details): # order.status = PARTIAL_RESERVED_SUCCESS # elif reserveItemQty > 0 and reserveItemQty == len(details): # order.status = ALL_RESERVED_SUCCESS # except: # transaction.doom() # traceback.print_exc() flash("The manual order has been confirmed successfully!") redirect("/order/view?code=%s"%(rpacEncrypt(order.id))) @expose() def getAjaxField(self, **kw): try: fieldName = kw["fieldName"] value = kw["q"] result = [] if fieldName == 'item_no': rs = DBSession.query(Item) \ .filter(Item.item_number.op('ILIKE')('%%%s%%'%str(value).strip())) \ .filter(Item.active == 0) \ .all() result = ["%s|%d" % (v.item_number, v.id) for v in rs] elif fieldName == 'vendorPO': rs = DBSession.query(OrderInfoHeader) \ 
.filter(OrderInfoHeader.vendorPO.op('ILIKE')('%%%s%%'%str(value).strip())) \ .filter(OrderInfoHeader.status != 0) \ .all() result = ["%s|%d" % (v.vendorPO, v.id) for v in rs] elif fieldName == 'orderNO': rs = DBSession.query(OrderInfoHeader) \ .filter(OrderInfoHeader.orderNO.op('ILIKE')('%%%s%%'%str(value).strip())) \ .filter(OrderInfoHeader.status != 0) \ .all() result = ["%s|%d" % (v.orderNO, v.id) for v in rs] elif fieldName == 'item_detail': region_id = kw['region_id'] rs = DBSession.query(Item,Price).filter(and_( Item.active == 0, Item.item_number.op('ILIKE')('%%%s%%'%str(value).strip()), Price.active == 0, Item.id == Price.itemID, Price.regionID == region_id, )).limit(10) result = ["%s|%d|%s|%s|%s|%s|%.2f" % (i.item_number, i.id,i.width or '',i.length or '',i.gusset or '',i.lip or '',p.price * 1000) for i,p in rs] else: result = [] data = "\n".join(result) return data except: traceback.print_exc() @expose('json') def ajaxItemInfo(self, **kw): try: item_no = kw.get("item_no", False) item = DBSession.query(Item) \ .filter(Item.item_number == item_no.strip()) \ .filter(Item.active == 0) \ .first() price = DBSession.query(Price) \ .filter(Price.itemID == item.id) \ .filter(Price.regionID == int(kw.get("region_id", False))) \ .first() result = {'width': item.width if len(item.width) > 0 else '', 'length': item.length if len(item.length) > 0 else '', 'gusset': item.gusset if len(item.gusset) > 0 else '', 'lip': item.lip if len(item.lip) > 0 else '', 'price': price.price } return result except: traceback.print_exc() @expose() @tabFocus(tab_type="main") def cancel(self, **kw): (flag, id) = rpacDecrypt(kw.get("code", "")) if not flag: flash("Please don't access the resource illegally!") redirect('/') ph = getOr404(OrderInfoHeader, id) if len(ph.order_details) < 1: flash("There's no order related to this PO!", "warn") redirect('/') try: ph.status = 0 DBSession.add(ph) # if ph.order_reserve_item: # DBSession.query(ReserveItem).filter(and_(ReserveItem.id.in_([r.id for r 
in ph.order_reserve_item]), # ReserveItem.active == 0)).update({ReserveItem.active: 1}) flash("The order has been canceled successfully!") except: traceback.print_exc() flash("There's an error occured during cancel this order!") redirect('/') @expose() def exportPDFFile(self, **kw): try: order = OrderInfoHeader.get_order(kw['id']) pdf_file = os.path.join(config.download_dir, '%s.pdf' % order.orderNO) phantomjs = os.path.join(config.public_dir, 'phantomjs', 'phantomjs.exe') rasterize = os.path.join(config.public_dir, 'phantomjs', 'rasterize.js') http_url = 'http://%s/viewOrder/view?code=%s' % (request.headers.get('Host'), rpacEncrypt(order.id)) print '*'*20, '\n', phantomjs, '\n', rasterize, '\n', http_url, '\n', pdf_file cmd = '%s %s %s %s' % (phantomjs, rasterize, http_url, pdf_file) sp = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT) while 1: if sp.poll() is not None: #print 'exec command completed.' break else: line = sp.stdout.readline().strip() pd_zip_file = os.path.join(config.download_dir, "gap_%s%d.zip" % (dt.now().strftime("%Y%m%d%H%M%S"), random.randint(1, 1000))) out_zip_file = zipfile.ZipFile(pd_zip_file, "w", zlib.DEFLATED) out_zip_file.write(os.path.abspath(os.path.join(config.download_dir, '%s.pdf' % order.orderNO)), os.path.basename(os.path.join(config.download_dir, '%s.pdf' % order.orderNO))) out_zip_file.close() # try: # os.remove(pdf_file) # except: # pass return (serveFile(unicode(pd_zip_file)), pd_zip_file) except: file = open('log.txt', 'w') traceback.print_exc(None, file) file.close()
mit
secretsquirrel/sslstrip
sslstrip.py
72
4066
#!/usr/bin/env python """sslstrip is a MITM tool that implements Moxie Marlinspike's SSL stripping attacks.""" __author__ = "Moxie Marlinspike" __email__ = "moxie@thoughtcrime.org" __license__= """ Copyright (c) 2004-2009 Moxie Marlinspike <moxie@thoughtcrime.org> This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 3 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA """ from twisted.web import http from twisted.internet import reactor from sslstrip.StrippingProxy import StrippingProxy from sslstrip.URLMonitor import URLMonitor from sslstrip.CookieCleaner import CookieCleaner import sys, getopt, logging, traceback, string, os gVersion = "0.9" def usage(): print "\nsslstrip " + gVersion + " by Moxie Marlinspike" print "Usage: sslstrip <options>\n" print "Options:" print "-w <filename>, --write=<filename> Specify file to log to (optional)." print "-p , --post Log only SSL POSTs. (default)" print "-s , --ssl Log all SSL traffic to and from server." print "-a , --all Log all SSL and HTTP traffic to and from server." print "-l <port>, --listen=<port> Port to listen on (default 10000)." print "-f , --favicon Substitute a lock favicon on secure requests." print "-k , --killsessions Kill sessions in progress." print "-h Print this help message." 
print "" def parseOptions(argv): logFile = 'sslstrip.log' logLevel = logging.WARNING listenPort = 10000 spoofFavicon = False killSessions = False try: opts, args = getopt.getopt(argv, "hw:l:psafk", ["help", "write=", "post", "ssl", "all", "listen=", "favicon", "killsessions"]) for opt, arg in opts: if opt in ("-h", "--help"): usage() sys.exit() elif opt in ("-w", "--write"): logFile = arg elif opt in ("-p", "--post"): logLevel = logging.WARNING elif opt in ("-s", "--ssl"): logLevel = logging.INFO elif opt in ("-a", "--all"): logLevel = logging.DEBUG elif opt in ("-l", "--listen"): listenPort = arg elif opt in ("-f", "--favicon"): spoofFavicon = True elif opt in ("-k", "--killsessions"): killSessions = True return (logFile, logLevel, listenPort, spoofFavicon, killSessions) except getopt.GetoptError: usage() sys.exit(2) def main(argv): (logFile, logLevel, listenPort, spoofFavicon, killSessions) = parseOptions(argv) logging.basicConfig(level=logLevel, format='%(asctime)s %(message)s', filename=logFile, filemode='w') URLMonitor.getInstance().setFaviconSpoofing(spoofFavicon) CookieCleaner.getInstance().setEnabled(killSessions) strippingFactory = http.HTTPFactory(timeout=10) strippingFactory.protocol = StrippingProxy reactor.listenTCP(int(listenPort), strippingFactory) print "\nsslstrip " + gVersion + " by Moxie Marlinspike running..." reactor.run() if __name__ == '__main__': main(sys.argv[1:])
gpl-3.0
rbaghdadi/ISIR
utils/speedup_model/src/model/model_bn_ELU.py
2
1139
import torch import torch.nn as nn import torch.nn.functional as F class Model_BN_ELU(nn.Module): def __init__(self, input_size, output_size, hidden_sizes=[10, 10, 10], drops=[0.4, 0.4, 0.4]): super().__init__() hidden_sizes = [input_size] + hidden_sizes self.hidden_layers = nn.ModuleList() self.batch_norm_layers = nn.ModuleList() self.dropouts= nn.ModuleList() for i in range(len(hidden_sizes)-1): self.hidden_layers.append(nn.Linear(hidden_sizes[i], hidden_sizes[i+1], bias=False)) self.batch_norm_layers.append(nn.BatchNorm1d(hidden_sizes[i+1])) nn.init.xavier_uniform_(self.hidden_layers[i].weight) self.dropouts.append(nn.Dropout(drops[i])) self.predict = nn.Linear(hidden_sizes[-1], output_size) nn.init.xavier_uniform_(self.predict.weight) self.ELU=nn.ELU() def forward(self, x): for i in range(len(self.hidden_layers)): x = self.dropouts[i](self.ELU(self.batch_norm_layers[i](self.hidden_layers[i](x)))) x = self.predict(x) return F.relu(x)
mit
pyramania/scipy
scipy/optimize/tests/test_lsq_linear.py
59
5067
import numpy as np from numpy.linalg import lstsq from numpy.testing import (assert_allclose, assert_equal, assert_, run_module_suite, assert_raises) from scipy.sparse import rand from scipy.sparse.linalg import aslinearoperator from scipy.optimize import lsq_linear A = np.array([ [0.171, -0.057], [-0.049, -0.248], [-0.166, 0.054], ]) b = np.array([0.074, 1.014, -0.383]) class BaseMixin(object): def __init__(self): self.rnd = np.random.RandomState(0) def test_dense_no_bounds(self): for lsq_solver in self.lsq_solvers: res = lsq_linear(A, b, method=self.method, lsq_solver=lsq_solver) assert_allclose(res.x, lstsq(A, b)[0]) def test_dense_bounds(self): # Solutions for comparison are taken from MATLAB. lb = np.array([-1, -10]) ub = np.array([1, 0]) for lsq_solver in self.lsq_solvers: res = lsq_linear(A, b, (lb, ub), method=self.method, lsq_solver=lsq_solver) assert_allclose(res.x, lstsq(A, b)[0]) lb = np.array([0.0, -np.inf]) for lsq_solver in self.lsq_solvers: res = lsq_linear(A, b, (lb, np.inf), method=self.method, lsq_solver=lsq_solver) assert_allclose(res.x, np.array([0.0, -4.084174437334673]), atol=1e-6) lb = np.array([-1, 0]) for lsq_solver in self.lsq_solvers: res = lsq_linear(A, b, (lb, np.inf), method=self.method, lsq_solver=lsq_solver) assert_allclose(res.x, np.array([0.448427311733504, 0]), atol=1e-15) ub = np.array([np.inf, -5]) for lsq_solver in self.lsq_solvers: res = lsq_linear(A, b, (-np.inf, ub), method=self.method, lsq_solver=lsq_solver) assert_allclose(res.x, np.array([-0.105560998682388, -5])) ub = np.array([-1, np.inf]) for lsq_solver in self.lsq_solvers: res = lsq_linear(A, b, (-np.inf, ub), method=self.method, lsq_solver=lsq_solver) assert_allclose(res.x, np.array([-1, -4.181102129483254])) lb = np.array([0, -4]) ub = np.array([1, 0]) for lsq_solver in self.lsq_solvers: res = lsq_linear(A, b, (lb, ub), method=self.method, lsq_solver=lsq_solver) assert_allclose(res.x, np.array([0.005236663400791, -4])) def test_dense_rank_deficient(self): A = 
np.array([[-0.307, -0.184]]) b = np.array([0.773]) lb = [-0.1, -0.1] ub = [0.1, 0.1] for lsq_solver in self.lsq_solvers: res = lsq_linear(A, b, (lb, ub), method=self.method, lsq_solver=lsq_solver) assert_allclose(res.x, [-0.1, -0.1]) A = np.array([ [0.334, 0.668], [-0.516, -1.032], [0.192, 0.384], ]) b = np.array([-1.436, 0.135, 0.909]) lb = [0, -1] ub = [1, -0.5] for lsq_solver in self.lsq_solvers: res = lsq_linear(A, b, (lb, ub), method=self.method, lsq_solver=lsq_solver) assert_allclose(res.optimality, 0, atol=1e-11) def test_full_result(self): lb = np.array([0, -4]) ub = np.array([1, 0]) res = lsq_linear(A, b, (lb, ub), method=self.method) assert_allclose(res.x, [0.005236663400791, -4]) r = A.dot(res.x) - b assert_allclose(res.cost, 0.5 * np.dot(r, r)) assert_allclose(res.fun, r) assert_allclose(res.optimality, 0.0, atol=1e-12) assert_equal(res.active_mask, [0, -1]) assert_(res.nit < 15) assert_(res.status == 1 or res.status == 3) assert_(isinstance(res.message, str)) assert_(res.success) class SparseMixin(object): def test_sparse_and_LinearOperator(self): m = 5000 n = 1000 A = rand(m, n, random_state=0) b = self.rnd.randn(m) res = lsq_linear(A, b) assert_allclose(res.optimality, 0, atol=1e-6) A = aslinearoperator(A) res = lsq_linear(A, b) assert_allclose(res.optimality, 0, atol=1e-6) def test_sparse_bounds(self): m = 5000 n = 1000 A = rand(m, n, random_state=0) b = self.rnd.randn(m) lb = self.rnd.randn(n) ub = lb + 1 res = lsq_linear(A, b, (lb, ub)) assert_allclose(res.optimality, 0.0, atol=1e-8) res = lsq_linear(A, b, (lb, ub), lsmr_tol=1e-13) assert_allclose(res.optimality, 0.0, atol=1e-8) res = lsq_linear(A, b, (lb, ub), lsmr_tol='auto') assert_allclose(res.optimality, 0.0, atol=1e-8) class TestTRF(BaseMixin, SparseMixin): method = 'trf' lsq_solvers = ['exact', 'lsmr'] class TestBVLS(BaseMixin): method = 'bvls' lsq_solvers = ['exact'] if __name__ == '__main__': run_module_suite()
bsd-3-clause
biziwalker/mtasa
vendor/google-breakpad/src/tools/gyp/pylib/gyp/easy_xml.py
1049
4803
# Copyright (c) 2011 Google Inc. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import re import os def XmlToString(content, encoding='utf-8', pretty=False): """ Writes the XML content to disk, touching the file only if it has changed. Visual Studio files have a lot of pre-defined structures. This function makes it easy to represent these structures as Python data structures, instead of having to create a lot of function calls. Each XML element of the content is represented as a list composed of: 1. The name of the element, a string, 2. The attributes of the element, a dictionary (optional), and 3+. The content of the element, if any. Strings are simple text nodes and lists are child elements. Example 1: <test/> becomes ['test'] Example 2: <myelement a='value1' b='value2'> <childtype>This is</childtype> <childtype>it!</childtype> </myelement> becomes ['myelement', {'a':'value1', 'b':'value2'}, ['childtype', 'This is'], ['childtype', 'it!'], ] Args: content: The structured content to be converted. encoding: The encoding to report on the first XML line. pretty: True if we want pretty printing with indents and new lines. Returns: The XML content as a string. """ # We create a huge list of all the elements of the file. xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding] if pretty: xml_parts.append('\n') _ConstructContentList(xml_parts, content, pretty) # Convert it to a string return ''.join(xml_parts) def _ConstructContentList(xml_parts, specification, pretty, level=0): """ Appends the XML parts corresponding to the specification. Args: xml_parts: A list of XML parts to be appended to. specification: The specification of the element. See EasyXml docs. pretty: True if we want pretty printing with indents and new lines. level: Indentation level. """ # The first item in a specification is the name of the element. 
if pretty: indentation = ' ' * level new_line = '\n' else: indentation = '' new_line = '' name = specification[0] if not isinstance(name, str): raise Exception('The first item of an EasyXml specification should be ' 'a string. Specification was ' + str(specification)) xml_parts.append(indentation + '<' + name) # Optionally in second position is a dictionary of the attributes. rest = specification[1:] if rest and isinstance(rest[0], dict): for at, val in sorted(rest[0].iteritems()): xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True))) rest = rest[1:] if rest: xml_parts.append('>') all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True) multi_line = not all_strings if multi_line and new_line: xml_parts.append(new_line) for child_spec in rest: # If it's a string, append a text node. # Otherwise recurse over that child definition if isinstance(child_spec, str): xml_parts.append(_XmlEscape(child_spec)) else: _ConstructContentList(xml_parts, child_spec, pretty, level + 1) if multi_line and indentation: xml_parts.append(indentation) xml_parts.append('</%s>%s' % (name, new_line)) else: xml_parts.append('/>%s' % new_line) def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False, win32=False): """ Writes the XML content to disk, touching the file only if it has changed. Args: content: The structured content to be written. path: Location of the file. encoding: The encoding to report on the first line of the XML file. pretty: True if we want pretty printing with indents and new lines. 
""" xml_string = XmlToString(content, encoding, pretty) if win32 and os.linesep != '\r\n': xml_string = xml_string.replace('\n', '\r\n') # Get the old content try: f = open(path, 'r') existing = f.read() f.close() except: existing = None # It has changed, write it if existing != xml_string: f = open(path, 'w') f.write(xml_string) f.close() _xml_escape_map = { '"': '&quot;', "'": '&apos;', '<': '&lt;', '>': '&gt;', '&': '&amp;', '\n': '&#xA;', '\r': '&#xD;', } _xml_escape_re = re.compile( "(%s)" % "|".join(map(re.escape, _xml_escape_map.keys()))) def _XmlEscape(value, attr=False): """ Escape a string for inclusion in XML.""" def replace(match): m = match.string[match.start() : match.end()] # don't replace single quotes in attrs if attr and m == "'": return m return _xml_escape_map[m] return _xml_escape_re.sub(replace, value)
gpl-3.0
Hakuba/youtube-dl
youtube_dl/extractor/stitcher.py
30
2934
from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( determine_ext, int_or_none, js_to_json, unescapeHTML, ) class StitcherIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?stitcher\.com/podcast/(?:[^/]+/)+e/(?:(?P<display_id>[^/#?&]+?)-)?(?P<id>\d+)(?:[/#?&]|$)' _TESTS = [{ 'url': 'http://www.stitcher.com/podcast/the-talking-machines/e/40789481?autoplay=true', 'md5': '391dd4e021e6edeb7b8e68fbf2e9e940', 'info_dict': { 'id': '40789481', 'ext': 'mp3', 'title': 'Machine Learning Mastery and Cancer Clusters', 'description': 'md5:55163197a44e915a14a1ac3a1de0f2d3', 'duration': 1604, 'thumbnail': 're:^https?://.*\.jpg', }, }, { 'url': 'http://www.stitcher.com/podcast/panoply/vulture-tv/e/the-rare-hourlong-comedy-plus-40846275?autoplay=true', 'info_dict': { 'id': '40846275', 'display_id': 'the-rare-hourlong-comedy-plus', 'ext': 'mp3', 'title': "The CW's 'Crazy Ex-Girlfriend'", 'description': 'md5:04f1e2f98eb3f5cbb094cea0f9e19b17', 'duration': 2235, 'thumbnail': 're:^https?://.*\.jpg', }, 'params': { 'skip_download': True, }, }, { # escaped title 'url': 'http://www.stitcher.com/podcast/marketplace-on-stitcher/e/40910226?autoplay=true', 'only_matching': True, }, { 'url': 'http://www.stitcher.com/podcast/panoply/getting-in/e/episode-2a-how-many-extracurriculars-should-i-have-40876278?autoplay=true', 'only_matching': True, }] def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) audio_id = mobj.group('id') display_id = mobj.group('display_id') or audio_id webpage = self._download_webpage(url, display_id) episode = self._parse_json( js_to_json(self._search_regex( r'(?s)var\s+stitcher\s*=\s*({.+?});\n', webpage, 'episode config')), display_id)['config']['episode'] title = unescapeHTML(episode['title']) formats = [{ 'url': episode[episode_key], 'ext': determine_ext(episode[episode_key]) or 'mp3', 'vcodec': 'none', } for episode_key in ('episodeURL',) if episode.get(episode_key)] description = 
self._search_regex( r'Episode Info:\s*</span>([^<]+)<', webpage, 'description', fatal=False) duration = int_or_none(episode.get('duration')) thumbnail = episode.get('episodeImage') return { 'id': audio_id, 'display_id': display_id, 'title': title, 'description': description, 'duration': duration, 'thumbnail': thumbnail, 'formats': formats, }
unlicense
jjs0sbw/CSPLN
apps/scaffolding/linux/web2py/gluon/contrib/login_methods/email_auth.py
44
1493
import smtplib import logging def email_auth(server="smtp.gmail.com:587", domain="@gmail.com", tls_mode=None): """ to use email_login: from gluon.contrib.login_methods.email_auth import email_auth auth.settings.login_methods.append(email_auth("smtp.gmail.com:587", "@gmail.com")) """ def email_auth_aux(email, password, server=server, domain=domain, tls_mode=tls_mode): if domain: if not isinstance(domain, (list, tuple)): domain = [str(domain)] if not [d for d in domain if email[-len(d):] == d]: return False (host, port) = server.split(':') if tls_mode is None: # then auto detect tls_mode = port == '587' try: server = None server = smtplib.SMTP(host, port) server.ehlo() if tls_mode: server.starttls() server.ehlo() server.login(email, password) server.quit() return True except: logging.exception('email_auth() failed') if server: try: server.quit() except: # server might already close connection after error pass return False return email_auth_aux
gpl-3.0
Danisan/odoo-1
addons/sale_crm/__openerp__.py
260
2036
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Opportunity to Quotation', 'version': '1.0', 'category': 'Hidden', 'description': """ This module adds a shortcut on one or several opportunity cases in the CRM. =========================================================================== This shortcut allows you to generate a sales order based on the selected case. If different cases are open (a list), it generates one sale order by case. The case is then closed and linked to the generated sales order. We suggest you to install this module, if you installed both the sale and the crm modules. """, 'author': 'OpenERP SA', 'website': 'https://www.odoo.com/page/crm', 'depends': ['sale', 'crm', 'web_kanban_gauge'], 'data': [ 'wizard/crm_make_sale_view.xml', 'sale_crm_view.xml', 'security/sale_crm_security.xml', 'security/ir.model.access.csv', ], 'demo': [], 'test': ['test/sale_crm.yml'], 'installable': True, 'auto_install': True, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
Manishearth/servo
tests/wpt/web-platform-tests/subresource-integrity/tools/generate_javascript.py
210
1297
from os import path, listdir
from hashlib import sha512, sha256, md5
from base64 import b64encode
import re

# Directory holding the .js fixtures (the parent of this tools/ directory).
JS_DIR = path.normpath(path.join(__file__, "..", ".."))


def js_files():
    """Yield the full path of each .js file in the javascript directory.

    Fix: the original tested ``path.isfile(f)`` on the bare filename, which
    only worked when the current working directory happened to be JS_DIR.
    """
    for name in listdir(JS_DIR):
        full = path.join(JS_DIR, name)
        if path.isfile(full) and name.endswith(".js"):
            yield full


def format_digest(digest):
    """Base64-encode a binary digest, as the SRI spec requires.

    Subresource Integrity uses *standard* base64 (with padding), not the
    URL-safe alphabet; the original docstring claimed otherwise.
    """
    return b64encode(digest).decode("ascii")


def sha512_uri(content):
    """Return the sha512 integrity-metadata string for ``content`` (bytes)."""
    return "sha512-%s" % format_digest(sha512(content).digest())


def sha256_uri(content):
    """Return the sha256 integrity-metadata string for ``content`` (bytes)."""
    return "sha256-%s" % format_digest(sha256(content).digest())


def md5_uri(content):
    """Return the md5 integrity-metadata string for ``content`` (bytes)."""
    return "md5-%s" % format_digest(md5(content).digest())


def main():
    """Rewrite each .js file with a marker assignment and print its hashes."""
    for file in js_files():
        print("Generating content for %s" % file)
        base = path.splitext(path.basename(file))[0]
        # Sanitize the basename into a valid JS identifier.
        var_name = re.sub(r"[^a-z0-9]", "_", base)
        content = "%s=true;" % var_name
        with open(file, "w") as f:
            f.write(content)
        # Hash exactly the bytes written to disk.
        data = content.encode("utf-8")
        print("\tSHA512 integrity: %s" % sha512_uri(data))
        print("\tSHA256 integrity: %s" % sha256_uri(data))
        print("\tMD5 integrity: %s" % md5_uri(data))


if __name__ == "__main__":
    main()
mpl-2.0
mkocka/galaxytea
integral/domcek/numerical_integration.py
1
3477
#!/usr/bin/env python from __future__ import division import matplotlib.pyplot as plt import matplotlib from scipy import constants as const from pylab import * # def planck(T,lambda_i,lambda_f,step=10**(-1)): x ,y = ([] for i in range(2)) for i in arange(lambda_i,lambda_f,step): # i*10**(-9) - correctuin to nm f = 10**(-9)*(2*const.h*const.c**2)/(((i*10**(-9))**5) *(exp((const.c*const.h)/((i*10**(-9))*const.k*T))-1)) y.append(f) x.append(i) return x,y def plot_f(x,y): #plots one planck function plt.plot(x,y) plt.xlabel('$\lambda$ [nm]') plt.ylabel('Spectral radiance [W.sr$^{-1}$.m$^{-}$.nm$^{-1}$') plt.grid() plt.savefig("planck") plt.gcf().clear() #first 3 parameters characterize range of temperatures and step between them #other 2 parameters characterize range of planck function def plot_multiple(T_i,T_f,step,lambda_i,lambda_f): #plots multiple planck into one graph for i in range(T_i,T_f,step): a, b = planck(i,lambda_i,lambda_f) plt.plot(a,b) plt.xlabel('$\lambda$ [nm]') plt.ylabel('Spectral radiance [W.sr$^{-1}$.m$^{-}$.nm$^{-1}$') plt.grid() plt.savefig("planck_functions") plt.gcf().clear() def midpoint_rule(a,b): #rectangle rule dx = a[1]-a[0] summation=0 for i in range(1,len(a)): #first and last value excluded summation += dx*b[i] return summation def trapezoid_rule(a,b): dx = a[1]-a[0] summation=0.5*b[0]+0.5*b[len(b)-1] for i in range(1,len(a)): #first and last value excluded summation += dx*b[i] return summation def simpson(a,b): dx = a[1]-a[0] summation = dx*(b[0]+b[len(b)-1])/3 ax ,bx = ([] for i in range(2)) if (len(a)%2==0): # this rule needs odd number of values a.pop() and b.pop() for i in range(len(a)): if i%2==0: summation +=4*b[i]*dx/3 if i%2==1: summation +=2*b[i]*dx/3 return summation plot_multiple(5000,10000,1000,10,2000) #defining boundries of calculating function lambda_initial=10 lambda_final=10000 #temperature of blackbody temperature=5778 #calculating planck function a, b = planck(temperature,lambda_initial,lambda_final) plot_f(a,b) 
#theoretical stefan - boltzman value stef_boltz = const.sigma*temperature**4 print 30*"-" print "Planck function parameters" print 30*"-" print "Temperature:", temperature print "Range of integration in nm: (", lambda_initial, ",", lambda_final,")" print print "Calculated values:" print 30*"-" #integration gives us value dependent on sr**-1, need to mutliply by appropriate constant (pi) #for more information: http://en.wikipedia.org/wiki/Planck's_law#Stefan.E2.80.93Boltzmann_law print "Stephan-Boltzman law:", stef_boltz print "Midpoint rule method:", midpoint_rule(a,b)*const.pi, "Sigma:", midpoint_rule(a,b)*const.pi/(temperature**4) print "Trapezoid rule method:", trapezoid_rule(a,b)*const.pi, "Sigma:", trapezoid_rule(a,b)*const.pi/(temperature**4) print "Simpson's rule method:", simpson(a,b)*const.pi, "Sigma:", simpson(a,b)*const.pi/(temperature**4) print "Theoretical Sigma:", ((const.pi)**2*(const.k)**4)/(60*((const.h)/(2*const.pi))**3*(const.c)**2) print print "Relative errors" print 30*"-" print "Midpoint rule method:", (stef_boltz - midpoint_rule(a,b)*const.pi) / stef_boltz print "Trapezoid rule method:", (stef_boltz - trapezoid_rule(a,b)*const.pi) / stef_boltz print "Simpson's rule method:", (stef_boltz - simpson(a,b)*const.pi) / stef_boltz print midpoint_rule(a,b)*const.pi / stef_boltz print trapezoid_rule(a,b)*const.pi / stef_boltz print simpson(a,b)*const.pi / stef_boltz
mit
eufarn7sp/egads-eufar
egads/algorithms/microphysics/mass_conc_dmt.py
2
4982
__author__ = "mfreer, ohenry"
__date__ = "2017-01-26 13:07"
__version__ = "1.2"
__all__ = ['MassConcDmt']

import numpy
import egads.core.egads_core as egads_core
import egads.core.metadata as egads_metadata


class MassConcDmt(egads_core.EgadsAlgorithm):

    """
    FILE        mass_conc_dmt.py

    VERSION     1.2

    CATEGORY    Microphysics

    PURPOSE     Calculates mass concentration given a size distribution.

    DESCRIPTION Calculates mass concentration given a size distribution. Can be
                used to calculate liquid or ice water content depending on the
                types of hydrometeors being sampled.

    INPUT       c_i     array[time, bins]   cm-3    number concentration of
                                                    hydrometeors in size category i
                d_i     vector[bins]        um      average diameter of size
                                                    category i
                s_i     array[time,bins]    _       shape factor of hydrometeor
                                                    in size category i to account
                                                    for asphericity
                rho_i   vector[bins]        g/cm3   density of the hydrometeor
                                                    in size category i

    OUTPUT      M       vector[time]        g/cm3   mass concentration

    SOURCE

    REFERENCES  "Data Analysis User's Guide, Chapter 1, Section 1.3.2", Droplet
                Measurement Technologies, 2009,
                http://www.dropletmeasurement.com/sites/default/files/Manuals
                Guides/Data%20Analysis%20Guide/DOC-0222%20Rev%20A%20Data%20Analysis%20Guide%20Ch%201.pdf
    """

    def __init__(self, return_Egads=True):
        # The metadata dicts below both document the algorithm and drive the
        # EgadsAlgorithm base class (unit conversion, output wrapping).
        egads_core.EgadsAlgorithm.__init__(self, return_Egads)

        self.output_metadata = egads_metadata.VariableMetadata({'units':'g/cm^3',
                                                               'long_name':'Mass concentration',
                                                               'standard_name':'',
                                                               'Category':['Microphysics']})

        self.metadata = egads_metadata.AlgorithmMetadata({'Inputs':['c_i', 'd_i', 's_i', 'rho_i'],
                                                          'InputUnits':['cm^-3', 'um', '', 'g/cm^3'],
                                                          'InputTypes':['array[time, bins]','vector[bins]','array[time,bins]','vector[bins]'],
                                                          'InputDescription':['Number concentration of hydrometeors in size category i',
                                                                              'Average diameter of size category i',
                                                                              'Shape factor of hydrometeor in size category i to account for asphericity',
                                                                              'density of the hydrometeor in size category i'],
                                                          'Outputs':['M'],
                                                          'OutputUnits':['g/cm^3'],
                                                          'OutputTypes':['vector[time]'],
                                                          'OutputDescription':['Mass concentration'],
                                                          'Purpose':'Calculates mass concentration given a size distribution',
                                                          'Description':'Calculates mass concentration given a size distribution. Can be used to calculate liquid or ice water content depending on the types of hydrometeors being sampled',
                                                          'Category':'Microphysics',
                                                          'Source':'',
                                                          'References':"Data Analysis User's Guide, Chapter 1, Section 1.3.2, Droplet Measurement Technologies, 2009, http://www.dropletmeasurement.com/sites/default/files/ManualsGuides/Data%20Analysis%20Guide/DOC-0222%20Rev%20A%20Data%20Analysis%20Guide%20Ch%201.pdf",
                                                          'Processor':self.name,
                                                          'ProcessorDate':__date__,
                                                          'ProcessorVersion':__version__,
                                                          'DateProcessed':self.now()},
                                                          self.output_metadata)

    def run(self, c_i, d_i, s_i, rho_i):
        # Public entry point: delegate to the base class, which handles unit
        # conversion/validation before calling _algorithm below.
        return egads_core.EgadsAlgorithm.run(self, c_i, d_i, s_i, rho_i)

    def _algorithm(self, c_i, d_i, s_i, rho_i):
        # Convert diameters from um to cm so M comes out in g/cm^3.
        d_i = d_i * 1.0e-4

        # Sphere-volume formula (pi/6 * d^3) scaled by shape factor, density
        # and number concentration; sum over the size bins.
        if c_i.ndim <= 1:
            M = (numpy.pi / 6.0) * numpy.sum(s_i * rho_i * c_i * d_i ** 3)
        else:
            # multiple time steps: sum over the bins axis only
            M = (numpy.pi / 6.0) * numpy.sum(s_i * rho_i * c_i * d_i ** 3, axis=1)

        return M
bsd-3-clause
jelugbo/tundex
common/lib/xmodule/xmodule/modulestore/exceptions.py
25
2613
"""
Exceptions thrown by KeyStore objects
"""


class ItemNotFoundError(Exception):
    """Raised when a requested item does not exist in the store."""
    pass


class ItemWriteConflictError(Exception):
    """Raised when concurrent writes to the same item collide."""
    pass


class InsufficientSpecificationError(Exception):
    """Raised when a key is too vague to identify a single item."""
    pass


class OverSpecificationError(Exception):
    """Raised when a key carries more detail than the operation allows."""
    pass


class InvalidLocationError(Exception):
    """Raised when a location/key is malformed."""
    pass


class NoPathToItem(Exception):
    """Raised when no tree path leads to the requested item."""
    pass


class ReferentialIntegrityError(Exception):
    """
    An incorrect pointer to an object exists. For example, 2 parents point to the
    same child, an xblock points to a nonexistent child (which probably raises
    ItemNotFoundError instead depending on context).
    """
    pass


class DuplicateItemError(Exception):
    """
    Attempted to create an item which already exists.
    """
    def __init__(self, element_id, store=None, collection=None):
        super(DuplicateItemError, self).__init__()
        # Remember where the duplicate was detected so __str__ can report it.
        self.element_id = element_id
        self.store = store
        self.collection = collection

    def __str__(self, *args, **kwargs):
        """
        Print info about what's duplicated
        """
        # Computed for parity with the base class; the template below only
        # interpolates attributes of self.
        base_msg = Exception.__str__(self, *args, **kwargs)
        template = '{0.store}[{0.collection}] already has {0.element_id}'
        return template.format(self, base_msg)


class VersionConflictError(Exception):
    """
    The caller asked for either draft or published head and gave a version which
    conflicted with it.
    """
    def __init__(self, requestedLocation, currentHeadVersionGuid):
        detail = u'Requested {}, but current head is {}'.format(
            requestedLocation,
            currentHeadVersionGuid
        )
        super(VersionConflictError, self).__init__(detail)


class DuplicateCourseError(Exception):
    """
    An attempt to create a course whose id duplicates an existing course's
    """
    def __init__(self, course_id, existing_entry):
        """
        existing_entry will have the who, when, and other properties of the existing entry
        """
        detail = u'Cannot create course {}, which duplicates {}'.format(course_id, existing_entry)
        super(DuplicateCourseError, self).__init__(detail)
        self.course_id = course_id
        self.existing_entry = existing_entry


class InvalidBranchSetting(Exception):
    """
    Raised when the process' branch setting did not match the required setting for the attempted operation on a store.
    """
    def __init__(self, expected_setting, actual_setting):
        detail = u"Invalid branch: expected {} but got {}".format(expected_setting, actual_setting)
        super(InvalidBranchSetting, self).__init__(detail)
        # Keep both settings so callers can inspect the mismatch.
        self.expected_setting = expected_setting
        self.actual_setting = actual_setting
agpl-3.0
hackerdeen/hackhub
web.py
1
9524
"""Flask routes for the hackhub web frontend: public pages, the member hub
(CAS login, profiles, payments, space status) and door-unlock endpoints."""
# NOTE(review): `json` is imported twice from flask in the line below.
from flask import request, jsonify, Response, json, redirect, abort, render_template, session, json
from hackhub import app, spaceapi, get_db, DOOR_CODE
from member import Member
from status import Status, new_status
from event import new_event, recent_events
from bank import bank
from door_code import new_code, add_url_code, url_codes, user_url_code
from decorators import login_required
from casclient import client as casclient
from payments import membership
import datetime
import urllib

MONTHS = ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August', 'September', 'October', 'November', 'December']


@app.route('/')
def the_homepage():
    # Public landing page.
    return render_template('front_home.html')


@app.route('/about-us/')
def the_about_us():
    # Served by the same single-page front template as the homepage.
    return render_template('front_home.html')


@app.route('/contact-us/')
def the_contact():
    return render_template('front_home.html')


@app.route('/blog/')
def the_blog():
    return redirect('http://www.hackerdeen.org.uk/blog/')


@app.route('/spaceapi')
def spaceapi_json():
    """Publish the SpaceAPI document: open/closed state, recent events and
    monthly member counts, with CORS enabled for third-party consumers."""
    # NOTE(review): `s` aliases (and mutates) the module-level `spaceapi`
    # dict rather than copying it.
    s = spaceapi
    s['state'].update(Status().status)
    s['events'] = []
    for event in recent_events():
        # recent_events rows: (?, name, type, timestamp, extra)
        s['events'].append({'name': event[1], 'type': event[2], 'timestamp': event[3], 'extra': event[4]})
    s['sensors'] = {}
    membership_counts = membership()
    s['sensors']['total_member_count'] = []
    # Last three months of member counts; rows are (month, year, count).
    for i in range(0,3):
        s['sensors']['total_member_count'].append({
            'location': str(MONTHS[membership_counts[i][0] - 1]) + " " + str(membership_counts[i][1]),
            'value': membership_counts[i][2]
        })
    r = jsonify(s)
    r.headers['Access-Control-Allow-Origin'] = '*'
    r.headers['Cache-Control'] = 'no-cache'
    return r


@app.route('/hub/')
@login_required
def hub_home():
    # Member dashboard: profile, space status and payment state.
    m = Member(session['username'])
    return render_template('home.html', profile=m.get_profile(), status=Status().status, paid=m.is_paid(), username=session['username'], payment_history=m.payment_history())


@app.route('/hub/login')
def hub_login():
    # Delegate authentication to the CAS server.
    return redirect(casclient.get_login_url())


@app.route('/hub/login/ticket')
def hub_login_ticket():
    """CAS callback: validate the service ticket, then either log the member
    in or route an unknown user to the membership application form."""
    try:
        ticket = request.args["ticket"]
        user, attributes, pgtiou = casclient.verify_ticket(ticket)
        if not user:
            raise Exception()
    # NOTE(review): bare except used as "any validation failure -> retry login".
    except:
        return redirect('/hub/login')
    try:
        # Member(...) raises if the CAS user has no member record.
        m = Member(user)
    except:
        session['application_username'] = user
        return redirect('/hub/apply-for-membership')
    session['username'] = user
    return redirect('/hub/')


@app.route('/hub/logout')
def hub_logout():
    session.clear()
    return redirect('/')


@app.route('/hub/apply-for-membership', methods=['GET', 'POST'])
def hub_apply_for_membership():
    """Show (GET) or submit (POST) the membership application form for a
    CAS-authenticated user who has no member record yet."""
    if not session.get('application_username'):
        return redirect('/hub/')
    try:
        m = Member(session['application_username'])
    except:
        # Still not a member: carry on with the application flow.
        pass
    else:
        # A member record appeared since login: promote to a full session.
        session['username'] = session['application_username']
        del session['application_username']
        return redirect('/hub/')
    db = get_db()
    cur = db.cursor()
    # One application per username.
    cur.execute('SELECT 1 FROM application WHERE username=?', (session['application_username'],))
    if cur.fetchone():
        cur.close()
        return render_template('membership_apply_thanks.html', status=Status().status)
    cur.close()
    if request.method == 'GET':
        return render_template('membership_apply.html', status=Status().status)
    else:
        try:
            username = session['application_username']
            realname = request.form['realname']
            nickname = request.form['nickname']
            email = request.form['email']
            address = request.form['address']
        except KeyError:
            # Missing form field: re-show the form.
            return redirect('/hub/apply-for-membership')
        cur = db.cursor()
        cur.execute("""insert into application
            (username, realname, nickname, email, address, received)
            values (?, ?, ?, ?, ?, strftime('%s', 'now'))""",
            (username, realname, nickname, email, address))
        db.commit()
        cur.close()
        return render_template('membership_apply_thanks.html', status=Status().status)


@app.route('/hub/profile')
@login_required
def hub_profile():
    return render_template('profile.html', profile=Member(session['username']).get_profile(), status=Status().status)


@app.route('/hub/profile/edit', methods=['GET', 'POST'])
@login_required
def hub_profile_edit():
    """Show (GET) or apply (POST) edits to the logged-in member's profile;
    only whitelisted fields are updatable."""
    if request.method == 'GET':
        return render_template('profile_edit.html', profile=Member(session['username']).get_profile(), status=Status().status)
    elif request.method == 'POST':
        updatable = ['realname', 'nickname', 'email', 'twitter', 'irc', 'github', 'address']
        update = {}
        for x in request.form:
            if x in updatable:
                # The literal string 'None' is treated as "clear this field".
                update[x] = None if request.form[x] == 'None' else request.form[x]
        m = Member(session['username'])
        m.update_profile(update)
        return redirect('/hub/')


@app.route('/hub/profile/emails')
@login_required
def profile_emails():
    # Show the member's per-event email notification preferences.
    m = Member(session['username'])
    uprefs = m.get_email_prefs()
    db = get_db()
    cur = db.cursor()
    cur.execute("SELECT code, description FROM email_events")
    events = cur.fetchall()
    return render_template("email_prefs.html", uprefs=uprefs, events=events)


@app.route('/hub/profile/email_toggle/<event>')
@login_required
def profile_email_toggle(event):
    # Flip a single email-notification preference on/off.
    m = Member(session['username'])
    if m.get_email_prefs()[event]:
        m.set_email_pref(event, 0)
    else:
        m.set_email_pref(event, 1)
    return redirect('/hub/profile/emails')


@app.route('/hub/payments')
@login_required
def hub_payments():
    return render_template('payments.html', payments=Member(session['username']).get_payments(), status=Status().status)


@app.route('/hub/status')
@login_required
def hub_status():
    # Space open/closed status with a human-readable last-change time.
    status = Status().status
    status['lastchange_utc_forhumans'] = datetime.datetime.fromtimestamp(int(status['lastchange'])).strftime("%c")
    return render_template('status.html', status=status)


@app.route('/hub/status/update', methods=['POST', 'GET'])
@login_required
def hub_status_update():
    # Members may set the space state ('0' closed / '1' open) with a message.
    if request.method == 'GET':
        return render_template('status_update.html', status=Status().status)
    elif request.method == 'POST':
        if not ( request.form['state'] == '0' or request.form['state'] == '1' ):
            abort(400)
        new_status(request.form['state'], request.form['message'], session['username'])
        return redirect('/hub/status')


@app.route('/hub/door')
@login_required
def door():
    # Door control page: the member's one-shot URL code plus the keypad code.
    return render_template("unlock.html", code=user_url_code(session['username']), door_code=DOOR_CODE)


# NOTE(review): mid-module import, kept in place to preserve any import-order
# side effects in the `unlock` module.
from unlock import unlock


@app.route('/hub/open_door')
@login_required
def open_door():
    """Unlock the door for the logged-in member; only active (paid) members
    may do so. The outcome message is passed via the redirect URL."""
    if Member(session['username']).is_active():
        success, message = unlock(session['username'])
        if success:
            return redirect('/hub/door/'+urllib.quote_plus(message))
        else:
            return redirect('/hub/door/'+urllib.quote_plus("Well that didn't work... " + message))
    else:
        return redirect('/hub/door/'+urllib.quote_plus("You have to pay to do that :)"))


@app.route('/hub/door/<msg>')
def open_door_res(msg):
    # Display the (URL-encoded) unlock result message.
    msg = urllib.unquote_plus(msg)
    return render_template("open_door.html", response=msg)


@app.route('/hub/door_code')
@login_required
def door_code():
    # Issue a fresh door code to an active member.
    if Member(session['username']).is_active():
        return render_template("door_code.html", code=new_code(session['username']))
    else:
        return render_template("open_door.html", response="You have to be an active member to do that.")


def unlock_code(code):
    """Look up a URL unlock code and, if it belongs to an active member,
    unlock the door. Returns (found, unlocked, message)."""
    for c in url_codes():
        # c rows: (code, username)
        if c[0] == code:
            if Member(c[1]).is_active():
                success, resp = unlock(c[1])
                return True, success, resp
            else:
                return False, False, "Member is not active."
    else:
        # for/else: loop exhausted without matching the code
        return False, False, "Code not found"


@app.route('/hub/open_door_code/<code>')
def open_door_code(code):
    # Confirmation page before redeeming a URL unlock code.
    return render_template('unlock_code.html', msg=None, code=code)


# NOTE(review): function name has a typo ("unlcok"); the route string is
# correct, so renaming would be safe but is out of scope for a doc pass.
@app.route('/hub/unlock_door_code', methods=['POST'])
def unlcok_door_code():
    code = request.form['code']
    found, unlocked, msg = unlock_code(code)
    if not (found and unlocked):
        msg = "Well that didn't work... " + msg
    return render_template('unlock_code.html', code=code, msg=msg)


@app.route('/hub/open_door_code_json/<code>')
def open_door_code_json(code):
    # JSON variant of code redemption (for automation); 404 on unknown code.
    found, unlocked, msg = unlock_code(code)
    if found:
        return jsonify({'unlocked':unlocked, 'message': msg})
    else:
        return abort(404)


@app.route('/hub/gen_url_code')
@login_required
def gen_url_code():
    # Mint a new URL unlock code for the member, then return to the door page.
    add_url_code(session['username'])
    return redirect('/hub/door')


@app.route('/hub/so_form')
@login_required
def so_form():
    # Pre-filled standing-order form with the hackspace's bank details.
    return render_template("standing_order.html", member=Member(session['username']), bank=bank)


@app.route('/hub/setup_payment')
@login_required
def setup_payment():
    return render_template("setup_payment.html", member=Member(session['username']), bank=bank)
bsd-2-clause
skosukhin/spack
var/spack/repos/builtin/packages/cask/package.py
1
2353
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, tgamblin@llnl.gov, All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
#
# Based on Homebrew's formula:
# https://github.com/Homebrew/homebrew-core/blob/master/Formula/cask.rb
#
from spack import *
from glob import glob


class Cask(Package):
    """Cask is a project management tool for Emacs Lisp to automate the
    package development cycle; development, dependencies, testing,
    building, packaging and more."""

    homepage = "http://cask.readthedocs.io/en/latest/"
    url = "https://github.com/cask/cask/archive/v0.7.4.tar.gz"

    version('0.8.1', '25196468a7ce634cfff14733678be6ba')
    # version 0.8.0 is broken
    version('0.7.4', 'c973a7db43bc980dd83759a5864a1260')

    depends_on('emacs', type=('build', 'run'))

    def install(self, spec, prefix):
        """Install cask into the Spack prefix.

        Cask has no build step: copy the launcher script, the elisp sources,
        the project templates and the doc files straight into place.
        """
        mkdirp(prefix.bin)
        install('bin/cask', prefix.bin)
        install_tree('templates', join_path(prefix, 'templates'))
        # All top-level elisp sources live next to the launcher.
        for el_file in glob("*.el"):
            install(el_file, prefix)
        for misc_file in ['COPYING', 'cask.png', 'README.md']:
            install(misc_file, prefix)

        # disable cask's automatic upgrading feature
        touch(join_path(prefix, ".no-upgrade"))
lgpl-2.1
JeffAMcGee/friendloc
friendloc/base/gisgraphy.py
1
2955
"""Thin client for a Gisgraphy geocoding service, used to resolve free-text
Twitter location strings into GeonamesPlace objects."""
import json
import re
import logging
import time

import restkit.errors
from restkit import Resource
from http_parser.http import NoMoreData

import friendloc
from friendloc.base.models import GeonamesPlace
from friendloc.base.utils import in_local_box


class GisgraphyResource(Resource):
    # Matches "lat, lng" decimal coordinate pairs, e.g. "30.639, -96.347".
    COORD_RE = re.compile('(-?\d+\.\d+), *(-?\d+\.\d+)')

    def __init__(self, settings=None):
        """Create a REST resource pointed at settings.gisgraphy_url
        (defaults to the friendloc-wide settings)."""
        if not settings:
            settings = friendloc.settings
        Resource.__init__(self,
                settings.gisgraphy_url,
                client_opts={'timeout':60},
        )
        # feature_id/feature_code -> median distance; populated via set_mdists.
        self._mdist = {}

    def set_mdists(self,mdists):
        # Replace the median-distance lookup table wholesale.
        self._mdist = mdists

    def mdist(self,gnp):
        """Median distance for a place: most specific key wins
        (feature_id, then feature_code, then the 'other' fallback)."""
        id = str(gnp.feature_id)
        if id in self._mdist:
            return self._mdist[id]
        if gnp.feature_code in self._mdist:
            return self._mdist[gnp.feature_code]
        return self._mdist.get('other',None)

    def fulltextsearch(self, q, **kwargs):
        """Run a full-text place search, retrying truncated responses with
        increasing backoff (the final 0 means "raise instead of sleeping")."""
        backoff_seconds = [15,60,240,0]
        for delay in backoff_seconds:
            try:
                # make the query lower case as workaround for "Portland, OR"
                r = self.get('fulltext/fulltextsearch',
                        q=q,
                        format="json",
                        spellchecking=False,
                        **kwargs)
                return json.loads(r.body_string())["response"]["docs"]
            except NoMoreData:
                # Truncated HTTP response: retry after the backoff delay.
                logging.error("incomplete response from gisgraphy")
                if delay==0:
                    raise
            except restkit.errors.RequestFailed as e:
                # Gisgraphy rejects queries it considers empty; treat as no hits.
                if 'Empty query' in e.message:
                    return []
                else:
                    raise
            time.sleep(delay)

    def twitter_loc(self, q):
        """Best-effort geocode of a free-text Twitter location string.

        Tries, in order: literal "lat, lng" coordinates, a cleaned-up
        full-text search, then each half of the string split on common
        separators (recursively). Returns a GeonamesPlace or None.
        """
        if not q:
            return None
        # check for "30.639, -96.347" style coordinates
        match = self.COORD_RE.search(q)
        if match:
            return GeonamesPlace(
                lat=float(match.group(1)),
                lng=float(match.group(2)),
                feature_code='COORD',
                mdist=self._mdist.get('COORD',None),
            )
        # try gisgraphy: normalize separators and strip solr special chars
        q = q.lower().strip().replace('-','/').replace(',',', ')
        q = ''.join(re.split('[|&!+]',q)).strip()
        if not q:
            return None
        results = self.fulltextsearch(q)
        # otherwise, return the best result
        if results:
            res = GeonamesPlace(results[0])
            res.mdist = self.mdist(res)
            return res
        # try splitting q in half
        for splitter in ('and','or','/'):
            parts = q.split(splitter)
            if len(parts)==2:
                for part in parts:
                    res = self.twitter_loc(part)
                    if res:
                        return res
        return None


if __name__ == '__main__':
    # Smoke test against a live gisgraphy instance.
    res = GisgraphyResource()
    f = res.fulltextsearch('Austin TX')
bsd-2-clause
maestrano/openerp
openerp/addons/association/__init__.py
886
1054
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
bowang/tensorflow
tensorflow/contrib/graph_editor/reroute.py
35
18201
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Various function for graph rerouting."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

from tensorflow.contrib.graph_editor import subgraph as _subgraph
from tensorflow.contrib.graph_editor import util as _util
from tensorflow.python.framework import ops as _tf_ops
from tensorflow.python.util.all_util import remove_undocumented

_allowed_symbols = [
    "swap_ts",
    "reroute_ts",
    "swap_inputs",
    "reroute_inputs",
    "swap_outputs",
    "reroute_outputs",
    "swap_ios",
    "reroute_ios",
    "remove_control_inputs",
    "add_control_inputs",
]


def _check_ts_compatibility(ts0, ts1):
  """Make sure the shape and dtype of the two tensor's lists are compatible.

  Args:
    ts0: an object convertible to a list of `tf.Tensor`.
    ts1: an object convertible to a list of `tf.Tensor`.
  Raises:
    ValueError: if any pair of tensors (same index in ts0 and ts1) have
      a dtype or a shape which is not compatible.
  """
  ts0 = _util.make_list_of_t(ts0)
  ts1 = _util.make_list_of_t(ts1)
  # Pairwise comparison requires equal lengths.
  if len(ts0) != len(ts1):
    raise ValueError("ts0 and ts1 have different sizes: {} != {}".format(
        len(ts0), len(ts1)))
  for t0, t1 in zip(ts0, ts1):
    # check dtype
    dtype0, dtype1 = t0.dtype, t1.dtype
    if not dtype0.is_compatible_with(dtype1):
      raise ValueError("Dtypes {} and {} are not compatible.".format(dtype0,
                                                                     dtype1))
    # check shape
    shape0, shape1 = t0.get_shape(), t1.get_shape()
    if not shape0.is_compatible_with(shape1):
      raise ValueError("Shapes {} and {} are not compatible.".format(shape0,
                                                                     shape1))


class _RerouteMode(object):
  """Enums for reroute's mode.

  swap: the end of tensors a and b are swapped.
  a2b:  the end of the tensor a are also rerouted to the end of the tensor b
    (the end of b is left dangling).
  b2a:  the end of the tensor b are also rerouted to the end of the tensor a
    (the end of a is left dangling).
  """
  swap, a2b, b2a = range(3)

  @classmethod
  def check(cls, mode):
    """Check swap mode.

    Args:
      mode: an integer representing one of the modes.
    Returns:
      A tuple `(a2b, b2a)` boolean indicating what rerouting needs doing.
    Raises:
      ValueError: if mode is outside the enum range.
    """
    if mode == cls.swap:
      return True, True
    elif mode == cls.b2a:
      return False, True
    elif mode == cls.a2b:
      return True, False
    else:
      raise ValueError("Unknown _RerouteMode: {}".format(mode))


def _reroute_t(t0, t1, consumers1, can_modify=None, cannot_modify=None):
  """Reroute the end of the tensors (t0,t1).

  Warning: this function is directly manipulating the internals of the
  `tf.Graph`.

  Args:
    t0: a tf.Tensor.
    t1: a tf.Tensor.
    consumers1: The consumers of t1 which needs to be rerouted.
    can_modify: iterable of operations which can be modified. Any operation
      outside within_ops will be left untouched by this function.
    cannot_modify: iterable of operations which cannot be modified.
      Any operation within cannot_modify will be left untouched by this
      function.
  Returns:
    The number of individual modifications made by the function.
  """
  nb_update_inputs = 0
  # Restrict the consumer set to what the caller allows us to touch.
  if can_modify is not None:
    consumers1 &= can_modify
  if cannot_modify is not None:
    consumers1 -= cannot_modify
  consumers1_indices = {}
  # Record the input slots of each consumer that currently read t1 BEFORE
  # mutating anything, so the updates below don't invalidate the scan.
  for consumer1 in consumers1:
    consumers1_indices[consumer1] = [i for i, t in enumerate(consumer1.inputs)
                                     if t is t1]
  for consumer1 in consumers1:
    for i in consumers1_indices[consumer1]:
      consumer1._update_input(i, t0)  # pylint: disable=protected-access
      nb_update_inputs += 1
  return nb_update_inputs


def _reroute_ts(ts0, ts1, mode, can_modify=None, cannot_modify=None):
  """Reroute the end of the tensors in each pair (t0,t1) in ts0 x ts1.

  This function is the back-bone of the Graph-Editor. It is essentially a thin
  wrapper on top of the tf.Operation._update_input.

  Given a pair of tensor t0, t1 in ts0 x ts1, this function re-route the end
  of t0 and t1 in three possible ways:
  1) The reroute mode is "a<->b" or "b<->a": the tensors' end are swapped. After
  this operation, the previous consumers of t0 are now consumers of t1 and
  vice-versa.
  2) The reroute mode is "a->b": the tensors' end of t0 are re-routed to the
  tensors's end of t1 (which are left dangling). After this operation, the
  previous consumers of t0 are still consuming t0 but the previous consumers of
  t1 are not also consuming t0. The tensor t1 has no consumer.
  3) The reroute mode is "b->a": this mode is the symmetric of the "a->b" mode.

  Note that this function is re-routing the end of two tensors, not the start.
  Re-routing the start of two tensors is not supported by this library. The
  reason for that is the following: TensorFlow, by design, creates a strong
  bond between an op and its output tensor. This Graph editor follows this
  design and treats an operation A and its generating tensors {t_i} as an
  entity which cannot be broken. In other words, an op cannot be detached from
  any of its output tensors, ever. But it is possible to detach an op from its
  input tensors, which is what this function concerns itself with.

  Warning: this function is directly manipulating the internals of the tf.Graph.

  Args:
    ts0: an object convertible to a list of `tf.Tensor`.
    ts1: an object convertible to a list of `tf.Tensor`.
    mode: what to do with those tensors: "a<->b" for swapping and
      "a->b" or "b->a" for one direction re-routing.
    can_modify: iterable of operations which can be modified. Any operation
      outside within_ops will be left untouched by this function.
    cannot_modify: iterable of operations which cannot be modified.
      Any operation within cannot_modify will be left untouched by this
      function.
  Returns:
    The number of individual modifications made by the function.
  Raises:
    TypeError: if `ts0` or `ts1` cannot be converted to a list of `tf.Tensor`.
    TypeError: if `can_modify` or `cannot_modify` is not `None` and cannot be
      converted to a list of `tf.Operation`.
  """
  a2b, b2a = _RerouteMode.check(mode)
  ts0 = _util.make_list_of_t(ts0)
  ts1 = _util.make_list_of_t(ts1)
  _check_ts_compatibility(ts0, ts1)
  if cannot_modify is not None:
    cannot_modify = frozenset(_util.make_list_of_op(cannot_modify))
  if can_modify is not None:
    can_modify = frozenset(_util.make_list_of_op(can_modify))
  nb_update_inputs = 0
  precomputed_consumers = []
  # precompute consumers to avoid issue with repeated tensors:
  for t0, t1 in zip(ts0, ts1):
    consumers0 = set(t0.consumers())
    consumers1 = set(t1.consumers())
    precomputed_consumers.append((consumers0, consumers1))
  for t0, t1, consumers in zip(ts0, ts1, precomputed_consumers):
    if t0 is t1:
      continue  # Silently ignore identical tensors.
    consumers0, consumers1 = consumers
    if a2b:
      nb_update_inputs += _reroute_t(t0, t1, consumers1, can_modify,
                                     cannot_modify)
    if b2a:
      nb_update_inputs += _reroute_t(t1, t0, consumers0, can_modify,
                                     cannot_modify)
  return nb_update_inputs


def swap_ts(ts0, ts1, can_modify=None, cannot_modify=None):
  """For each tensor's pair, swap the end of (t0,t1).

      B0 B1     B0 B1
      |  |  =>   X
      A0 A1     A0 A1

  Args:
    ts0: an object convertible to a list of `tf.Tensor`.
    ts1: an object convertible to a list of `tf.Tensor`.
    can_modify: iterable of operations which can be modified. Any operation
      outside within_ops will be left untouched by this function.
    cannot_modify: iterable of operations which cannot be modified.
      Any operation within cannot_modify will be left untouched by this
      function.
  Returns:
    The number of individual modifications made by the function.
  Raises:
    TypeError: if ts0 or ts1 cannot be converted to a list of tf.Tensor.
    TypeError: if can_modify or cannot_modify is not None and cannot be
      converted to a list of tf.Operation.
  """
  return _reroute_ts(ts0, ts1, _RerouteMode.swap, can_modify, cannot_modify)


def reroute_ts(ts0, ts1, can_modify=None, cannot_modify=None):
  """For each tensor's pair, replace the end of t1 by the end of t0.

      B0 B1     B0 B1
      |  |  =>  |/
      A0 A1     A0 A1

  The end of the tensors in ts1 are left dangling.

  Args:
    ts0: an object convertible to a list of `tf.Tensor`.
    ts1: an object convertible to a list of `tf.Tensor`.
    can_modify: iterable of operations which can be modified. Any operation
      outside within_ops will be left untouched by this function.
    cannot_modify: iterable of operations which cannot be modified. Any
      operation within cannot_modify will be left untouched by this function.
  Returns:
    The number of individual modifications made by the function.
  Raises:
    TypeError: if ts0 or ts1 cannot be converted to a list of tf.Tensor.
    TypeError: if can_modify or cannot_modify is not None and cannot be
      converted to a list of tf.Operation.
  """
  return _reroute_ts(ts0, ts1, _RerouteMode.a2b, can_modify, cannot_modify)


def _reroute_sgv_remap(sgv0, sgv1, mode):
  """Remap in place the inputs of two subgraph views to mimic the reroute.

  This function is meant to used by reroute_inputs only.

  Args:
    sgv0: the first subgraph to have its inputs remapped.
    sgv1: the second subgraph to have its inputs remapped.
    mode: reroute mode, see _reroute_ts(...).
  Raises:
    TypeError: if svg0 or svg1 are not SubGraphView.
    ValueError: if sgv0 and sgv1 do not belong to the same graph.
  """
  a2b, b2a = _RerouteMode.check(mode)
  if not isinstance(sgv0, _subgraph.SubGraphView):
    raise TypeError("Expected a SubGraphView, got {}".format(type(sgv0)))
  if not isinstance(sgv1, _subgraph.SubGraphView):
    raise TypeError("Expected a SubGraphView, got {}".format(type(sgv1)))
  _util.check_graphs(sgv0, sgv1)
  # Work on copies so a failure part-way through leaves sgv0/sgv1 untouched;
  # the results are committed at the end via _assign_from.
  sgv0_ = sgv0.copy()
  sgv1_ = sgv1.copy()
  # pylint: disable=protected-access
  if a2b and b2a:
    (sgv0_._input_ts, sgv1_._input_ts) = (sgv1_._input_ts, sgv0_._input_ts)
    (sgv0_._passthrough_ts, sgv1_._passthrough_ts) = (sgv1_._passthrough_ts,
                                                      sgv0_._passthrough_ts)
  elif a2b:
    sgv1_._input_ts = sgv0_._input_ts[:]
    sgv1_._passthrough_ts = sgv0_._passthrough_ts[:]
  elif b2a:
    sgv0_._input_ts = sgv1_._input_ts[:]
    sgv0_._passthrough_ts = sgv1_._passthrough_ts[:]
  # pylint: enable=protected-access

  # Update the passthrough outputs as well.
  def update_passthrough_outputs(a, b):
    # pylint: disable=protected-access
    for i, t in enumerate(b._output_ts):
      if t in a._passthrough_ts:
        ii = a._input_ts.index(t)
        b._output_ts[i] = b._input_ts[ii]
    # pylint: enable=protected-access

  if a2b:
    update_passthrough_outputs(sgv0_, sgv1_)
  if b2a:
    update_passthrough_outputs(sgv1_, sgv0_)

  # in-place
  # pylint: disable=protected-access
  sgv0._assign_from(sgv0_)
  sgv1._assign_from(sgv1_)
  # pylint: enable=protected-access


def _reroute_sgv_inputs(sgv0, sgv1, mode):
  """Re-route all the inputs of two subgraphs.

  Args:
    sgv0: the first subgraph to have its inputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    sgv1: the second subgraph to have its inputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    mode: reroute mode, see _reroute_ts(...).
  Returns:
    A tuple `(sgv0, sgv1)` of subgraph views with their inputs swapped.
      Note that the function argument sgv0 and sgv1 are also modified in place.
  Raises:
    StandardError: if sgv0 or sgv1 cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  sgv0 = _subgraph.make_view(sgv0)
  sgv1 = _subgraph.make_view(sgv1)
  _util.check_graphs(sgv0, sgv1)
  can_modify = sgv0.ops + sgv1.ops
  # also allow consumers of passthrough to be modified:
  can_modify += _util.get_consuming_ops(sgv0.passthroughs)
  can_modify += _util.get_consuming_ops(sgv1.passthroughs)
  _reroute_ts(sgv0.inputs, sgv1.inputs, mode, can_modify=can_modify)
  _reroute_sgv_remap(sgv0, sgv1, mode)
  return sgv0, sgv1


def _reroute_sgv_outputs(sgv0, sgv1, mode):
  """Re-route all the outputs of two operations.

  Args:
    sgv0: the first subgraph to have its outputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    sgv1: the second subgraph to have its outputs swapped. This argument is
      converted to a subgraph using the same rules than the function
      subgraph.make_view.
    mode: reroute mode, see _reroute_ts(...).
  Returns:
    A tuple `(sgv0, sgv1)` of subgraph views with their outputs swapped.
      Note that the function argument sgv0 and sgv1 are also modified in place.
  Raises:
    StandardError: if sgv0 or sgv1 cannot be converted to a SubGraphView using
      the same rules than the function subgraph.make_view.
  """
  sgv0 = _subgraph.make_view(sgv0)
  sgv1 = _subgraph.make_view(sgv1)
  _util.check_graphs(sgv0, sgv1)
  # The subgraphs' own ops must keep reading their internal tensors; only
  # external consumers get rerouted.
  cannot_modify = sgv0.ops + sgv1.ops
  _reroute_ts(sgv0.outputs, sgv1.outputs, mode, cannot_modify=cannot_modify)
  return sgv0, sgv1


def _reroute_sgv(sgv0, sgv1, mode):
  """Re-route both the inputs and the outputs of the two subgraph views.

  This involves swapping all the inputs/outputs of the two subgraph views.

  Args:
    sgv0: the first subgraph to be swapped. This argument is converted to a
      subgraph using the same rules than the function subgraph.make_view.
    sgv1: the second subgraph to be swapped. This argument is converted to a
      subgraph using the same rules than the function subgraph.make_view.
mode: reroute mode, see _reroute_ts(...). Returns: A tuple `(sgv0, sgv1)` of subgraph views with their outputs and inputs swapped. Note that the function argument sgv0 and sgv1 are also modified in place. Raises: StandardError: if sgv0 or sgv1 cannot be converted to a SubGraphView using the same rules than the function subgraph.make_view. """ _reroute_sgv_outputs(sgv0, sgv1, mode) _reroute_sgv_inputs(sgv0, sgv1, mode) return sgv0, sgv1 def swap_inputs(sgv0, sgv1): """Swap all the inputs of sgv0 and sgv1 (see reroute_inputs).""" return _reroute_sgv_inputs(sgv0, sgv1, _RerouteMode.swap) def reroute_inputs(sgv0, sgv1): """Re-route all the inputs of sgv0 to sgv1 (see reroute_inputs).""" return _reroute_sgv_inputs(sgv0, sgv1, _RerouteMode.a2b) def swap_outputs(sgv0, sgv1): """Swap all the outputs of sgv0 and sgv1 (see _reroute_outputs).""" return _reroute_sgv_outputs(sgv0, sgv1, _RerouteMode.swap) def reroute_outputs(sgv0, sgv1): """Re-route all the outputs of sgv0 to sgv1 (see _reroute_outputs).""" return _reroute_sgv_outputs(sgv0, sgv1, _RerouteMode.a2b) def swap_ios(sgv0, sgv1): """Swap the inputs and outputs of sgv1 to sgv0 (see _reroute).""" return _reroute_sgv(sgv0, sgv1, _RerouteMode.swap) def reroute_ios(sgv0, sgv1): """Re-route the inputs and outputs of sgv0 to sgv1 (see _reroute).""" return _reroute_sgv(sgv0, sgv1, _RerouteMode.a2b) def remove_control_inputs(op, cops): """Remove the control inputs cops from co. Warning: this function is directly manipulating the internals of the `tf.Graph`. Args: op: a `tf.Operation` from which to remove the control inputs. cops: an object convertible to a list of `tf.Operation`. Raises: TypeError: if op is not a `tf.Operation`. ValueError: if any cop in cops is not a control input of op. 
""" if not isinstance(op, _tf_ops.Operation): raise TypeError("Expected a tf.Operation, got: {}", type(op)) cops = _util.make_list_of_op(cops, allow_graph=False) for cop in cops: if cop not in op.control_inputs: raise ValueError("{} is not a control_input of {}".format(op.name, cop.name)) # pylint: disable=protected-access op._control_inputs = [cop for cop in op._control_inputs if cop not in cops] op._recompute_node_def() # pylint: enable=protected-access def add_control_inputs(op, cops): """Add the control inputs cops to op. Warning: this function is directly manipulating the internals of the tf.Graph. Args: op: a tf.Operation to which the control inputs are added. cops: an object convertible to a list of `tf.Operation`. Raises: TypeError: if op is not a tf.Operation ValueError: if any cop in cops is already a control input of op. """ if not isinstance(op, _tf_ops.Operation): raise TypeError("Expected a tf.Operation, got: {}", type(op)) cops = _util.make_list_of_op(cops, allow_graph=False) for cop in cops: if cop in op.control_inputs: raise ValueError("{} is already a control_input of {}".format(cop.name, op.name)) # pylint: disable=protected-access op._control_inputs += cops op._recompute_node_def() # pylint: enable=protected-access remove_undocumented(__name__, _allowed_symbols)
apache-2.0
esthermm/odoomrp-wip
purchase_homologation/models/__init__.py
8
1676
# -*- coding: utf-8 -*- ############################################################################## # # # OpenERP, Open Source Management Solution. # # # # @author Carlos Sánchez Cifuentes <csanchez@grupovermon.com> # # # # This program is free software: you can redistribute it and/or modify # # it under the terms of the GNU Affero General Public License as # # published by the Free Software Foundation, either version 3 of the # # License, or (at your option) any later version. # # # # This program is distributed in the hope that it will be useful, # # but WITHOUT ANY WARRANTY; without even the implied warranty of # # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # # GNU Affero General Public License for more details. # # # # You should have received a copy of the GNU Affero General Public License # # along with this program. If not, see <http://www.gnu.org/licenses/>. # # # ############################################################################## from . import purchase_homologation from . import purchase_order_line
agpl-3.0
waytai/odoo
addons/stock_dropshipping/__init__.py
223
1085
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import stock_dropshipping import wizard # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
faywong/FFPlayer
project/jni/python/src/Lib/test/test_dict.py
53
17651
import unittest from test import test_support import UserDict, random, string import gc, weakref class DictTest(unittest.TestCase): def test_constructor(self): # calling built-in types without argument must return empty self.assertEqual(dict(), {}) self.assert_(dict() is not {}) def test_literal_constructor(self): # check literal constructor for different sized dicts (to exercise the BUILD_MAP oparg for n in (0, 1, 6, 256, 400): items = [(''.join([random.choice(string.letters) for j in range(8)]), i) for i in range(n)] random.shuffle(items) dictliteral = '{' + ', '.join('%r: %d' % item for item in items) + '}' self.assertEqual(eval(dictliteral), dict(items)) def test_bool(self): self.assert_(not {}) self.assert_({1: 2}) self.assert_(bool({}) is False) self.assert_(bool({1: 2}) is True) def test_keys(self): d = {} self.assertEqual(d.keys(), []) d = {'a': 1, 'b': 2} k = d.keys() self.assert_(d.has_key('a')) self.assert_(d.has_key('b')) self.assertRaises(TypeError, d.keys, None) def test_values(self): d = {} self.assertEqual(d.values(), []) d = {1:2} self.assertEqual(d.values(), [2]) self.assertRaises(TypeError, d.values, None) def test_items(self): d = {} self.assertEqual(d.items(), []) d = {1:2} self.assertEqual(d.items(), [(1, 2)]) self.assertRaises(TypeError, d.items, None) def test_has_key(self): d = {} self.assert_(not d.has_key('a')) d = {'a': 1, 'b': 2} k = d.keys() k.sort() self.assertEqual(k, ['a', 'b']) self.assertRaises(TypeError, d.has_key) def test_contains(self): d = {} self.assert_(not ('a' in d)) self.assert_('a' not in d) d = {'a': 1, 'b': 2} self.assert_('a' in d) self.assert_('b' in d) self.assert_('c' not in d) self.assertRaises(TypeError, d.__contains__) def test_len(self): d = {} self.assertEqual(len(d), 0) d = {'a': 1, 'b': 2} self.assertEqual(len(d), 2) def test_getitem(self): d = {'a': 1, 'b': 2} self.assertEqual(d['a'], 1) self.assertEqual(d['b'], 2) d['c'] = 3 d['a'] = 4 self.assertEqual(d['c'], 3) self.assertEqual(d['a'], 4) del d['b'] 
self.assertEqual(d, {'a': 4, 'c': 3}) self.assertRaises(TypeError, d.__getitem__) class BadEq(object): def __eq__(self, other): raise Exc() def __hash__(self): return 24 d = {} d[BadEq()] = 42 self.assertRaises(KeyError, d.__getitem__, 23) class Exc(Exception): pass class BadHash(object): fail = False def __hash__(self): if self.fail: raise Exc() else: return 42 x = BadHash() d[x] = 42 x.fail = True self.assertRaises(Exc, d.__getitem__, x) def test_clear(self): d = {1:1, 2:2, 3:3} d.clear() self.assertEqual(d, {}) self.assertRaises(TypeError, d.clear, None) def test_update(self): d = {} d.update({1:100}) d.update({2:20}) d.update({1:1, 2:2, 3:3}) self.assertEqual(d, {1:1, 2:2, 3:3}) d.update() self.assertEqual(d, {1:1, 2:2, 3:3}) self.assertRaises((TypeError, AttributeError), d.update, None) class SimpleUserDict: def __init__(self): self.d = {1:1, 2:2, 3:3} def keys(self): return self.d.keys() def __getitem__(self, i): return self.d[i] d.clear() d.update(SimpleUserDict()) self.assertEqual(d, {1:1, 2:2, 3:3}) class Exc(Exception): pass d.clear() class FailingUserDict: def keys(self): raise Exc self.assertRaises(Exc, d.update, FailingUserDict()) class FailingUserDict: def keys(self): class BogonIter: def __init__(self): self.i = 1 def __iter__(self): return self def next(self): if self.i: self.i = 0 return 'a' raise Exc return BogonIter() def __getitem__(self, key): return key self.assertRaises(Exc, d.update, FailingUserDict()) class FailingUserDict: def keys(self): class BogonIter: def __init__(self): self.i = ord('a') def __iter__(self): return self def next(self): if self.i <= ord('z'): rtn = chr(self.i) self.i += 1 return rtn raise StopIteration return BogonIter() def __getitem__(self, key): raise Exc self.assertRaises(Exc, d.update, FailingUserDict()) class badseq(object): def __iter__(self): return self def next(self): raise Exc() self.assertRaises(Exc, {}.update, badseq()) self.assertRaises(ValueError, {}.update, [(1, 2, 3)]) def test_fromkeys(self): 
self.assertEqual(dict.fromkeys('abc'), {'a':None, 'b':None, 'c':None}) d = {} self.assert_(not(d.fromkeys('abc') is d)) self.assertEqual(d.fromkeys('abc'), {'a':None, 'b':None, 'c':None}) self.assertEqual(d.fromkeys((4,5),0), {4:0, 5:0}) self.assertEqual(d.fromkeys([]), {}) def g(): yield 1 self.assertEqual(d.fromkeys(g()), {1:None}) self.assertRaises(TypeError, {}.fromkeys, 3) class dictlike(dict): pass self.assertEqual(dictlike.fromkeys('a'), {'a':None}) self.assertEqual(dictlike().fromkeys('a'), {'a':None}) self.assert_(type(dictlike.fromkeys('a')) is dictlike) self.assert_(type(dictlike().fromkeys('a')) is dictlike) class mydict(dict): def __new__(cls): return UserDict.UserDict() ud = mydict.fromkeys('ab') self.assertEqual(ud, {'a':None, 'b':None}) self.assert_(isinstance(ud, UserDict.UserDict)) self.assertRaises(TypeError, dict.fromkeys) class Exc(Exception): pass class baddict1(dict): def __init__(self): raise Exc() self.assertRaises(Exc, baddict1.fromkeys, [1]) class BadSeq(object): def __iter__(self): return self def next(self): raise Exc() self.assertRaises(Exc, dict.fromkeys, BadSeq()) class baddict2(dict): def __setitem__(self, key, value): raise Exc() self.assertRaises(Exc, baddict2.fromkeys, [1]) # test fast path for dictionary inputs d = dict(zip(range(6), range(6))) self.assertEqual(dict.fromkeys(d, 0), dict(zip(range(6), [0]*6))) def test_copy(self): d = {1:1, 2:2, 3:3} self.assertEqual(d.copy(), {1:1, 2:2, 3:3}) self.assertEqual({}.copy(), {}) self.assertRaises(TypeError, d.copy, None) def test_get(self): d = {} self.assert_(d.get('c') is None) self.assertEqual(d.get('c', 3), 3) d = {'a' : 1, 'b' : 2} self.assert_(d.get('c') is None) self.assertEqual(d.get('c', 3), 3) self.assertEqual(d.get('a'), 1) self.assertEqual(d.get('a', 3), 1) self.assertRaises(TypeError, d.get) self.assertRaises(TypeError, d.get, None, None, None) def test_setdefault(self): # dict.setdefault() d = {} self.assert_(d.setdefault('key0') is None) d.setdefault('key0', []) 
self.assert_(d.setdefault('key0') is None) d.setdefault('key', []).append(3) self.assertEqual(d['key'][0], 3) d.setdefault('key', []).append(4) self.assertEqual(len(d['key']), 2) self.assertRaises(TypeError, d.setdefault) class Exc(Exception): pass class BadHash(object): fail = False def __hash__(self): if self.fail: raise Exc() else: return 42 x = BadHash() d[x] = 42 x.fail = True self.assertRaises(Exc, d.setdefault, x, []) def test_popitem(self): # dict.popitem() for copymode in -1, +1: # -1: b has same structure as a # +1: b is a.copy() for log2size in range(12): size = 2**log2size a = {} b = {} for i in range(size): a[repr(i)] = i if copymode < 0: b[repr(i)] = i if copymode > 0: b = a.copy() for i in range(size): ka, va = ta = a.popitem() self.assertEqual(va, int(ka)) kb, vb = tb = b.popitem() self.assertEqual(vb, int(kb)) self.assert_(not(copymode < 0 and ta != tb)) self.assert_(not a) self.assert_(not b) d = {} self.assertRaises(KeyError, d.popitem) def test_pop(self): # Tests for pop with specified key d = {} k, v = 'abc', 'def' d[k] = v self.assertRaises(KeyError, d.pop, 'ghi') self.assertEqual(d.pop(k), v) self.assertEqual(len(d), 0) self.assertRaises(KeyError, d.pop, k) # verify longs/ints get same value when key > 32 bits (for 64-bit archs) # see SF bug #689659 x = 4503599627370496L y = 4503599627370496 h = {x: 'anything', y: 'something else'} self.assertEqual(h[x], h[y]) self.assertEqual(d.pop(k, v), v) d[k] = v self.assertEqual(d.pop(k, 1), v) self.assertRaises(TypeError, d.pop) class Exc(Exception): pass class BadHash(object): fail = False def __hash__(self): if self.fail: raise Exc() else: return 42 x = BadHash() d[x] = 42 x.fail = True self.assertRaises(Exc, d.pop, x) def test_mutatingiteration(self): d = {} d[1] = 1 try: for i in d: d[i+1] = 1 except RuntimeError: pass else: self.fail("changing dict size during iteration doesn't raise Error") def test_repr(self): d = {} self.assertEqual(repr(d), '{}') d[1] = 2 self.assertEqual(repr(d), '{1: 2}') d 
= {} d[1] = d self.assertEqual(repr(d), '{1: {...}}') class Exc(Exception): pass class BadRepr(object): def __repr__(self): raise Exc() d = {1: BadRepr()} self.assertRaises(Exc, repr, d) def test_le(self): self.assert_(not ({} < {})) self.assert_(not ({1: 2} < {1L: 2L})) class Exc(Exception): pass class BadCmp(object): def __eq__(self, other): raise Exc() def __hash__(self): return 42 d1 = {BadCmp(): 1} d2 = {1: 1} try: d1 < d2 except Exc: pass else: self.fail("< didn't raise Exc") def test_missing(self): # Make sure dict doesn't have a __missing__ method self.assertEqual(hasattr(dict, "__missing__"), False) self.assertEqual(hasattr({}, "__missing__"), False) # Test several cases: # (D) subclass defines __missing__ method returning a value # (E) subclass defines __missing__ method raising RuntimeError # (F) subclass sets __missing__ instance variable (no effect) # (G) subclass doesn't define __missing__ at a all class D(dict): def __missing__(self, key): return 42 d = D({1: 2, 3: 4}) self.assertEqual(d[1], 2) self.assertEqual(d[3], 4) self.assert_(2 not in d) self.assert_(2 not in d.keys()) self.assertEqual(d[2], 42) class E(dict): def __missing__(self, key): raise RuntimeError(key) e = E() try: e[42] except RuntimeError, err: self.assertEqual(err.args, (42,)) else: self.fail("e[42] didn't raise RuntimeError") class F(dict): def __init__(self): # An instance variable __missing__ should have no effect self.__missing__ = lambda key: None f = F() try: f[42] except KeyError, err: self.assertEqual(err.args, (42,)) else: self.fail("f[42] didn't raise KeyError") class G(dict): pass g = G() try: g[42] except KeyError, err: self.assertEqual(err.args, (42,)) else: self.fail("g[42] didn't raise KeyError") def test_tuple_keyerror(self): # SF #1576657 d = {} try: d[(1,)] except KeyError, e: self.assertEqual(e.args, ((1,),)) else: self.fail("missing KeyError") def test_bad_key(self): # Dictionary lookups should fail if __cmp__() raises an exception. 
class CustomException(Exception): pass class BadDictKey: def __hash__(self): return hash(self.__class__) def __cmp__(self, other): if isinstance(other, self.__class__): raise CustomException return other d = {} x1 = BadDictKey() x2 = BadDictKey() d[x1] = 1 for stmt in ['d[x2] = 2', 'z = d[x2]', 'x2 in d', 'd.has_key(x2)', 'd.get(x2)', 'd.setdefault(x2, 42)', 'd.pop(x2)', 'd.update({x2: 2})']: try: exec stmt in locals() except CustomException: pass else: self.fail("Statement didn't raise exception") def test_resize1(self): # Dict resizing bug, found by Jack Jansen in 2.2 CVS development. # This version got an assert failure in debug build, infinite loop in # release build. Unfortunately, provoking this kind of stuff requires # a mix of inserts and deletes hitting exactly the right hash codes in # exactly the right order, and I can't think of a randomized approach # that would be *likely* to hit a failing case in reasonable time. d = {} for i in range(5): d[i] = i for i in range(5): del d[i] for i in range(5, 9): # i==8 was the problem d[i] = i def test_resize2(self): # Another dict resizing bug (SF bug #1456209). # This caused Segmentation faults or Illegal instructions. 
class X(object): def __hash__(self): return 5 def __eq__(self, other): if resizing: d.clear() return False d = {} resizing = False d[X()] = 1 d[X()] = 2 d[X()] = 3 d[X()] = 4 d[X()] = 5 # now trigger a resize resizing = True d[9] = 6 def test_empty_presized_dict_in_freelist(self): # Bug #3537: if an empty but presized dict with a size larger # than 7 was in the freelist, it triggered an assertion failure try: d = {'a': 1/0, 'b': None, 'c': None, 'd': None, 'e': None, 'f': None, 'g': None, 'h': None} except ZeroDivisionError: pass d = {} def test_container_iterator(self): # Bug #3680: tp_traverse was not implemented for dictiter objects class C(object): pass iterators = (dict.iteritems, dict.itervalues, dict.iterkeys) for i in iterators: obj = C() ref = weakref.ref(obj) container = {obj: 1} obj.x = i(container) del obj, container gc.collect() self.assert_(ref() is None, "Cycle was not collected") from test import mapping_tests class GeneralMappingTests(mapping_tests.BasicTestMappingProtocol): type2test = dict class Dict(dict): pass class SubclassMappingTests(mapping_tests.BasicTestMappingProtocol): type2test = Dict def test_main(): test_support.run_unittest( DictTest, GeneralMappingTests, SubclassMappingTests, ) if __name__ == "__main__": test_main()
lgpl-2.1
simodalla/pg_harep_lab
ssh/__init__.py
1
1370
# -*- coding: utf-8 -*- from __future__ import unicode_literals, absolute_import import os from fabric import colors from fabric.api import task, run, cd from fabric.contrib.files import exists, contains, append @task def prepare_ssh_autologin(ssh_pub_key='~/.ssh/id_rsa.pub'): """Prepare server for ssh autologin with ssh ke.""" ssh_dir = '~/.ssh' authorized_keys = 'authorized_keys' if not exists(ssh_dir): run('mkdir %s' % ssh_dir) with cd(ssh_dir): if not exists(authorized_keys): run('touch %s && chmod 600 %s' % (authorized_keys, authorized_keys)) if not os.path.exists(os.path.expanduser(ssh_pub_key)): print(colors.red('Public key file "%s" not' ' exist.' % ssh_pub_key)) return False ssh_pub_key_string = open( os.path.expanduser(ssh_pub_key), 'r').readline() if not contains(authorized_keys, ssh_pub_key_string): append(authorized_keys, ssh_pub_key_string) print(colors.green('Public key successfully added' ' in %s.' % authorized_keys)) else: print(colors.magenta('Public key already in %s.' % authorized_keys)) run('chmod 700 %s' % ssh_dir) return True
gpl-3.0
SamiHiltunen/invenio-groups
setup.py
2
4258
# -*- coding: utf-8 -*- # # This file is part of Invenio. # Copyright (C) 2015 CERN # # Invenio is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of the # License, or (at your option) any later version. # # Invenio is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Invenio; if not, write to the Free Software Foundation, # Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """Invenio module that adds support for user groups.""" import os import sys from setuptools import setup from setuptools.command.test import test as TestCommand readme = open('README.rst').read() history = open('CHANGES.rst').read() requirements = [ 'blinker>=1.3.0', 'Flask-Breadcrumbs>=0.2', 'Flask-Login>=0.2.7', 'Flask-Menu>=0.2', 'Flask-Registry>=0.2', 'Flask>=0.10.1', 'invenio-accounts>=0.1.2', 'invenio-base>=0.2.1', 'mock>=1.0.1', 'six>=1.7.2', 'SQLAlchemy-Utils[encrypted]>=0.30.1', 'SQLAlchemy>=1.0', 'wtforms-alchemy>=0.13.1', 'WTForms>=2.0.1', 'invenio-upgrader>=0.1.0', ] test_requirements = [ 'Flask-Testing>=0.4.1', 'coverage>=3.7.1', 'pytest-cov>=1.8.1', 'pytest-pep8>=1.0.6', 'pytest>=2.7.0', 'unittest2>=1.1.0', ] class PyTest(TestCommand): """PyTest Test.""" user_options = [('pytest-args=', 'a', "Arguments to pass to py.test")] def initialize_options(self): """Init pytest.""" TestCommand.initialize_options(self) self.pytest_args = [] try: from ConfigParser import ConfigParser except ImportError: from configparser import ConfigParser config = ConfigParser() config.read('pytest.ini') self.pytest_args = config.get('pytest', 'addopts').split(' ') def finalize_options(self): """Finalize pytest.""" 
TestCommand.finalize_options(self) self.test_args = [] self.test_suite = True def run_tests(self): """Run tests.""" # import here, cause outside the eggs aren't loaded import pytest import _pytest.config pm = _pytest.config.get_plugin_manager() pm.consider_setuptools_entrypoints() errno = pytest.main(self.pytest_args) sys.exit(errno) # Get the version string. Cannot be done with import! g = {} with open(os.path.join('invenio_groups', 'version.py'), 'rt') as fp: exec(fp.read(), g) version = g['__version__'] setup( name='invenio-groups', version=version, description=__doc__, long_description=readme + '\n\n' + history, keywords='invenio groups', license='GPLv2', author='CERN', author_email='info@invenio-software.org', url='https://github.com/inveniosoftware/invenio-groups', packages=[ 'invenio_groups', ], zip_safe=False, include_package_data=True, platforms='any', install_requires=requirements, extras_require={ 'docs': [ 'Sphinx>=1.3', 'sphinx_rtd_theme>=0.1.7', ], 'tests': test_requirements, }, classifiers=[ 'Environment :: Web Environment', 'Intended Audience :: Developers', 'License :: OSI Approved :: GNU General Public License v2 (GPLv2)', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Software Development :: Libraries :: Python Modules', "Programming Language :: Python :: 2", # 'Programming Language :: Python :: 2.6', 'Programming Language :: Python :: 2.7', # 'Programming Language :: Python :: 3', # 'Programming Language :: Python :: 3.3', # 'Programming Language :: Python :: 3.4', 'Development Status :: 1 - Planning', ], tests_require=test_requirements, cmdclass={'test': PyTest}, )
gpl-2.0
nowopen/scrapy
tests/mockserver.py
110
6622
def getarg(request, name, default=None, type=str):
    """Fetch query argument *name* from *request*, cast with *type*.

    Returns *default* when the argument is absent.  (``type`` keeps its
    historical name for keyword callers, even though it shadows the builtin.)
    """
    values = request.args.get(name)
    if values is None:
        return default
    return type(values[0])
"""</body>""" request.write(s) request.finish() class Delay(LeafResource): def render_GET(self, request): n = getarg(request, "n", 1, type=float) b = getarg(request, "b", 1, type=int) if b: # send headers now and delay body request.write('') self.deferRequest(request, n, self._delayedRender, request, n) return NOT_DONE_YET def _delayedRender(self, request, n): request.write("Response delayed for %0.3f seconds\n" % n) request.finish() class Status(LeafResource): def render_GET(self, request): n = getarg(request, "n", 200, type=int) request.setResponseCode(n) return "" class Raw(LeafResource): def render_GET(self, request): request.startedWriting = 1 self.deferRequest(request, 0, self._delayedRender, request) return NOT_DONE_YET render_POST = render_GET def _delayedRender(self, request): raw = getarg(request, 'raw', 'HTTP 1.1 200 OK\n') request.startedWriting = 1 request.write(raw) request.channel.transport.loseConnection() request.finish() class Echo(LeafResource): def render_GET(self, request): output = { 'headers': dict(request.requestHeaders.getAllRawHeaders()), 'body': request.content.read(), } return json.dumps(output) class Partial(LeafResource): def render_GET(self, request): request.setHeader("Content-Length", "1024") self.deferRequest(request, 0, self._delayedRender, request) return NOT_DONE_YET def _delayedRender(self, request): request.write("partial content\n") request.finish() class Drop(Partial): def _delayedRender(self, request): abort = getarg(request, "abort", 0, type=int) request.write("this connection will be dropped\n") tr = request.channel.transport try: if abort and hasattr(tr, 'abortConnection'): tr.abortConnection() else: tr.loseConnection() finally: request.finish() class Root(Resource): def __init__(self): Resource.__init__(self) self.putChild("status", Status()) self.putChild("follow", Follow()) self.putChild("delay", Delay()) self.putChild("partial", Partial()) self.putChild("drop", Drop()) self.putChild("raw", Raw()) 
self.putChild("echo", Echo()) if six.PY2 and twisted_version > (12, 3, 0): from twisted.web.test.test_webclient import PayloadResource from twisted.web.server import GzipEncoderFactory from twisted.web.resource import EncodingResourceWrapper self.putChild('payload', PayloadResource()) self.putChild("xpayload", EncodingResourceWrapper(PayloadResource(), [GzipEncoderFactory()])) def getChild(self, name, request): return self def render(self, request): return 'Scrapy mock HTTP server\n' class MockServer(): def __enter__(self): from scrapy.utils.test import get_testenv self.proc = Popen([sys.executable, '-u', '-m', 'tests.mockserver'], stdout=PIPE, env=get_testenv()) self.proc.stdout.readline() return self def __exit__(self, exc_type, exc_value, traceback): self.proc.kill() self.proc.wait() time.sleep(0.2) if __name__ == "__main__": root = Root() factory = Site(root) httpPort = reactor.listenTCP(8998, factory) contextFactory = ssl.DefaultOpenSSLContextFactory( os.path.join(os.path.dirname(__file__), 'keys/cert.pem'), os.path.join(os.path.dirname(__file__), 'keys/cert.pem'), ) httpsPort = reactor.listenSSL(8999, factory, contextFactory) def print_listening(): httpHost = httpPort.getHost() httpsHost = httpsPort.getHost() print("Mock server running at http://%s:%d and https://%s:%d" % ( httpHost.host, httpHost.port, httpsHost.host, httpsHost.port)) reactor.callWhenRunning(print_listening) reactor.run()
bsd-3-clause
sznekol/django-cms
cms/tests/static_placeholder.py
13
9795
# -*- coding: utf-8 -*- from __future__ import with_statement import json from django.contrib.admin.sites import site from django.template import Context from django.template.base import Template from django.utils.encoding import force_text from cms.api import add_plugin from cms.constants import PLUGIN_MOVE_ACTION, PLUGIN_COPY_ACTION from cms.models import StaticPlaceholder, Placeholder, CMSPlugin from cms.tests.plugins import PluginsTestBaseCase from cms.utils.urlutils import admin_reverse URL_CMS_MOVE_PLUGIN = u'/en/admin/cms/page/%d/move-plugin/' class StaticPlaceholderTestCase(PluginsTestBaseCase): @property def admin_class(self): return site._registry[StaticPlaceholder] def fill_placeholder(self, placeholder=None): if placeholder is None: placeholder = Placeholder(slot=u"some_slot") placeholder.save() # a good idea, if not strictly necessary # plugin in placeholder plugin_1 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"01", ) plugin_1.save() # IMPORTANT: plugins must be reloaded, before they can be assigned # as a parent. Otherwise, the MPTT structure doesn't seem to rebuild # properly. 
# child of plugin_1 plugin_2 = add_plugin(placeholder, u"TextPlugin", u"en", body=u"02", ) plugin_1 = self.reload(plugin_1) plugin_2.parent = plugin_1 plugin_2.save() return placeholder def get_admin(self): usr = self._create_user("admin", True, True) return usr def test_template_creation(self): self.assertObjectDoesNotExist(StaticPlaceholder.objects.all(), code='foobar') self.assertObjectDoesNotExist(Placeholder.objects.all(), slot='foobar') t = Template('{% load cms_tags %}{% static_placeholder "foobar" %}') t.render(self.get_context('/')) self.assertObjectExist(StaticPlaceholder.objects.all(), code='foobar', creation_method=StaticPlaceholder.CREATION_BY_TEMPLATE) self.assertEqual(Placeholder.objects.filter(slot='foobar').count(), 2) def test_empty(self): self.assertObjectDoesNotExist(StaticPlaceholder.objects.all(), code='foobar') self.assertObjectDoesNotExist(Placeholder.objects.all(), slot='foobar') t = Template('{% load cms_tags %}{% static_placeholder "foobar" or %}No Content{% endstatic_placeholder %}') rendered = t.render(self.get_context('/')) self.assertIn("No Content", rendered) t = Template('{% load cms_tags %}{% static_placeholder "" %}') rendered = t.render(self.get_context('/')) self.assertEqual("", rendered) t = Template('{% load cms_tags %}{% static_placeholder code or %}No Content{% endstatic_placeholder %}') rendered = t.render(Context({'code': StaticPlaceholder.objects.all()[0]})) self.assertIn("No Content", rendered) for p in Placeholder.objects.all(): add_plugin(p, 'TextPlugin', 'en', body='test') t = Template('{% load cms_tags %}{% static_placeholder "foobar" or %}No Content{% endstatic_placeholder %}') rendered = t.render(self.get_context('/')) self.assertNotIn("No Content", rendered) self.assertEqual(StaticPlaceholder.objects.filter(site_id__isnull=True, code='foobar').count(), 1) def test_local(self): self.assertObjectDoesNotExist(StaticPlaceholder.objects.all(), code='foobar') self.assertObjectDoesNotExist(Placeholder.objects.all(), 
slot='foobar') t = Template('{% load cms_tags %}{% static_placeholder "foobar" site or %}No Content{% endstatic_placeholder %}') rendered = t.render(self.get_context('/')) self.assertIn("No Content", rendered) for p in Placeholder.objects.all(): add_plugin(p, 'TextPlugin', 'en', body='test') rendered = t.render(self.get_context('/')) self.assertNotIn("No Content", rendered) self.assertEqual(StaticPlaceholder.objects.filter(site_id__isnull=False, code='foobar').count(), 1) def test_publish_stack(self): static_placeholder = StaticPlaceholder.objects.create(name='foo', code='bar', site_id=1) self.fill_placeholder(static_placeholder.draft) static_placeholder.dirty = True static_placeholder.save() self.assertEqual(static_placeholder.draft.cmsplugin_set.all().count(), 2) self.assertEqual(static_placeholder.public.cmsplugin_set.all().count(), 0) with self.login_user_context(self.get_superuser()): response = self.client.post('%s?statics=%s' % (admin_reverse("cms_page_publish_page", args=[1, 'en']), static_placeholder.pk)) self.assertEqual(response.status_code, 302) def test_permissions(self): static_placeholder = StaticPlaceholder.objects.create(name='foo', code='bar', site_id=1) request = self.get_request() request.user = self._create_user('user_a', is_staff=True, is_superuser=False, permissions=['change_staticplaceholder']) self.assertTrue( static_placeholder.has_change_permission(request) ) self.assertFalse( static_placeholder.has_publish_permission(request) ) request.user = self._create_user('user_b', is_staff=True, is_superuser=False, permissions=['change_staticplaceholder', 'publish_page']) self.assertTrue( static_placeholder.has_change_permission(request) ) self.assertTrue( static_placeholder.has_publish_permission(request) ) request.user = self.get_superuser() self.assertTrue( static_placeholder.has_change_permission(request) ) self.assertTrue( static_placeholder.has_publish_permission(request) ) def test_move_plugin(self): static_placeholder_source = 
StaticPlaceholder.objects.create(name='foobar', code='foobar', site_id=1) static_placeholder_target = StaticPlaceholder.objects.create(name='foofoo', code='foofoo', site_id=1) sourceplugin = add_plugin(static_placeholder_source.draft, 'TextPlugin', 'en', body='test') plugin_class = sourceplugin.get_plugin_class_instance() expected = {'reload': plugin_class.requires_reload(PLUGIN_MOVE_ACTION)} admin = self.get_admin() with self.login_user_context(admin): request = self.get_request(post_data={'plugin_id': sourceplugin.pk, 'placeholder_id': static_placeholder_target.draft.id, 'plugin_parent': '', 'plugin_language': 'en'}) response = self.admin_class.move_plugin(request) self.assertEqual(response.status_code, 200) self.assertEqual(json.loads(response.content.decode('utf8')), expected) source = StaticPlaceholder.objects.get(pk=static_placeholder_source.pk) target = StaticPlaceholder.objects.get(pk=static_placeholder_target.pk) self.assertTrue(source.dirty) self.assertTrue(target.dirty) def test_copy_plugin(self): static_placeholder_source = StaticPlaceholder.objects.create(name='foobar', code='foobar', site_id=1) static_placeholder_target = StaticPlaceholder.objects.create(name='foofoo', code='foofoo', site_id=1) sourceplugin = add_plugin(static_placeholder_source.draft, 'TextPlugin', 'en', body='test source') targetplugin = add_plugin(static_placeholder_target.draft, 'TextPlugin', 'en', body='test dest') StaticPlaceholder.objects.filter(pk=static_placeholder_source.pk).update(dirty=False) plugin_class = sourceplugin.get_plugin_class_instance() admin = self.get_admin() with self.login_user_context(admin): request = self.get_request(post_data={ 'source_language': 'en', 'source_placeholder_id': static_placeholder_source.draft.pk, 'source_plugin_id': sourceplugin.pk, 'target_language': 'en', 'target_placeholder_id': static_placeholder_target.draft.pk, 'target_plugin_id': targetplugin.pk, }) response = self.admin_class.copy_plugins(request) # generate the expected response 
plugin_list = CMSPlugin.objects.filter( language='en', placeholder_id=static_placeholder_target.draft.pk).order_by( 'depth', 'position') reduced_list = [] for plugin in plugin_list: reduced_list.append( { 'id': plugin.pk, 'type': plugin.plugin_type, 'parent': plugin.parent_id, 'position': plugin.position, 'desc': force_text(plugin.get_short_description()), 'language': plugin.language, 'placeholder_id': static_placeholder_target.draft.pk } ) expected = json.loads( json.dumps({'plugin_list': reduced_list, 'reload': plugin_class.requires_reload(PLUGIN_COPY_ACTION)})) self.assertEqual(response.status_code, 200) self.assertEqual(json.loads(response.content.decode('utf8')), expected) # Check dirty bit source = StaticPlaceholder.objects.get(pk=static_placeholder_source.pk) target = StaticPlaceholder.objects.get(pk=static_placeholder_target.pk) self.assertFalse(source.dirty) self.assertTrue(target.dirty) def test_create_by_admin(self): url = admin_reverse("cms_staticplaceholder_add") with self.login_user_context(self.get_superuser()): response = self.client.post(url, data={'name': 'Name', 'code': 'content'}) self.assertEqual(response.status_code, 302)
bsd-3-clause
digwanderlust/pants
tests/python/pants_test/backend/jvm/targets/test_jvm_binary.py
1
9783
# coding=utf-8 # Copyright 2015 Pants project contributors (see CONTRIBUTORS.md). # Licensed under the Apache License, Version 2.0 (see LICENSE). from __future__ import (absolute_import, division, generators, nested_scopes, print_function, unicode_literals, with_statement) import unittest from textwrap import dedent from pants.backend.jvm.register import build_file_aliases as register_jvm from pants.backend.jvm.targets.exclude import Exclude from pants.backend.jvm.targets.jvm_binary import (Duplicate, JarRules, JvmBinary, ManifestEntries, Skip) from pants.base.address import BuildFileAddress from pants.base.exceptions import TargetDefinitionException from pants.base.payload_field import FingerprintedField from pants.base.target import Target from pants_test.base_test import BaseTest class JarRulesTest(unittest.TestCase): def test_jar_rule(self): dup_rule = Duplicate('foo', Duplicate.REPLACE) self.assertEquals('Duplicate(apply_pattern=foo, action=REPLACE)', repr(dup_rule)) skip_rule = Skip('foo') self.assertEquals('Skip(apply_pattern=foo)', repr(skip_rule)) def test_invalid_apply_pattern(self): with self.assertRaisesRegexp(ValueError, r'The supplied apply_pattern is not a string'): Skip(None) with self.assertRaisesRegexp(ValueError, r'The supplied apply_pattern is not a string'): Duplicate(None, Duplicate.SKIP) with self.assertRaisesRegexp(ValueError, r'The supplied apply_pattern: \) is not a valid'): Skip(r')') with self.assertRaisesRegexp(ValueError, r'The supplied apply_pattern: \) is not a valid'): Duplicate(r')', Duplicate.SKIP) def test_bad_action(self): with self.assertRaisesRegexp(ValueError, r'The supplied action must be one of'): Duplicate('foo', None) def test_duplicate_error(self): with self.assertRaisesRegexp(Duplicate.Error, r'Duplicate entry encountered for path foo'): raise Duplicate.Error('foo') def test_default(self): jar_rules = JarRules.default() self.assertTrue(4, len(jar_rules.rules)) for rule in jar_rules.rules: 
self.assertTrue(rule.apply_pattern.pattern.startswith(r'^META-INF')) def test_set_bad_default(self): with self.assertRaisesRegexp(ValueError, r'The default rules must be a JarRules'): JarRules.set_default(None) class JvmBinaryTest(BaseTest): @property def alias_groups(self): return register_jvm() def test_simple(self): self.add_to_build_file('BUILD', dedent(''' jvm_binary(name='foo', main='com.example.Foo', basename='foo-base', ) ''')) target = self.target('//:foo') self.assertEquals('com.example.Foo', target.main) self.assertEquals('com.example.Foo', target.payload.main) self.assertEquals('foo-base', target.basename) self.assertEquals('foo-base', target.payload.basename) self.assertEquals([], target.deploy_excludes) self.assertEquals([], target.payload.deploy_excludes) self.assertEquals(JarRules.default(), target.deploy_jar_rules) self.assertEquals(JarRules.default(), target.payload.deploy_jar_rules) self.assertEquals({}, target.payload.manifest_entries.entries); def test_default_base(self): self.add_to_build_file('BUILD', dedent(''' jvm_binary(name='foo', main='com.example.Foo', ) ''')) target = self.target('//:foo') self.assertEquals('foo', target.basename) def test_deploy_jar_excludes(self): self.add_to_build_file('BUILD', dedent(''' jvm_binary(name='foo', main='com.example.Foo', deploy_excludes=[exclude(org='example.com', name='foo-lib')], ) ''')) target = self.target('//:foo') self.assertEquals([Exclude(org='example.com', name='foo-lib')], target.deploy_excludes) def test_deploy_jar_rules(self): self.add_to_build_file('BUILD', dedent(''' jvm_binary(name='foo', main='com.example.Foo', deploy_jar_rules=jar_rules([Duplicate('foo', Duplicate.SKIP)], default_dup_action=Duplicate.FAIL) ) ''')) target = self.target('//:foo') jar_rules = target.deploy_jar_rules self.assertEquals(1, len(jar_rules.rules)) self.assertEquals('foo', jar_rules.rules[0].apply_pattern.pattern) self.assertEquals(repr(Duplicate.SKIP), repr(jar_rules.rules[0].action)) # <object object at 0x...> 
self.assertEquals(Duplicate.FAIL, jar_rules.default_dup_action) def test_bad_source_declaration(self): build_file = self.add_to_build_file('BUILD', dedent(''' jvm_binary(name='foo', main='com.example.Foo', source=['foo.py'], ) ''')) with self.assertRaisesRegexp(TargetDefinitionException, r'Invalid target JvmBinary.*foo.*source must be a single'): self.build_graph.inject_address_closure(BuildFileAddress(build_file, 'foo')) def test_bad_sources_declaration(self): with self.assertRaisesRegexp(Target.IllegalArgument, r'jvm_binary only supports a single "source" argument'): self.make_target('foo:foo', target_type=JvmBinary, main='com.example.Foo', sources=['foo.py']) def test_bad_main_declaration(self): build_file = self.add_to_build_file('BUILD', dedent(''' jvm_binary(name='bar', main=['com.example.Bar'], ) ''')) with self.assertRaisesRegexp(TargetDefinitionException, r'Invalid target JvmBinary.*bar.*main must be a fully'): self.build_graph.inject_address_closure(BuildFileAddress(build_file, 'bar')) def test_bad_jar_rules(self): build_file = self.add_to_build_file('BUILD', dedent(''' jvm_binary(name='foo', main='com.example.Foo', deploy_jar_rules='invalid', ) ''')) with self.assertRaisesRegexp(TargetDefinitionException, r'Invalid target JvmBinary.*foo.*' r'deploy_jar_rules must be a JarRules specification. 
got str'): self.build_graph.inject_address_closure(BuildFileAddress(build_file, 'foo')) def _assert_fingerprints_not_equal(self, fields): for field in fields: for other_field in fields: if field == other_field: continue self.assertNotEquals(field.fingerprint(), other_field.fingerprint()) def test_jar_rules_field(self): field1 = FingerprintedField(JarRules(rules=[Duplicate('foo', Duplicate.SKIP)])) field1_same = FingerprintedField(JarRules(rules=[Duplicate('foo', Duplicate.SKIP)])) field2 = FingerprintedField(JarRules(rules=[Duplicate('foo', Duplicate.CONCAT)])) field3 = FingerprintedField(JarRules(rules=[Duplicate('bar', Duplicate.SKIP)])) field4 = FingerprintedField(JarRules(rules=[Duplicate('foo', Duplicate.SKIP), Duplicate('bar', Duplicate.SKIP)])) field5 = FingerprintedField(JarRules(rules=[Duplicate('foo', Duplicate.SKIP), Skip('foo')])) field6 = FingerprintedField(JarRules(rules=[Duplicate('foo', Duplicate.SKIP)], default_dup_action=Duplicate.FAIL)) field6_same = FingerprintedField(JarRules(rules=[Duplicate('foo', Duplicate.SKIP)], default_dup_action=Duplicate.FAIL)) field7 = FingerprintedField(JarRules(rules=[Skip('foo')])) field8 = FingerprintedField(JarRules(rules=[Skip('bar')])) field8_same = FingerprintedField(JarRules(rules=[Skip('bar')])) self.assertEquals(field1.fingerprint(), field1_same.fingerprint()) self.assertEquals(field6.fingerprint(), field6_same.fingerprint()) self.assertEquals(field8.fingerprint(), field8_same.fingerprint()) self._assert_fingerprints_not_equal([field1, field2, field3, field4, field5, field6, field7]) def test_manifest_entries(self): self.add_to_build_file('BUILD', dedent(''' jvm_binary(name='foo', main='com.example.Foo', manifest_entries= { 'Foo-Field' : 'foo', } ) ''')) target = self.target('//:foo') self.assertTrue(isinstance(target.payload.manifest_entries, ManifestEntries)) entries = target.payload.manifest_entries.entries self.assertEquals({ 'Foo-Field' : 'foo'}, entries) def test_manifest_not_dict(self): 
self.add_to_build_file('BUILD', dedent(''' jvm_binary(name='foo', main='com.example.Foo', manifest_entries= 'foo', ) ''')) with self.assertRaisesRegexp(TargetDefinitionException, r'Invalid target JvmBinary\(BuildFileAddress\(.*BUILD\), foo\)\): ' r'manifest_entries must be a dict. got str'): self.target('//:foo') def test_manifest_bad_key(self): self.add_to_build_file('BUILD', dedent(''' jvm_binary(name='foo', main='com.example.Foo', manifest_entries= { jar(org='bad', name='bad', rev='bad') : 'foo', } ) ''')) with self.assertRaisesRegexp(ManifestEntries.ExpectedDictionaryError, r'entries must be dictionary of strings, got key bad-bad-bad type JarDependency'): self.target('//:foo') def test_manifest_entries_fingerprint(self): field1 = ManifestEntries() field2 = ManifestEntries({'Foo-Field' : 'foo'}) field2_same = ManifestEntries({'Foo-Field' : 'foo'}) field3 = ManifestEntries({'Foo-Field' : 'foo', 'Bar-Field' : 'bar'}) self.assertEquals(field2.fingerprint(), field2_same.fingerprint()) self._assert_fingerprints_not_equal([field1, field2, field3])
apache-2.0
chand3040/sree_odoo
openerp/addons/sale_crm/sale_crm.py
320
1429
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import osv, fields class sale_order(osv.osv): _name = "sale.order" _inherit = ['sale.order', 'crm.tracking.mixin'] _columns = { 'categ_ids': fields.many2many('crm.case.categ', 'sale_order_category_rel', 'order_id', 'category_id', 'Tags', \ domain="['|', ('section_id', '=', section_id), ('section_id', '=', False), ('object_id.model', '=', 'crm.lead')]", context="{'object_name': 'crm.lead'}") }
agpl-3.0
tillraab/thunderfish
setup.py
3
2124
from setuptools import setup, find_packages

# Load __version__ without importing the package (avoids importing its
# heavy runtime dependencies at build time).
exec(open('thunderfish/version.py').read())

long_description = """
# ThunderFish

Algorithms and programs for analysing electric field recordings of
weakly electric fish.

[Documentation](https://bendalab.github.io/thunderfish) |
[API Reference](https://bendalab.github.io/thunderfish/api)

Weakly electric fish generate an electric organ discharge (EOD).  In
wave-type fish the EOD resembles a sinewave of a specific frequency and
with higher harmonics. In pulse-type fish EODs have a distinct waveform
and are separated in time. The thunderfish package provides algorithms
and tools for analysing both wavefish and pulsefish EODs.
"""

setup(
    name = 'thunderfish',
    version = __version__,
    author = 'Jan Benda, Juan F. Sehuanes, Till Raab, Jörg Henninger, Jan Grewe, Fabian Sinz, Liz Weerdmeester',
    author_email = "jan.benda@uni-tuebingen.de",
    description = 'Algorithms and scripts for analyzing recordings of electric fish waveforms.',
    long_description = long_description,
    long_description_content_type = "text/markdown",
    url = "https://github.com/bendalab/thunderfish",
    license = "GPLv3",
    classifiers = [
        "Development Status :: 4 - Beta",
        "Environment :: Console",
        "Intended Audience :: Science/Research",
        "License :: OSI Approved :: GNU General Public License v3 (GPLv3)",
        "Natural Language :: English",
        "Programming Language :: Python :: 3",
        "Operating System :: OS Independent",
        "Topic :: Scientific/Engineering",
        "Topic :: Software Development :: Libraries :: Python Modules",
    ],
    packages = find_packages(exclude = ['contrib', 'docs', 'tests*']),
    entry_points = {
        'console_scripts': [
            'thunderfish = thunderfish.thunderfish:main',
            'fishfinder = thunderfish.fishfinder:main',
            'collectfish = thunderfish.collectfish:main',
            'eodexplorer = thunderfish.eodexplorer:main',
        ]},
    python_requires = '>=3.4',
    # NOTE: the PyPI name 'sklearn' is a deprecated alias; the installable
    # distribution is 'scikit-learn' (pip now rejects 'sklearn').
    install_requires = ['scikit-learn', 'scipy', 'numpy', 'matplotlib', 'audioio'],
)
gpl-3.0
sivas2811/mocha_739
hotdot_env/lib/python2.7/site-packages/pip/vendor/distlib/locators.py
79
43778
def get_all_distribution_names(url=None):
    """
    Return all distribution names known by an index.
    :param url: The URL of the index.  Defaults to ``DEFAULT_INDEX`` (PyPI).
    :return: A list of all known distribution names.
    """
    index_url = DEFAULT_INDEX if url is None else url
    client = ServerProxy(index_url, timeout=3.0)
    return client.list_packages()
newurl = None for key in ('location', 'uri'): if key in headers: newurl = headers[key] break if newurl is None: return urlparts = urlparse(newurl) if urlparts.scheme == '': newurl = urljoin(req.get_full_url(), newurl) if hasattr(headers, 'replace_header'): headers.replace_header(key, newurl) else: headers[key] = newurl return BaseRedirectHandler.http_error_302(self, req, fp, code, msg, headers) http_error_301 = http_error_303 = http_error_307 = http_error_302 class Locator(object): """ A base class for locators - things that locate distributions. """ source_extensions = ('.tar.gz', '.tar.bz2', '.tar', '.zip', '.tgz', '.tbz') binary_extensions = ('.egg', '.exe', '.whl') excluded_extensions = ('.pdf',) # A list of tags indicating which wheels you want to match. The default # value of None matches against the tags compatible with the running # Python. If you want to match other values, set wheel_tags on a locator # instance to a list of tuples (pyver, abi, arch) which you want to match. wheel_tags = None downloadable_extensions = source_extensions + ('.whl',) def __init__(self, scheme='default'): """ Initialise an instance. :param scheme: Because locators look for most recent versions, they need to know the version scheme to use. This specifies the current PEP-recommended scheme - use ``'legacy'`` if you need to support existing distributions on PyPI. """ self._cache = {} self.scheme = scheme # Because of bugs in some of the handlers on some of the platforms, # we use our own opener rather than just using urlopen. self.opener = build_opener(RedirectHandler()) def clear_cache(self): self._cache.clear() def _get_scheme(self): return self._scheme def _set_scheme(self, value): self._scheme = value scheme = property(_get_scheme, _set_scheme) def _get_project(self, name): """ For a given project, get a dictionary mapping available versions to Distribution instances. This should be implemented in subclasses. 
""" raise NotImplementedError('Please implement in the subclass') def get_distribution_names(self): """ Return all the distribution names known to this locator. """ raise NotImplementedError('Please implement in the subclass') def get_project(self, name): """ For a given project, get a dictionary mapping available versions to Distribution instances. This calls _get_project to do all the work, and just implements a caching layer on top. """ if self._cache is None: result = self._get_project(name) elif name in self._cache: result = self._cache[name] else: result = self._get_project(name) self._cache[name] = result return result def score_url(self, url): """ Give an url a score which can be used to choose preferred URLs for a given project release. """ t = urlparse(url) return (t.scheme != 'https', 'pypi.python.org' in t.netloc, posixpath.basename(t.path)) def prefer_url(self, url1, url2): """ Choose one of two URLs where both are candidates for distribution archives for the same version of a distribution (for example, .tar.gz vs. zip). The current implement favours http:// URLs over https://, archives from PyPI over those from other locations and then the archive name. """ if url1 == 'UNKNOWN': result = url2 else: result = url2 s1 = self.score_url(url1) s2 = self.score_url(url2) if s1 > s2: result = url1 if result != url2: logger.debug('Not replacing %r with %r', url1, url2) else: logger.debug('Replacing %r with %r', url1, url2) return result def split_filename(self, filename, project_name): """ Attempt to split a filename in project name, version and Python version. """ return split_filename(filename, project_name) def convert_url_to_download_info(self, url, project_name): """ See if a URL is a candidate for a download URL for a project (the URL has typically been scraped from an HTML page). If it is, a dictionary is returned with keys "name", "version", "filename" and "url"; otherwise, None is returned. 
""" def same_project(name1, name2): name1, name2 = name1.lower(), name2.lower() if name1 == name2: result = True else: # distribute replaces '-' by '_' in project names, so it # can tell where the version starts in a filename. result = name1.replace('_', '-') == name2.replace('_', '-') return result result = None scheme, netloc, path, params, query, frag = urlparse(url) if frag.lower().startswith('egg='): logger.debug('%s: version hint in fragment: %r', project_name, frag) origpath = path if path and path[-1] == '/': path = path[:-1] if path.endswith('.whl'): try: wheel = Wheel(path) if is_compatible(wheel, self.wheel_tags): if project_name is None: include = True else: include = same_project(wheel.name, project_name) if include: result = { 'name': wheel.name, 'version': wheel.version, 'filename': wheel.filename, 'url': urlunparse((scheme, netloc, origpath, params, query, '')), 'python-version': ', '.join( ['.'.join(list(v[2:])) for v in wheel.pyver]), } m = MD5_HASH.match(frag) if m: result['md5_digest'] = m.group(1) except Exception as e: logger.warning('invalid path for wheel: %s', path) elif path.endswith(self.downloadable_extensions): path = filename = posixpath.basename(path) for ext in self.downloadable_extensions: if path.endswith(ext): path = path[:-len(ext)] t = self.split_filename(path, project_name) if not t: logger.debug('No match for project/version: %s', path) else: name, version, pyver = t if not project_name or same_project(project_name, name): result = { 'name': name, 'version': version, 'filename': filename, 'url': urlunparse((scheme, netloc, origpath, params, query, '')), #'packagetype': 'sdist', } if pyver: result['python-version'] = pyver m = MD5_HASH.match(frag) if m: result['md5_digest'] = m.group(1) break return result def _update_version_data(self, result, info): """ Update a result dictionary (the final result from _get_project) with a dictionary for a specific version, whih typically holds information gleaned from a filename or URL for 
an archive for the distribution. """ name = info.pop('name') version = info.pop('version') if version in result: dist = result[version] md = dist.metadata else: dist = make_dist(name, version, scheme=self.scheme) md = dist.metadata dist.md5_digest = info.get('md5_digest') if 'python-version' in info: md['Requires-Python'] = info['python-version'] if md['Download-URL'] != info['url']: md['Download-URL'] = self.prefer_url(md['Download-URL'], info['url']) dist.locator = self result[version] = dist def locate(self, requirement, prereleases=False): """ Find the most recent distribution which matches the given requirement. :param requirement: A requirement of the form 'foo (1.0)' or perhaps 'foo (>= 1.0, < 2.0, != 1.3)' :param prereleases: If ``True``, allow pre-release versions to be located. Otherwise, pre-release versions are not returned. :return: A :class:`Distribution` instance, or ``None`` if no such distribution could be located. """ result = None scheme = get_scheme(self.scheme) r = parse_requirement(requirement) if r is None: raise DistlibException('Not a valid requirement: %r' % requirement) if r.extras: # lose the extras part of the requirement requirement = r.requirement matcher = scheme.matcher(requirement) vcls = matcher.version_class logger.debug('matcher: %s (%s)', matcher, type(matcher).__name__) versions = self.get_project(matcher.name) if versions: # sometimes, versions are invalid slist = [] for k in versions: try: if not matcher.match(k): logger.debug('%s did not match %r', matcher, k) else: if prereleases or not vcls(k).is_prerelease: slist.append(k) else: logger.debug('skipping pre-release version %s', k) except Exception: logger.warning('error matching %s with %r', matcher, k) pass # slist.append(k) if len(slist) > 1: slist = sorted(slist, key=scheme.key) if slist: logger.debug('sorted list: %s', slist) result = versions[slist[-1]] if result and r.extras: result.extras = r.extras return result class PyPIRPCLocator(Locator): """ This locator uses 
XML-RPC to locate distributions. It therefore cannot be used with simple mirrors (that only mirror file content). """ def __init__(self, url, **kwargs): """ Initialise an instance. :param url: The URL to use for XML-RPC. :param kwargs: Passed to the superclass constructor. """ super(PyPIRPCLocator, self).__init__(**kwargs) self.base_url = url self.client = ServerProxy(url, timeout=3.0) def get_distribution_names(self): """ Return all the distribution names known to this locator. """ return set(self.client.list_packages()) def _get_project(self, name): result = {} versions = self.client.package_releases(name, True) for v in versions: urls = self.client.release_urls(name, v) data = self.client.release_data(name, v) metadata = Metadata(scheme=self.scheme) metadata.update(data) dist = Distribution(metadata) if urls: info = urls[0] metadata['Download-URL'] = info['url'] dist.md5_digest = info.get('md5_digest') dist.locator = self result[v] = dist return result class PyPIJSONLocator(Locator): """ This locator uses PyPI's JSON interface. It's very limited in functionality nad probably not worth using. """ def __init__(self, url, **kwargs): super(PyPIJSONLocator, self).__init__(**kwargs) self.base_url = ensure_slash(url) def get_distribution_names(self): """ Return all the distribution names known to this locator. """ raise NotImplementedError('Not available from this locator') def _get_project(self, name): result = {} url = urljoin(self.base_url, '%s/json' % quote(name)) try: resp = self.opener.open(url) data = resp.read().decode() # for now d = json.loads(data) md = Metadata(scheme=self.scheme) md.update(d['info']) dist = Distribution(md) urls = d['urls'] if urls: info = urls[0] md['Download-URL'] = info['url'] dist.md5_digest = info.get('md5_digest') dist.locator = self result[md.version] = dist except Exception as e: logger.exception('JSON fetch failed: %s', e) return result class Page(object): """ This class represents a scraped HTML page. 
""" # The following slightly hairy-looking regex just looks for the contents of # an anchor link, which has an attribute "href" either immediately preceded # or immediately followed by a "rel" attribute. The attribute values can be # declared with double quotes, single quotes or no quotes - which leads to # the length of the expression. _href = re.compile(""" (rel\s*=\s*(?:"(?P<rel1>[^"]*)"|'(?P<rel2>[^']*)'|(?P<rel3>[^>\s\n]*))\s+)? href\s*=\s*(?:"(?P<url1>[^"]*)"|'(?P<url2>[^']*)'|(?P<url3>[^>\s\n]*)) (\s+rel\s*=\s*(?:"(?P<rel4>[^"]*)"|'(?P<rel5>[^']*)'|(?P<rel6>[^>\s\n]*)))? """, re.I | re.S | re.X) _base = re.compile(r"""<base\s+href\s*=\s*['"]?([^'">]+)""", re.I | re.S) def __init__(self, data, url): """ Initialise an instance with the Unicode page contents and the URL they came from. """ self.data = data self.base_url = self.url = url m = self._base.search(self.data) if m: self.base_url = m.group(1) _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) @cached_property def links(self): """ Return the URLs of all the links on a page together with information about their "rel" attribute, for determining which ones to treat as downloads and which ones to queue for further scraping. """ def clean(url): "Tidy up an URL." scheme, netloc, path, params, query, frag = urlparse(url) return urlunparse((scheme, netloc, quote(path), params, query, frag)) result = set() for match in self._href.finditer(self.data): d = match.groupdict('') rel = (d['rel1'] or d['rel2'] or d['rel3'] or d['rel4'] or d['rel5'] or d['rel6']) url = d['url1'] or d['url2'] or d['url3'] url = urljoin(self.base_url, url) url = unescape(url) url = self._clean_re.sub(lambda m: '%%%2x' % ord(m.group(0)), url) result.add((url, rel)) # We sort the result, hoping to bring the most recent versions # to the front result = sorted(result, key=lambda t: t[0], reverse=True) return result class SimpleScrapingLocator(Locator): """ A locator which scrapes HTML pages to locate downloads for a distribution. 
This runs multiple threads to do the I/O; performance is at least as good as pip's PackageFinder, which works in an analogous fashion. """ # These are used to deal with various Content-Encoding schemes. decoders = { 'deflate': zlib.decompress, 'gzip': lambda b: gzip.GzipFile(fileobj=BytesIO(d)).read(), 'none': lambda b: b, } def __init__(self, url, timeout=None, num_workers=10, **kwargs): """ Initialise an instance. :param url: The root URL to use for scraping. :param timeout: The timeout, in seconds, to be applied to requests. This defaults to ``None`` (no timeout specified). :param num_workers: The number of worker threads you want to do I/O, This defaults to 10. :param kwargs: Passed to the superclass. """ super(SimpleScrapingLocator, self).__init__(**kwargs) self.base_url = ensure_slash(url) self.timeout = timeout self._page_cache = {} self._seen = set() self._to_fetch = queue.Queue() self._bad_hosts = set() self.skip_externals = False self.num_workers = num_workers self._lock = threading.RLock() def _prepare_threads(self): """ Threads are created only when get_project is called, and terminate before it returns. They are there primarily to parallelise I/O (i.e. fetching web pages). """ self._threads = [] for i in range(self.num_workers): t = threading.Thread(target=self._fetch) t.setDaemon(True) t.start() self._threads.append(t) def _wait_threads(self): """ Tell all the threads to terminate (by sending a sentinel value) and wait for them to do so. 
""" # Note that you need two loops, since you can't say which # thread will get each sentinel for t in self._threads: self._to_fetch.put(None) # sentinel for t in self._threads: t.join() self._threads = [] def _get_project(self, name): self.result = result = {} self.project_name = name url = urljoin(self.base_url, '%s/' % quote(name)) self._seen.clear() self._page_cache.clear() self._prepare_threads() try: logger.debug('Queueing %s', url) self._to_fetch.put(url) self._to_fetch.join() finally: self._wait_threads() del self.result return result platform_dependent = re.compile(r'\b(linux-(i\d86|x86_64|arm\w+)|' r'win(32|-amd64)|macosx-?\d+)\b', re.I) def _is_platform_dependent(self, url): """ Does an URL refer to a platform-specific download? """ return self.platform_dependent.search(url) def _process_download(self, url): """ See if an URL is a suitable download for a project. If it is, register information in the result dictionary (for _get_project) about the specific version it's for. Note that the return value isn't actually used other than as a boolean value. """ if self._is_platform_dependent(url): info = None else: info = self.convert_url_to_download_info(url, self.project_name) logger.debug('process_download: %s -> %s', url, info) if info: with self._lock: # needed because self.result is shared self._update_version_data(self.result, info) return info def _should_queue(self, link, referrer, rel): """ Determine whether a link URL from a referring page and with a particular "rel" attribute should be queued for scraping. 
""" scheme, netloc, path, _, _, _ = urlparse(link) if path.endswith(self.source_extensions + self.binary_extensions + self.excluded_extensions): result = False elif self.skip_externals and not link.startswith(self.base_url): result = False elif not referrer.startswith(self.base_url): result = False elif rel not in ('homepage', 'download'): result = False elif scheme not in ('http', 'https', 'ftp'): result = False elif self._is_platform_dependent(link): result = False else: host = netloc.split(':', 1)[0] if host.lower() == 'localhost': result = False else: result = True logger.debug('should_queue: %s (%s) from %s -> %s', link, rel, referrer, result) return result def _fetch(self): """ Get a URL to fetch from the work queue, get the HTML page, examine its links for download candidates and candidates for further scraping. This is a handy method to run in a thread. """ while True: url = self._to_fetch.get() try: if url: page = self.get_page(url) if page is None: # e.g. after an error continue for link, rel in page.links: if link not in self._seen: self._seen.add(link) if (not self._process_download(link) and self._should_queue(link, url, rel)): logger.debug('Queueing %s from %s', link, url) self._to_fetch.put(link) finally: # always do this, to avoid hangs :-) self._to_fetch.task_done() if not url: #logger.debug('Sentinel seen, quitting.') break def get_page(self, url): """ Get the HTML for an URL, possibly from an in-memory cache. XXX TODO Note: this cache is never actually cleared. It's assumed that the data won't get stale over the lifetime of a locator instance (not necessarily true for the default_locator). 
""" # http://peak.telecommunity.com/DevCenter/EasyInstall#package-index-api scheme, netloc, path, _, _, _ = urlparse(url) if scheme == 'file' and os.path.isdir(url2pathname(path)): url = urljoin(ensure_slash(url), 'index.html') if url in self._page_cache: result = self._page_cache[url] logger.debug('Returning %s from cache: %s', url, result) else: host = netloc.split(':', 1)[0] result = None if host in self._bad_hosts: logger.debug('Skipping %s due to bad host %s', url, host) else: req = Request(url, headers={'Accept-encoding': 'identity'}) try: logger.debug('Fetching %s', url) resp = self.opener.open(req, timeout=self.timeout) logger.debug('Fetched %s', url) headers = resp.info() content_type = headers.get('Content-Type', '') if HTML_CONTENT_TYPE.match(content_type): final_url = resp.geturl() data = resp.read() encoding = headers.get('Content-Encoding') if encoding: decoder = self.decoders[encoding] # fail if not found data = decoder(data) encoding = 'utf-8' m = CHARSET.search(content_type) if m: encoding = m.group(1) try: data = data.decode(encoding) except UnicodeError: data = data.decode('latin-1') # fallback result = Page(data, final_url) self._page_cache[final_url] = result except HTTPError as e: if e.code != 404: logger.exception('Fetch failed: %s: %s', url, e) except URLError as e: logger.exception('Fetch failed: %s: %s', url, e) with self._lock: self._bad_hosts.add(host) except Exception as e: logger.exception('Fetch failed: %s: %s', url, e) finally: self._page_cache[url] = result # even if None (failure) return result _distname_re = re.compile('<a href=[^>]*>([^<]+)<') def get_distribution_names(self): """ Return all the distribution names known to this locator. 
""" result = set() page = self.get_page(self.base_url) if not page: raise DistlibException('Unable to get %s' % self.base_url) for match in self._distname_re.finditer(page.data): result.add(match.group(1)) return result class DirectoryLocator(Locator): """ This class locates distributions in a directory tree. """ def __init__(self, path, **kwargs): """ Initialise an instance. :param path: The root of the directory tree to search. :param kwargs: Passed to the superclass constructor, except for: * recursive - if True (the default), subdirectories are recursed into. If False, only the top-level directory is searched, """ self.recursive = kwargs.pop('recursive', True) super(DirectoryLocator, self).__init__(**kwargs) path = os.path.abspath(path) if not os.path.isdir(path): raise DistlibException('Not a directory: %r' % path) self.base_dir = path def should_include(self, filename, parent): """ Should a filename be considered as a candidate for a distribution archive? As well as the filename, the directory which contains it is provided, though not used by the current implementation. """ return filename.endswith(self.downloadable_extensions) def _get_project(self, name): result = {} for root, dirs, files in os.walk(self.base_dir): for fn in files: if self.should_include(fn, root): fn = os.path.join(root, fn) url = urlunparse(('file', '', pathname2url(os.path.abspath(fn)), '', '', '')) info = self.convert_url_to_download_info(url, name) if info: self._update_version_data(result, info) if not self.recursive: break return result def get_distribution_names(self): """ Return all the distribution names known to this locator. 
""" result = set() for root, dirs, files in os.walk(self.base_dir): for fn in files: if self.should_include(fn, root): fn = os.path.join(root, fn) url = urlunparse(('file', '', pathname2url(os.path.abspath(fn)), '', '', '')) info = self.convert_url_to_download_info(url, None) if info: result.add(info['name']) if not self.recursive: break return result class JSONLocator(Locator): """ This locator uses special extended metadata (not available on PyPI) and is the basis of performant dependency resolution in distlib. Other locators require archive downloads before dependencies can be determined! As you might imagine, that can be slow. """ def get_distribution_names(self): """ Return all the distribution names known to this locator. """ raise NotImplementedError('Not available from this locator') def _get_project(self, name): result = {} data = get_project_data(name) if data: for info in data.get('files', []): if info['ptype'] != 'sdist' or info['pyversion'] != 'source': continue dist = make_dist(data['name'], info['version'], scheme=self.scheme) md = dist.metadata md['Download-URL'] = info['url'] dist.md5_digest = info.get('digest') md.dependencies = info.get('requirements', {}) dist.exports = info.get('exports', {}) result[dist.version] = dist return result class DistPathLocator(Locator): """ This locator finds installed distributions in a path. It can be useful for adding to an :class:`AggregatingLocator`. """ def __init__(self, distpath, **kwargs): """ Initialise an instance. :param distpath: A :class:`DistributionPath` instance to search. """ super(DistPathLocator, self).__init__(**kwargs) assert isinstance(distpath, DistributionPath) self.distpath = distpath def _get_project(self, name): dist = self.distpath.get_distribution(name) if dist is None: result = {} else: result = { dist.version: dist } return result class AggregatingLocator(Locator): """ This class allows you to chain and/or merge a list of locators. 
""" def __init__(self, *locators, **kwargs): """ Initialise an instance. :param locators: The list of locators to search. :param kwargs: Passed to the superclass constructor, except for: * merge - if False (the default), the first successful search from any of the locators is returned. If True, the results from all locators are merged (this can be slow). """ self.merge = kwargs.pop('merge', False) self.locators = locators super(AggregatingLocator, self).__init__(**kwargs) def clear_cache(self): super(AggregatingLocator, self).clear_cache() for locator in self.locators: locator.clear_cache() def _set_scheme(self, value): self._scheme = value for locator in self.locators: locator.scheme = value scheme = property(Locator.scheme.fget, _set_scheme) def _get_project(self, name): result = {} for locator in self.locators: r = locator.get_project(name) if r: if self.merge: result.update(r) else: result = r break return result def get_distribution_names(self): """ Return all the distribution names known to this locator. """ result = set() for locator in self.locators: try: result |= locator.get_distribution_names() except NotImplementedError: pass return result default_locator = AggregatingLocator( JSONLocator(), SimpleScrapingLocator('https://pypi.python.org/simple/', timeout=3.0)) locate = default_locator.locate class DependencyFinder(object): """ Locate dependencies for distributions. """ def __init__(self, locator=None): """ Initialise an instance, using the specified locator to locate distributions. """ self.locator = locator or default_locator self.scheme = get_scheme(self.locator.scheme) def _get_name_and_version(self, p): """ A utility method used to get name and version from e.g. a Provides-Dist value. :param p: A value in a form foo (1.0) :return: The name and version as a tuple. 
""" comps = p.strip().rsplit(' ', 1) name = comps[0] version = None if len(comps) == 2: version = comps[1] if len(version) < 3 or version[0] != '(' or version[-1] != ')': raise DistlibException('Ill-formed provides field: %r' % p) version = version[1:-1] # trim off parentheses # Name in lower case for case-insensitivity return name.lower(), version def add_distribution(self, dist): """ Add a distribution to the finder. This will update internal information about who provides what. :param dist: The distribution to add. """ logger.debug('adding distribution %s', dist) name = dist.key self.dists_by_name[name] = dist self.dists[(name, dist.version)] = dist for p in dist.provides: name, version = self._get_name_and_version(p) logger.debug('Add to provided: %s, %s, %s', name, version, dist) self.provided.setdefault(name, set()).add((version, dist)) def remove_distribution(self, dist): """ Remove a distribution from the finder. This will update internal information about who provides what. :param dist: The distribution to remove. """ logger.debug('removing distribution %s', dist) name = dist.key del self.dists_by_name[name] del self.dists[(name, dist.version)] for p in dist.provides: name, version = self._get_name_and_version(p) logger.debug('Remove from provided: %s, %s, %s', name, version, dist) s = self.provided[name] s.remove((version, dist)) if not s: del self.provided[name] def get_matcher(self, reqt): """ Get a version matcher for a requirement. :param reqt: The requirement :type reqt: str :return: A version matcher (an instance of :class:`distlib.version.Matcher`). """ try: matcher = self.scheme.matcher(reqt) except UnsupportedVersionError: # XXX compat-mode if cannot read the version name = reqt.split()[0] matcher = self.scheme.matcher(name) return matcher def find_providers(self, reqt): """ Find the distributions which can fulfill a requirement. :param reqt: The requirement. :type reqt: str :return: A set of distribution which can fulfill the requirement. 
""" matcher = self.get_matcher(reqt) name = matcher.key # case-insensitive result = set() provided = self.provided if name in provided: for version, provider in provided[name]: try: match = matcher.match(version) except UnsupportedVersionError: match = False if match: result.add(provider) break return result def try_to_replace(self, provider, other, problems): """ Attempt to replace one provider with another. This is typically used when resolving dependencies from multiple sources, e.g. A requires (B >= 1.0) while C requires (B >= 1.1). For successful replacement, ``provider`` must meet all the requirements which ``other`` fulfills. :param provider: The provider we are trying to replace with. :param other: The provider we're trying to replace. :param problems: If False is returned, this will contain what problems prevented replacement. This is currently a tuple of the literal string 'cantreplace', ``provider``, ``other`` and the set of requirements that ``provider`` couldn't fulfill. :return: True if we can replace ``other`` with ``provider``, else False. """ rlist = self.reqts[other] unmatched = set() for s in rlist: matcher = self.get_matcher(s) if not matcher.match(provider.version): unmatched.add(s) if unmatched: # can't replace other with provider problems.add(('cantreplace', provider, other, unmatched)) result = False else: # can replace other with provider self.remove_distribution(other) del self.reqts[other] for s in rlist: self.reqts.setdefault(provider, set()).add(s) self.add_distribution(provider) result = True return result def find(self, requirement, tests=False, prereleases=False): """ Find a distribution matching requirement and all distributions it depends on. Use the ``tests`` argument to determine whether distributions used only for testing should be included in the results. Allow ``requirement`` to be either a :class:`Distribution` instance or a string expressing a requirement. 
If ``prereleases`` is True, allow pre-release versions to be returned - otherwise, don't. Return a set of :class:`Distribution` instances and a set of problems. The distributions returned should be such that they have the :attr:`required` attribute set to ``True`` if they were from the ``requirement`` passed to ``find()``, and they have the :attr:`build_time_dependency` attribute set to ``True`` unless they are post-installation dependencies of the ``requirement``. The problems should be a tuple consisting of the string ``'unsatisfied'`` and the requirement which couldn't be satisfied by any distribution known to the locator. """ self.provided = {} self.dists = {} self.dists_by_name = {} self.reqts = {} if isinstance(requirement, Distribution): dist = odist = requirement logger.debug('passed %s as requirement', odist) else: dist = odist = self.locator.locate(requirement, prereleases=prereleases) if dist is None: raise DistlibException('Unable to locate %r' % requirement) logger.debug('located %s', odist) dist.requested = True problems = set() todo = set([dist]) install_dists = set([odist]) while todo: dist = todo.pop() name = dist.key # case-insensitive if name not in self.dists_by_name: self.add_distribution(dist) else: #import pdb; pdb.set_trace() other = self.dists_by_name[name] if other != dist: self.try_to_replace(dist, other, problems) ireqts = dist.requires sreqts = dist.setup_requires ereqts = set() if not tests or dist not in install_dists: treqts = set() else: treqts = dist.test_requires all_reqts = ireqts | sreqts | treqts | ereqts for r in all_reqts: providers = self.find_providers(r) if not providers: logger.debug('No providers found for %r', r) provider = self.locator.locate(r, prereleases=prereleases) if provider is None: logger.debug('Cannot satisfy %r', r) problems.add(('unsatisfied', r)) else: n, v = provider.key, provider.version if (n, v) not in self.dists: todo.add(provider) providers.add(provider) if r in ireqts and dist in install_dists: 
install_dists.add(provider) logger.debug('Adding %s to install_dists', provider.name_and_version) for p in providers: name = p.key if name not in self.dists_by_name: self.reqts.setdefault(p, set()).add(r) else: other = self.dists_by_name[name] if other != p: # see if other can be replaced by p self.try_to_replace(p, other, problems) dists = set(self.dists.values()) for dist in dists: dist.build_time_dependency = dist not in install_dists if dist.build_time_dependency: logger.debug('%s is a build-time dependency only.', dist.name_and_version) logger.debug('find done for %s', odist) return dists, problems
unlicense
Comunitea/OCB
addons/account/wizard/account_tax_chart.py
385
3247
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv import fields, osv class account_tax_chart(osv.osv_memory): """ For Chart of taxes """ _name = "account.tax.chart" _description = "Account tax chart" _columns = { 'period_id': fields.many2one('account.period', \ 'Period', \ ), 'target_move': fields.selection([('posted', 'All Posted Entries'), ('all', 'All Entries'), ], 'Target Moves', required=True), } def _get_period(self, cr, uid, context=None): """Return default period value""" period_ids = self.pool.get('account.period').find(cr, uid, context=context) return period_ids and period_ids[0] or False def account_tax_chart_open_window(self, cr, uid, ids, context=None): """ Opens chart of Accounts @param cr: the current row, from the database cursor, @param uid: the current user’s ID for security checks, @param ids: List of account chart’s IDs @return: dictionary of Open account chart window on given fiscalyear and all Entries or posted entries """ mod_obj = self.pool.get('ir.model.data') act_obj = self.pool.get('ir.actions.act_window') if context is None: context = {} data = 
self.browse(cr, uid, ids, context=context)[0] result = mod_obj.get_object_reference(cr, uid, 'account', 'action_tax_code_tree') id = result and result[1] or False result = act_obj.read(cr, uid, [id], context=context)[0] if data.period_id: result['context'] = str({'period_id': data.period_id.id, \ 'fiscalyear_id': data.period_id.fiscalyear_id.id, \ 'state': data.target_move}) period_code = data.period_id.code result['name'] += period_code and (':' + period_code) or '' else: result['context'] = str({'state': data.target_move}) return result _defaults = { 'period_id': _get_period, 'target_move': 'posted' } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
zyq001/ryu
ryu/services/protocols/bgp/core_managers/import_map_manager.py
52
1677
# Copyright (C) 2014 Nippon Telegraph and Telephone Corporation. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or # implied. # See the License for the specific language governing permissions and # limitations under the License. from ryu.services.protocols.bgp.info_base.vrf import VrfRtImportMap from ryu.services.protocols.bgp.info_base.vrf4 import Vrf4NlriImportMap from ryu.services.protocols.bgp.info_base.vrf6 import Vrf6NlriImportMap class ImportMapManager(object): def __init__(self): self._import_maps_by_name = {} def create_vpnv4_nlri_import_map(self, name, value): self._create_import_map_factory(name, value, Vrf4NlriImportMap) def create_vpnv6_nlri_import_map(self, name, value): self._create_import_map_factory(name, value, Vrf6NlriImportMap) def create_rt_import_map(self, name, value): self._create_import_map_factory(name, value, VrfRtImportMap) def _create_import_map_factory(self, name, value, cls): if self._import_maps_by_name.get(name) is not None: raise ImportMapAlreadyExistsError() self._import_maps_by_name[name] = cls(value) def get_import_map_by_name(self, name): return self._import_maps_by_name.get(name) class ImportMapAlreadyExistsError(Exception): pass
apache-2.0
m1ssou/zulip
zerver/test_subs.py
113
63624
# -*- coding: utf-8 -*- from __future__ import absolute_import from zerver.lib import cache from zerver.lib.test_helpers import ( AuthedTestCase, queries_captured, stub, tornado_redirected_to_list ) from zerver.decorator import ( JsonableError ) from zerver.lib.test_runner import ( slow ) from zerver.models import ( get_display_recipient, Message, Realm, Recipient, Stream, Subscription, UserProfile, ) from zerver.lib.actions import ( create_stream_if_needed, do_add_default_stream, do_add_subscription, do_change_is_admin, do_remove_default_stream, gather_subscriptions, get_default_streams_for_realm, get_realm, get_stream, get_user_profile_by_email, set_default_streams, ) import random import ujson import urllib class StreamAdminTest(AuthedTestCase): def test_make_stream_public(self): email = 'hamlet@zulip.com' self.login(email) user_profile = get_user_profile_by_email(email) realm = user_profile.realm stream, _ = create_stream_if_needed(realm, 'private_stream', invite_only=True) do_change_is_admin(user_profile, True) params = { 'stream_name': 'private_stream' } result = self.client.post("/json/make_stream_public", params) self.assert_json_error(result, 'You are not invited to this stream.') do_add_subscription(user_profile, stream) do_change_is_admin(user_profile, True) params = { 'stream_name': 'private_stream' } result = self.client.post("/json/make_stream_public", params) self.assert_json_success(result) stream = Stream.objects.get(name='private_stream', realm=realm) self.assertFalse(stream.invite_only) def test_make_stream_private(self): email = 'hamlet@zulip.com' self.login(email) user_profile = get_user_profile_by_email(email) realm = user_profile.realm stream, _ = create_stream_if_needed(realm, 'public_stream') do_change_is_admin(user_profile, True) params = { 'stream_name': 'public_stream' } result = self.client.post("/json/make_stream_private", params) self.assert_json_success(result) stream = Stream.objects.get(name='public_stream', realm=realm) 
self.assertTrue(stream.invite_only) def test_deactivate_stream_backend(self): email = 'hamlet@zulip.com' self.login(email) user_profile = get_user_profile_by_email(email) realm = user_profile.realm stream, _ = create_stream_if_needed(realm, 'new_stream') do_add_subscription(user_profile, stream, no_log=True) do_change_is_admin(user_profile, True) result = self.client.delete('/json/streams/new_stream') self.assert_json_success(result) subscription_exists = Subscription.objects.filter( user_profile=user_profile, recipient__type_id=stream.id, recipient__type=Recipient.STREAM, active=True, ).exists() self.assertFalse(subscription_exists) def test_deactivate_stream_backend_requires_realm_admin(self): email = 'hamlet@zulip.com' self.login(email) user_profile = get_user_profile_by_email(email) realm = user_profile.realm stream, _ = create_stream_if_needed(realm, 'new_stream') do_add_subscription(user_profile, stream, no_log=True) result = self.client.delete('/json/streams/new_stream') self.assert_json_error(result, 'Must be a realm administrator') def test_rename_stream(self): email = 'hamlet@zulip.com' self.login(email) user_profile = get_user_profile_by_email(email) realm = user_profile.realm stream, _ = create_stream_if_needed(realm, 'stream_name1') do_add_subscription(user_profile, stream, no_log=True) do_change_is_admin(user_profile, True) events = [] with tornado_redirected_to_list(events): result = self.client.post('/json/rename_stream?old_name=stream_name1&new_name=stream_name2') self.assert_json_success(result) event = events[1]['event'] self.assertEqual(event, dict( op='update', type='stream', property='name', value='stream_name2', name='stream_name1' )) users = events[1]['users'] self.assertEqual(users, [user_profile.id]) stream_name1_exists = Stream.objects.filter( name='stream_name1', realm=realm, ).exists() self.assertFalse(stream_name1_exists) stream_name2_exists = Stream.objects.filter( name='stream_name2', realm=realm, ).exists() 
        self.assertTrue(stream_name2_exists)

    def test_rename_stream_requires_realm_admin(self):
        email = 'hamlet@zulip.com'
        self.login(email)
        user_profile = get_user_profile_by_email(email)
        realm = user_profile.realm
        stream, _ = create_stream_if_needed(realm, 'stream_name1')

        # User is not an admin, so the rename must be rejected.
        result = self.client.post('/json/rename_stream?old_name=stream_name1&new_name=stream_name2')
        self.assert_json_error(result, 'Must be a realm administrator')

    def test_change_stream_description(self):
        email = 'hamlet@zulip.com'
        self.login(email)
        user_profile = get_user_profile_by_email(email)
        realm = user_profile.realm

        stream, _ = create_stream_if_needed(realm, 'stream_name1')
        do_add_subscription(user_profile, stream, no_log=True)
        do_change_is_admin(user_profile, True)

        events = []
        with tornado_redirected_to_list(events):
            result = self.client_patch('/json/streams/stream_name1',
                                       {'description': ujson.dumps('Test description')})
        self.assert_json_success(result)

        # The description-update event should go only to the subscribed user.
        event = events[0]['event']
        self.assertEqual(event, dict(
            op='update',
            type='stream',
            property='description',
            value='Test description',
            name='stream_name1'
        ))
        users = events[0]['users']
        self.assertEqual(users, [user_profile.id])

        stream = Stream.objects.get(
            name='stream_name1',
            realm=realm,
        )
        self.assertEqual('Test description', stream.description)

    def test_change_stream_description_requires_realm_admin(self):
        email = 'hamlet@zulip.com'
        self.login(email)
        user_profile = get_user_profile_by_email(email)
        realm = user_profile.realm

        stream, _ = create_stream_if_needed(realm, 'stream_name1')
        do_add_subscription(user_profile, stream, no_log=True)
        do_change_is_admin(user_profile, False)

        result = self.client_patch('/json/streams/stream_name1',
                                   {'description': ujson.dumps('Test description')})
        self.assert_json_error(result, 'Must be a realm administrator')

    def set_up_stream_for_deletion(self, stream_name, invite_only=False,
                                   subscribed=True):
        """
        Create a stream for deletion by an administrator.
        """
        email = 'hamlet@zulip.com'
        self.login(email)
        user_profile = get_user_profile_by_email(email)
        stream, _ = create_stream_if_needed(user_profile.realm, stream_name,
                                            invite_only=invite_only)

        # For testing deleting streams you aren't on.
        if subscribed:
            do_add_subscription(user_profile, stream, no_log=True)

        do_change_is_admin(user_profile, True)

        return stream

    def delete_stream(self, stream, subscribed=True):
        """
        Delete the stream and assess the result.
        """
        active_name = stream.name

        events = []
        with tornado_redirected_to_list(events):
            result = self.client.delete('/json/streams/' + active_name)
        self.assert_json_success(result)

        deletion_events = [e['event'] for e in events if e['event']['type'] == 'subscription']
        if subscribed:
            self.assertEqual(deletion_events[0], dict(
                op='remove',
                type='subscription',
                subscriptions=[{'name': active_name, 'stream_id': stream.id}]
            ))
        else:
            # You could delete the stream, but you weren't on it so you don't
            # receive an unsubscription event.
            self.assertEqual(deletion_events, [])

        with self.assertRaises(Stream.DoesNotExist):
            Stream.objects.get(realm=get_realm("zulip.com"), name=active_name)

        # A deleted stream's name is changed, is deactivated, is invite-only,
        # and has no subscribers.
        deactivated_stream_name = "!DEACTIVATED:" + active_name
        deactivated_stream = Stream.objects.get(name=deactivated_stream_name)
        self.assertTrue(deactivated_stream.deactivated)
        self.assertTrue(deactivated_stream.invite_only)
        self.assertEqual(deactivated_stream.name, deactivated_stream_name)
        subscribers = self.users_subscribed_to_stream(
            deactivated_stream_name, "zulip.com")
        self.assertEqual(subscribers, [])

        # It doesn't show up in the list of public streams anymore.
        result = self.client.post("/json/get_public_streams")
        public_streams = [s["name"] for s in ujson.loads(result.content)["streams"]]
        self.assertNotIn(active_name, public_streams)
        self.assertNotIn(deactivated_stream_name, public_streams)

        # Even if you could guess the new name, you can't subscribe to it.
        result = self.client.post(
            "/json/subscriptions/add",
            {"subscriptions": ujson.dumps([{"name": deactivated_stream_name}])})
        self.assert_json_error(
            result, "Unable to access stream (%s)." % (deactivated_stream_name,))

    def test_delete_public_stream(self):
        """
        When an administrator deletes a public stream, that stream is not
        visible to users at all anymore.
        """
        stream = self.set_up_stream_for_deletion("newstream")
        self.delete_stream(stream)

    def test_delete_private_stream(self):
        """
        Administrators can delete private streams they are on.
        """
        stream = self.set_up_stream_for_deletion("newstream", invite_only=True)
        self.delete_stream(stream)

    def test_delete_streams_youre_not_on(self):
        """
        Administrators can delete public streams they aren't on, but cannot
        delete private streams they aren't on.
        """
        pub_stream = self.set_up_stream_for_deletion(
            "pubstream", subscribed=False)
        self.delete_stream(pub_stream, subscribed=False)

        priv_stream = self.set_up_stream_for_deletion(
            "privstream", subscribed=False, invite_only=True)

        result = self.client.delete('/json/streams/' + priv_stream.name)
        self.assert_json_error(
            result, "Cannot administer invite-only streams this way")

    def attempt_unsubscribe_of_principal(self, is_admin=False, is_subbed=True,
                                         invite_only=False, other_user_subbed=True):
        """
        Try to unsubscribe Cordelia from a stream as hamlet and return the
        raw HTTP result, with the scenario controlled by the flags:
        whether hamlet is an admin, whether hamlet/Cordelia are subscribed,
        and whether the stream is invite-only.
        """

        # Set up the main user, who is in most cases an admin.
        email = "hamlet@zulip.com"
        self.login(email)
        user_profile = get_user_profile_by_email(email)
        realm = user_profile.realm

        if is_admin:
            do_change_is_admin(user_profile, True)

        # Set up the stream.
        stream_name = u"hümbüǵ"
        stream, _ = create_stream_if_needed(realm, stream_name,
                                            invite_only=invite_only)

        # Set up the principal to be unsubscribed.
        other_email = "cordelia@zulip.com"
        other_user_profile = get_user_profile_by_email(other_email)

        # Subscribe the admin and/or principal as specified in the flags.
        if is_subbed:
            do_add_subscription(user_profile, stream, no_log=True)
        if other_user_subbed:
            do_add_subscription(other_user_profile, stream, no_log=True)

        result = self.client.post(
            "/json/subscriptions/remove",
            {"subscriptions": ujson.dumps([stream.name]),
             "principals": ujson.dumps([other_email])})

        # If the removal succeeded, then assert that Cordelia is no longer subscribed.
        if result.status_code not in [400]:
            subbed_users = self.users_subscribed_to_stream(stream_name, other_user_profile.realm.domain)
            self.assertNotIn(other_user_profile, subbed_users)

        return result

    def test_cant_remove_others_from_stream(self):
        """
        If you're not an admin, you can't remove other people from streams.
        """
        result = self.attempt_unsubscribe_of_principal(
            is_admin=False, is_subbed=True, invite_only=False,
            other_user_subbed=True)
        self.assert_json_error(
            result, "This action requires administrative rights")

    def test_admin_remove_others_from_public_stream(self):
        """
        If you're an admin, you can remove people from public streams, even
        those you aren't on.
        """
        result = self.attempt_unsubscribe_of_principal(
            is_admin=True, is_subbed=True, invite_only=False,
            other_user_subbed=True)
        json = self.assert_json_success(result)
        self.assertEqual(len(json["removed"]), 1)
        self.assertEqual(len(json["not_subscribed"]), 0)

    def test_admin_remove_others_from_subbed_private_stream(self):
        """
        If you're an admin, you can remove other people from private streams you
        are on.
        """
        result = self.attempt_unsubscribe_of_principal(
            is_admin=True, is_subbed=True, invite_only=True,
            other_user_subbed=True)
        json = self.assert_json_success(result)
        self.assertEqual(len(json["removed"]), 1)
        self.assertEqual(len(json["not_subscribed"]), 0)

    def test_admin_remove_others_from_unsubbed_private_stream(self):
        """
        Even if you're an admin, you can't remove people from private
        streams you aren't on.
        """
        result = self.attempt_unsubscribe_of_principal(
            is_admin=True, is_subbed=False, invite_only=True,
            other_user_subbed=True)
        self.assert_json_error(
            result, "Cannot administer invite-only streams this way")

    def test_remove_already_not_subbed(self):
        """
        Trying to unsubscribe someone who already isn't subscribed to a stream
        fails gracefully.
        """
        result = self.attempt_unsubscribe_of_principal(
            is_admin=True, is_subbed=False, invite_only=False,
            other_user_subbed=False)
        json = self.assert_json_success(result)
        self.assertEqual(len(json["removed"]), 0)
        self.assertEqual(len(json["not_subscribed"]), 1)

    def test_remove_invalid_user(self):
        """
        Trying to unsubscribe an invalid user from a stream fails gracefully.
        """
        admin_email = "hamlet@zulip.com"
        self.login(admin_email)
        user_profile = get_user_profile_by_email(admin_email)
        do_change_is_admin(user_profile, True)
        realm = user_profile.realm

        stream_name = u"hümbüǵ"
        stream, _ = create_stream_if_needed(realm, stream_name)

        result = self.client.post("/json/subscriptions/remove",
                                  {"subscriptions": ujson.dumps([stream.name]),
                                   "principals": ujson.dumps(["baduser@zulip.com"])})
        self.assert_json_error(
            result,
            "User not authorized to execute queries on behalf of 'baduser@zulip.com'")

class DefaultStreamTest(AuthedTestCase):
    """Tests for managing a realm's default streams (the streams new users
    are automatically subscribed to)."""

    def get_default_stream_names(self, realm):
        # Helper: the realm's default stream names as a set, for easy
        # set-difference comparisons in the tests below.
        streams = get_default_streams_for_realm(realm)
        stream_names = [s.name for s in streams]
        return set(stream_names)

    def test_set_default_streams(self):
        realm = Realm.objects.get(domain="zulip.com")
        stream_names = ['apple', 'banana', 'Carrot Cake']
        # 'zulip' is expected in addition to what we set.
        expected_names = stream_names + ['zulip']
        set_default_streams(realm, stream_names)
        stream_names = self.get_default_stream_names(realm)
        self.assertEqual(stream_names, set(expected_names))

    def test_add_and_remove_default_stream(self):
        realm = Realm.objects.get(domain="zulip.com")
        orig_stream_names = self.get_default_stream_names(realm)
        do_add_default_stream(realm, 'Added Stream')
        new_stream_names = self.get_default_stream_names(realm)
        added_stream_names = new_stream_names - orig_stream_names
        self.assertEqual(added_stream_names, set(['Added Stream']))
        # idempotency--2nd call to add_default_stream should be a noop
        do_add_default_stream(realm, 'Added Stream')
        self.assertEqual(self.get_default_stream_names(realm), new_stream_names)

        # start removing
        do_remove_default_stream(realm, 'Added Stream')
        self.assertEqual(self.get_default_stream_names(realm), orig_stream_names)
        # idempotency--2nd call to remove_default_stream should be a noop
        do_remove_default_stream(realm, 'Added Stream')
        self.assertEqual(self.get_default_stream_names(realm), orig_stream_names)

    def test_api_calls(self):
        # Exercise the add/remove default-stream HTTP endpoints as an admin.
        self.login("hamlet@zulip.com")
        user_profile = get_user_profile_by_email('hamlet@zulip.com')
        do_change_is_admin(user_profile, True)
        stream_name = 'stream ADDED via api'
        result = self.client_patch('/json/default_streams', dict(stream_name=stream_name))
        self.assert_json_success(result)
        self.assertTrue(stream_name in self.get_default_stream_names(user_profile.realm))

        # and remove it
        result = self.client_delete('/json/default_streams', dict(stream_name=stream_name))
        self.assert_json_success(result)
        self.assertFalse(stream_name in self.get_default_stream_names(user_profile.realm))

class SubscriptionPropertiesTest(AuthedTestCase):
    """Tests for /json/subscriptions/property (per-subscription settings
    such as stream color)."""

    def test_set_stream_color(self):
        """
        A POST request to /json/subscriptions/property with stream_name and
        color data sets the stream color, and for that stream only.
        """
        test_email = "hamlet@zulip.com"
        self.login(test_email)
        old_subs, _ = gather_subscriptions(get_user_profile_by_email(test_email))
        sub = old_subs[0]
        stream_name = sub['name']
        new_color = "#ffffff" # TODO: ensure that this is different from old_color
        result = self.client.post(
            "/json/subscriptions/property",
            {"subscription_data": ujson.dumps([{"property": "color",
                                                "stream": stream_name,
                                                "value": "#ffffff"}])})
        self.assert_json_success(result)

        new_subs = gather_subscriptions(get_user_profile_by_email(test_email))[0]
        found_sub = None
        for sub in new_subs:
            if sub['name'] == stream_name:
                found_sub = sub
                break

        self.assertIsNotNone(found_sub)
        self.assertEqual(found_sub['color'], new_color)

        # Remove the changed subscription from both snapshots; the remaining
        # subscriptions must be untouched.
        new_subs.remove(found_sub)
        for sub in old_subs:
            if sub['name'] == stream_name:
                found_sub = sub
                break
        old_subs.remove(found_sub)
        self.assertEqual(old_subs, new_subs)

    def test_set_color_missing_stream_name(self):
        """
        Updating the color property requires a `stream` key.
        """
        test_email = "hamlet@zulip.com"
        self.login(test_email)
        result = self.client.post(
            "/json/subscriptions/property",
            {"subscription_data": ujson.dumps([{"property": "color",
                                                "value": "#ffffff"}])})
        self.assert_json_error(
            result, "stream key is missing from subscription_data[0]")

    def test_set_color_missing_color(self):
        """
        Updating the color property requires a color.
        """
        test_email = "hamlet@zulip.com"
        self.login(test_email)
        subs = gather_subscriptions(get_user_profile_by_email(test_email))[0]
        result = self.client.post(
            "/json/subscriptions/property",
            {"subscription_data": ujson.dumps([{"property": "color",
                                                "stream": subs[0]["name"]}])})
        self.assert_json_error(
            result, "value key is missing from subscription_data[0]")

    def test_set_invalid_property(self):
        """
        Trying to set an invalid property returns a JSON error.
        """
        test_email = "hamlet@zulip.com"
        self.login(test_email)
        subs = gather_subscriptions(get_user_profile_by_email(test_email))[0]
        result = self.client.post(
            "/json/subscriptions/property",
            {"subscription_data": ujson.dumps([{"property": "bad",
                                                "value": "bad",
                                                "stream": subs[0]["name"]}])})
        self.assert_json_error(result,
                               "Unknown subscription property: bad")

class SubscriptionRestApiTest(AuthedTestCase):
    """Tests for the REST-style PATCH /api/v1/users/me/subscriptions
    endpoint (bulk add/delete with parameter validation)."""

    def test_basic_add_delete(self):
        email = 'hamlet@zulip.com'
        self.login(email)

        # add
        request = {
            'add': ujson.dumps([{'name': 'my_test_stream_1'}])
        }
        result = self.client_patch(
            "/api/v1/users/me/subscriptions",
            request,
            **self.api_auth(email)
        )
        self.assert_json_success(result)
        streams = self.get_streams(email)
        self.assertTrue('my_test_stream_1' in streams)

        # now delete the same stream
        request = {
            'delete': ujson.dumps(['my_test_stream_1'])
        }
        result = self.client_patch(
            "/api/v1/users/me/subscriptions",
            request,
            **self.api_auth(email)
        )
        self.assert_json_success(result)
        streams = self.get_streams(email)
        self.assertTrue('my_test_stream_1' not in streams)

    def test_bad_add_parameters(self):
        email = 'hamlet@zulip.com'
        self.login(email)

        def check_for_error(val, expected_message):
            # Helper: PATCH with a malformed 'add' payload and assert the
            # exact validation error message.
            request = {
                'add': ujson.dumps(val)
            }
            result = self.client_patch(
                "/api/v1/users/me/subscriptions",
                request,
                **self.api_auth(email)
            )
            self.assert_json_error(result, expected_message)

        check_for_error(['foo'], 'add[0] is not a dict')
        check_for_error([{'bogus': 'foo'}], 'name key is missing from add[0]')
        check_for_error([{'name': {}}], 'add[0]["name"] is not a string')

    def test_bad_principals(self):
        email = 'hamlet@zulip.com'
        self.login(email)

        request = {
            'add': ujson.dumps([{'name': 'my_new_stream'}]),
            'principals': ujson.dumps([{}]),
        }
        result = self.client_patch(
            "/api/v1/users/me/subscriptions",
            request,
            **self.api_auth(email)
        )
        self.assert_json_error(result, 'principals[0] is not a string')

    def test_bad_delete_parameters(self):
        email = 'hamlet@zulip.com'
        self.login(email)

        request = {
            'delete':
                ujson.dumps([{'name': 'my_test_stream_1'}])
        }
        result = self.client_patch(
            "/api/v1/users/me/subscriptions",
            request,
            **self.api_auth(email)
        )
        self.assert_json_error(result, "delete[0] is not a string")

class SubscriptionAPITest(AuthedTestCase):
    """Tests for the /json/subscriptions/* endpoints (add, remove, exists,
    list) and the events/messages they generate."""

    def setUp(self):
        """
        All tests will be logged in as hamlet. Also save various useful values
        as attributes that tests can access.
        """
        self.test_email = "hamlet@zulip.com"
        self.login(self.test_email)
        self.user_profile = get_user_profile_by_email(self.test_email)
        self.realm = self.user_profile.realm
        self.streams = self.get_streams(self.test_email)

    def make_random_stream_names(self, existing_stream_names):
        """
        Helper function to make up random stream names. It takes
        existing_stream_names and randomly appends a digit to the end of each,
        skipping any resulting name that already exists in the realm.
        """
        random_streams = []
        all_stream_names = [stream.name for stream in Stream.objects.filter(realm=self.realm)]
        for stream in existing_stream_names:
            random_stream = stream + str(random.randint(0, 9))
            # NOTE(review): idiomatic form is "random_stream not in all_stream_names".
            if not random_stream in all_stream_names:
                random_streams.append(random_stream)
        return random_streams

    def test_successful_subscriptions_list(self):
        """
        Calling /api/v1/users/me/subscriptions should successfully return your subscriptions.
        """
        email = self.test_email
        result = self.client.get("/api/v1/users/me/subscriptions", **self.api_auth(email))
        self.assert_json_success(result)
        json = ujson.loads(result.content)
        self.assertIn("subscriptions", json)
        for stream in json['subscriptions']:
            self.assertIsInstance(stream['name'], basestring)
            self.assertIsInstance(stream['color'], basestring)
            self.assertIsInstance(stream['invite_only'], bool)
            # check that the stream name corresponds to an actual stream
            try:
                Stream.objects.get(name__iexact=stream['name'], realm=self.realm)
            except Stream.DoesNotExist:
                self.fail("stream does not exist")
        list_streams = [stream['name'] for stream in json["subscriptions"]]
        # also check that this matches the list of your subscriptions
        self.assertItemsEqual(list_streams, self.streams)

    def helper_check_subs_before_and_after_add(self, subscriptions, other_params,
                                               subscribed, already_subscribed,
                                               email, new_subs, invite_only=False):
        """
        Check result of adding subscriptions.

        You can add subscriptions for yourself or possibly many
        principals, which is why e-mails map to subscriptions in the
        result.

        The result json is of the form

        {"msg": "",
         "result": "success",
         "already_subscribed": {"iago@zulip.com": ["Venice", "Verona"]},
         "subscribed": {"iago@zulip.com": ["Venice8"]}}
        """
        result = self.common_subscribe_to_streams(self.test_email, subscriptions,
                                                  other_params, invite_only=invite_only)
        self.assert_json_success(result)
        json = ujson.loads(result.content)
        self.assertItemsEqual(subscribed, json["subscribed"][email])
        self.assertItemsEqual(already_subscribed, json["already_subscribed"][email])
        new_streams = self.get_streams(email)
        self.assertItemsEqual(new_streams, new_subs)

    def test_successful_subscriptions_add(self):
        """
        Calling /json/subscriptions/add should successfully add streams, and
        should determine which are new subscriptions vs which were already
        subscribed. We randomly generate stream names to add, because it
        doesn't matter whether the stream already exists.
""" self.assertNotEqual(len(self.streams), 0) # necessary for full test coverage add_streams = self.make_random_stream_names(self.streams) self.assertNotEqual(len(add_streams), 0) # necessary for full test coverage events = [] with tornado_redirected_to_list(events): self.helper_check_subs_before_and_after_add(self.streams + add_streams, {}, add_streams, self.streams, self.test_email, self.streams + add_streams) self.assert_length(events, 4, True) def test_successful_subscriptions_notifies_pm(self): """ Calling /json/subscriptions/add should notify when a new stream is created. """ invitee = "iago@zulip.com" invitee_full_name = 'Iago' current_stream = self.get_streams(invitee)[0] invite_streams = self.make_random_stream_names(current_stream)[:1] result = self.common_subscribe_to_streams( invitee, invite_streams, extra_post_data={ 'announce': 'true', 'principals': '["%s"]' % (self.user_profile.email,) }, ) self.assert_json_success(result) msg = Message.objects.latest('id') self.assertEqual(msg.recipient.type, Recipient.PERSONAL) self.assertEqual(msg.sender_id, get_user_profile_by_email('notification-bot@zulip.com').id) expected_msg = "Hi there! %s just created a new stream '%s'. " \ "!_stream_subscribe_button(%s)" % (invitee_full_name, invite_streams[0], invite_streams[0]) self.assertEqual(msg.content, expected_msg) def test_successful_subscriptions_notifies_stream(self): """ Calling /json/subscriptions/add should notify when a new stream is created. 
""" invitee = "iago@zulip.com" invitee_full_name = 'Iago' current_stream = self.get_streams(invitee)[0] invite_streams = self.make_random_stream_names(current_stream)[:1] notifications_stream = Stream.objects.get(name=current_stream, realm=self.realm) self.realm.notifications_stream = notifications_stream self.realm.save() # Delete the UserProfile from the cache so the realm change will be # picked up cache.cache_delete(cache.user_profile_by_email_cache_key(invitee)) result = self.common_subscribe_to_streams( invitee, invite_streams, extra_post_data=dict( announce='true', principals='["%s"]' % (self.user_profile.email,) ), ) self.assert_json_success(result) msg = Message.objects.latest('id') self.assertEqual(msg.recipient.type, Recipient.STREAM) self.assertEqual(msg.sender_id, get_user_profile_by_email('notification-bot@zulip.com').id) expected_msg = "%s just created a new stream `%s`. " \ "!_stream_subscribe_button(%s)" % (invitee_full_name, invite_streams[0], invite_streams[0]) self.assertEqual(msg.content, expected_msg) def test_successful_subscriptions_notifies_with_escaping(self): """ Calling /json/subscriptions/add should notify when a new stream is created. """ invitee = "iago@zulip.com" invitee_full_name = 'Iago' invite_streams = ['strange ) \\ test'] result = self.common_subscribe_to_streams( invitee, invite_streams, extra_post_data={ 'announce': 'true', 'principals': '["%s"]' % (self.user_profile.email,) }, ) self.assert_json_success(result) msg = Message.objects.latest('id') self.assertEqual(msg.sender_id, get_user_profile_by_email('notification-bot@zulip.com').id) expected_msg = "Hi there! %s just created a new stream '%s'. " \ "!_stream_subscribe_button(strange \\) \\\\ test)" % ( invitee_full_name, invite_streams[0]) self.assertEqual(msg.content, expected_msg) def test_non_ascii_stream_subscription(self): """ Subscribing to a stream name with non-ASCII characters succeeds. 
""" self.helper_check_subs_before_and_after_add(self.streams + [u"hümbüǵ"], {}, [u"hümbüǵ"], self.streams, self.test_email, self.streams + [u"hümbüǵ"]) def test_subscriptions_add_too_long(self): """ Calling /json/subscriptions/add on a stream whose name is >60 characters should return a JSON error. """ # character limit is 60 characters long_stream_name = "a" * 61 result = self.common_subscribe_to_streams(self.test_email, [long_stream_name]) self.assert_json_error(result, "Stream name (%s) too long." % (long_stream_name,)) def test_user_settings_for_adding_streams(self): with stub(UserProfile, 'can_create_streams', lambda self: True): result = self.common_subscribe_to_streams(self.test_email, ['stream1']) self.assert_json_success(result) with stub(UserProfile, 'can_create_streams', lambda self: False): result = self.common_subscribe_to_streams(self.test_email, ['stream1']) self.assert_json_error(result, 'User cannot create streams.') def test_subscriptions_add_invalid_stream(self): """ Calling /json/subscriptions/add on a stream whose name is invalid (as defined by valid_stream_name in zerver/views.py) should return a JSON error. """ # currently, the only invalid name is the empty string invalid_stream_name = "" result = self.common_subscribe_to_streams(self.test_email, [invalid_stream_name]) self.assert_json_error(result, "Invalid stream name (%s)." % (invalid_stream_name,)) def assert_adding_subscriptions_for_principal(self, invitee, streams, invite_only=False): """ Calling /json/subscriptions/add on behalf of another principal (for whom you have permission to add subscriptions) should successfully add those subscriptions and send a message to the subscribee notifying them. 
""" other_profile = get_user_profile_by_email(invitee) current_streams = self.get_streams(invitee) self.assertIsInstance(other_profile, UserProfile) self.assertNotEqual(len(current_streams), 0) # necessary for full test coverage self.assertNotEqual(len(streams), 0) # necessary for full test coverage streams_to_sub = streams[:1] # just add one, to make the message easier to check streams_to_sub.extend(current_streams) self.helper_check_subs_before_and_after_add(streams_to_sub, {"principals": ujson.dumps([invitee])}, streams[:1], current_streams, invitee, streams_to_sub, invite_only=invite_only) # verify that the user was sent a message informing them about the subscription msg = Message.objects.latest('id') self.assertEqual(msg.recipient.type, msg.recipient.PERSONAL) self.assertEqual(msg.sender_id, get_user_profile_by_email("notification-bot@zulip.com").id) expected_msg = ("Hi there! We thought you'd like to know that %s just " "subscribed you to the %sstream [%s](#narrow/stream/%s)." % (self.user_profile.full_name, '**invite-only** ' if invite_only else '', streams[0], urllib.quote(streams[0].encode('utf-8')))) if not Stream.objects.get(name=streams[0]).invite_only: expected_msg += ("\nYou can see historical content on a " "non-invite-only stream by narrowing to it.") self.assertEqual(msg.content, expected_msg) recipients = get_display_recipient(msg.recipient) self.assertEqual(len(recipients), 1) self.assertEqual(recipients[0]['email'], invitee) def test_multi_user_subscription(self): email1 = 'cordelia@zulip.com' email2 = 'iago@zulip.com' realm = Realm.objects.get(domain="zulip.com") streams_to_sub = ['multi_user_stream'] events = [] with tornado_redirected_to_list(events): with queries_captured() as queries: self.common_subscribe_to_streams( self.test_email, streams_to_sub, dict(principals=ujson.dumps([email1, email2])), ) self.assert_length(queries, 43) self.assert_length(events, 6, exact=True) for ev in filter(lambda x: x['event']['type'] not in ('message', 
'stream'), events): self.assertEqual(ev['event']['op'], 'add') self.assertEqual( set(ev['event']['subscriptions'][0]['subscribers']), set([email1, email2]) ) stream = get_stream('multi_user_stream', realm) self.assertEqual(stream.num_subscribers(), 2) # Now add ourselves events = [] with tornado_redirected_to_list(events): with queries_captured() as queries: self.common_subscribe_to_streams( self.test_email, streams_to_sub, dict(principals=ujson.dumps([self.test_email])), ) self.assert_length(queries, 8) self.assert_length(events, 2, True) add_event, add_peer_event = events self.assertEqual(add_event['event']['type'], 'subscription') self.assertEqual(add_event['event']['op'], 'add') self.assertEqual(add_event['users'], [get_user_profile_by_email(self.test_email).id]) self.assertEqual( set(add_event['event']['subscriptions'][0]['subscribers']), set([email1, email2, self.test_email]) ) self.assertEqual(len(add_peer_event['users']), 2) self.assertEqual(add_peer_event['event']['type'], 'subscription') self.assertEqual(add_peer_event['event']['op'], 'peer_add') self.assertEqual(add_peer_event['event']['user_email'], self.test_email) stream = get_stream('multi_user_stream', realm) self.assertEqual(stream.num_subscribers(), 3) # Finally, add othello, exercising the do_add_subscription() code path. 
events = [] email3 = 'othello@zulip.com' user_profile = get_user_profile_by_email(email3) stream = get_stream('multi_user_stream', realm) with tornado_redirected_to_list(events): do_add_subscription(user_profile, stream) self.assert_length(events, 2, True) add_event, add_peer_event = events self.assertEqual(add_event['event']['type'], 'subscription') self.assertEqual(add_event['event']['op'], 'add') self.assertEqual(add_event['users'], [get_user_profile_by_email(email3).id]) self.assertEqual( set(add_event['event']['subscriptions'][0]['subscribers']), set([email1, email2, email3, self.test_email]) ) self.assertEqual(len(add_peer_event['users']), 3) self.assertEqual(add_peer_event['event']['type'], 'subscription') self.assertEqual(add_peer_event['event']['op'], 'peer_add') self.assertEqual(add_peer_event['event']['user_email'], email3) def test_bulk_subscribe_MIT(self): realm = Realm.objects.get(domain="mit.edu") streams = ["stream_%s" % i for i in xrange(40)] for stream in streams: create_stream_if_needed(realm, stream) events = [] with tornado_redirected_to_list(events): with queries_captured() as queries: self.common_subscribe_to_streams( 'starnine@mit.edu', streams, dict(principals=ujson.dumps(['starnine@mit.edu'])), ) # Make sure MIT does not get any tornado subscription events self.assert_length(events, 0, True) self.assert_length(queries, 7) def test_bulk_subscribe_many(self): # Create a whole bunch of streams realm = Realm.objects.get(domain="zulip.com") streams = ["stream_%s" % i for i in xrange(20)] for stream in streams: create_stream_if_needed(realm, stream) with queries_captured() as queries: self.common_subscribe_to_streams( self.test_email, streams, dict(principals=ujson.dumps([self.test_email])), ) # Make sure we don't make O(streams) queries self.assert_length(queries, 9) @slow(0.15, "common_subscribe_to_streams is slow") def test_subscriptions_add_for_principal(self): """ You can subscribe other people to streams. 
""" invitee = "iago@zulip.com" current_streams = self.get_streams(invitee) invite_streams = self.make_random_stream_names(current_streams) self.assert_adding_subscriptions_for_principal(invitee, invite_streams) @slow(0.15, "common_subscribe_to_streams is slow") def test_subscriptions_add_for_principal_invite_only(self): """ You can subscribe other people to invite only streams. """ invitee = "iago@zulip.com" current_streams = self.get_streams(invitee) invite_streams = self.make_random_stream_names(current_streams) self.assert_adding_subscriptions_for_principal(invitee, invite_streams, invite_only=True) @slow(0.15, "common_subscribe_to_streams is slow") def test_non_ascii_subscription_for_principal(self): """ You can subscribe other people to streams even if they containing non-ASCII characters. """ self.assert_adding_subscriptions_for_principal("iago@zulip.com", [u"hümbüǵ"]) def test_subscription_add_invalid_principal(self): """ Calling subscribe on behalf of a principal that does not exist should return a JSON error. """ invalid_principal = "rosencrantz-and-guildenstern@zulip.com" # verify that invalid_principal actually doesn't exist with self.assertRaises(UserProfile.DoesNotExist): get_user_profile_by_email(invalid_principal) result = self.common_subscribe_to_streams(self.test_email, self.streams, {"principals": ujson.dumps([invalid_principal])}) self.assert_json_error(result, "User not authorized to execute queries on behalf of '%s'" % (invalid_principal,)) def test_subscription_add_principal_other_realm(self): """ Calling subscribe on behalf of a principal in another realm should return a JSON error. 
""" principal = "starnine@mit.edu" profile = get_user_profile_by_email(principal) # verify that principal exists (thus, the reason for the error is the cross-realming) self.assertIsInstance(profile, UserProfile) result = self.common_subscribe_to_streams(self.test_email, self.streams, {"principals": ujson.dumps([principal])}) self.assert_json_error(result, "User not authorized to execute queries on behalf of '%s'" % (principal,)) def helper_check_subs_before_and_after_remove(self, subscriptions, json_dict, email, new_subs): """ Check result of removing subscriptions. Unlike adding subscriptions, you can only remove subscriptions for yourself, so the result format is different. {"msg": "", "removed": ["Denmark", "Scotland", "Verona"], "not_subscribed": ["Rome"], "result": "success"} """ result = self.client.post("/json/subscriptions/remove", {"subscriptions": ujson.dumps(subscriptions)}) self.assert_json_success(result) json = ujson.loads(result.content) for key, val in json_dict.iteritems(): self.assertItemsEqual(val, json[key]) # we don't care about the order of the items new_streams = self.get_streams(email) self.assertItemsEqual(new_streams, new_subs) def test_successful_subscriptions_remove(self): """ Calling /json/subscriptions/remove should successfully remove streams, and should determine which were removed vs which weren't subscribed to. We cannot randomly generate stream names because the remove code verifies whether streams exist. 
""" if len(self.streams) < 2: self.fail() # necesssary for full test coverage streams_to_remove = self.streams[1:] not_subbed = [] for stream in Stream.objects.all(): if not stream.name in self.streams: not_subbed.append(stream.name) random.shuffle(not_subbed) self.assertNotEqual(len(not_subbed), 0) # necessary for full test coverage try_to_remove = not_subbed[:3] # attempt to remove up to 3 streams not already subbed to streams_to_remove.extend(try_to_remove) self.helper_check_subs_before_and_after_remove(streams_to_remove, {"removed": self.streams[1:], "not_subscribed": try_to_remove}, self.test_email, [self.streams[0]]) def test_subscriptions_remove_fake_stream(self): """ Calling /json/subscriptions/remove on a stream that doesn't exist should return a JSON error. """ random_streams = self.make_random_stream_names(self.streams) self.assertNotEqual(len(random_streams), 0) # necessary for full test coverage streams_to_remove = random_streams[:1] # pick only one fake stream, to make checking the error message easy result = self.client.post("/json/subscriptions/remove", {"subscriptions": ujson.dumps(streams_to_remove)}) self.assert_json_error(result, "Stream(s) (%s) do not exist" % (random_streams[0],)) def helper_subscriptions_exists(self, stream, exists, subscribed): """ A helper function that calls /json/subscriptions/exists on a stream and verifies that the returned JSON dictionary has the exists and subscribed values passed in as parameters. (If subscribed should not be present, pass in None.) 
""" result = self.client.post("/json/subscriptions/exists", {"stream": stream}) json = ujson.loads(result.content) self.assertIn("exists", json) self.assertEqual(json["exists"], exists) if exists: self.assert_json_success(result) else: self.assertEquals(result.status_code, 404) if not subscribed is None: self.assertIn("subscribed", json) self.assertEqual(json["subscribed"], subscribed) def test_successful_subscriptions_exists_subbed(self): """ Calling /json/subscriptions/exist on a stream to which you are subbed should return that it exists and that you are subbed. """ self.assertNotEqual(len(self.streams), 0) # necessary for full test coverage self.helper_subscriptions_exists(self.streams[0], True, True) def test_successful_subscriptions_exists_not_subbed(self): """ Calling /json/subscriptions/exist on a stream to which you are not subbed should return that it exists and that you are not subbed. """ all_stream_names = [stream.name for stream in Stream.objects.filter(realm=self.realm)] streams_not_subbed = list(set(all_stream_names) - set(self.streams)) self.assertNotEqual(len(streams_not_subbed), 0) # necessary for full test coverage self.helper_subscriptions_exists(streams_not_subbed[0], True, False) def test_subscriptions_does_not_exist(self): """ Calling /json/subscriptions/exist on a stream that doesn't exist should return that it doesn't exist. """ random_streams = self.make_random_stream_names(self.streams) self.assertNotEqual(len(random_streams), 0) # necessary for full test coverage self.helper_subscriptions_exists(random_streams[0], False, None) def test_subscriptions_exist_invalid_name(self): """ Calling /json/subscriptions/exist on a stream whose name is invalid (as defined by valid_stream_name in zerver/views.py) should return a JSON error. 
""" # currently, the only invalid stream name is the empty string invalid_stream_name = "" result = self.client.post("/json/subscriptions/exists", {"stream": invalid_stream_name}) self.assert_json_error(result, "Invalid characters in stream name") def get_subscription(self, user_profile, stream_name): stream = Stream.objects.get(realm=self.realm, name=stream_name) return Subscription.objects.get( user_profile=user_profile, recipient__type=Recipient.STREAM, recipient__type_id=stream.id, ) def test_subscriptions_add_notification_default_true(self): """ When creating a subscription, the desktop and audible notification settings for that stream are derived from the global notification settings. """ invitee = "iago@zulip.com" user_profile = get_user_profile_by_email(invitee) user_profile.enable_stream_desktop_notifications = True user_profile.enable_stream_sounds = True user_profile.save() current_stream = self.get_streams(invitee)[0] invite_streams = self.make_random_stream_names(current_stream) self.assert_adding_subscriptions_for_principal(invitee, invite_streams) subscription = self.get_subscription(user_profile, invite_streams[0]) self.assertTrue(subscription.desktop_notifications) self.assertTrue(subscription.audible_notifications) def test_subscriptions_add_notification_default_false(self): """ When creating a subscription, the desktop and audible notification settings for that stream are derived from the global notification settings. 
""" invitee = "iago@zulip.com" user_profile = get_user_profile_by_email(invitee) user_profile.enable_stream_desktop_notifications = False user_profile.enable_stream_sounds = False user_profile.save() current_stream = self.get_streams(invitee)[0] invite_streams = self.make_random_stream_names(current_stream) self.assert_adding_subscriptions_for_principal(invitee, invite_streams) subscription = self.get_subscription(user_profile, invite_streams[0]) self.assertFalse(subscription.desktop_notifications) self.assertFalse(subscription.audible_notifications) class GetPublicStreamsTest(AuthedTestCase): def test_public_streams(self): """ Ensure that get_public_streams successfully returns a list of streams """ email = 'hamlet@zulip.com' self.login(email) result = self.client.post("/json/get_public_streams") self.assert_json_success(result) json = ujson.loads(result.content) self.assertIn("streams", json) self.assertIsInstance(json["streams"], list) def test_public_streams_api(self): """ Ensure that get_public_streams successfully returns a list of streams """ email = 'hamlet@zulip.com' self.login(email) # Check it correctly lists the user's subs with include_public=false result = self.client.get("/api/v1/streams?include_public=false", **self.api_auth(email)) result2 = self.client.get("/api/v1/users/me/subscriptions", **self.api_auth(email)) self.assert_json_success(result) json = ujson.loads(result.content) self.assertIn("streams", json) self.assertIsInstance(json["streams"], list) self.assert_json_success(result2) json2 = ujson.loads(result2.content) self.assertEqual(sorted([s["name"] for s in json["streams"]]), sorted([s["name"] for s in json2["subscriptions"]])) # Check it correctly lists all public streams with include_subscribed=false result = self.client.get("/api/v1/streams?include_public=true&include_subscribed=false", **self.api_auth(email)) self.assert_json_success(result) json = ujson.loads(result.content) all_streams = [stream.name for stream in 
Stream.objects.filter(realm=get_user_profile_by_email(email).realm)] self.assertEqual(sorted(s["name"] for s in json["streams"]), sorted(all_streams)) # Check non-superuser can't use include_all_active result = self.client.get("/api/v1/streams?include_all_active=true", **self.api_auth(email)) self.assertEqual(result.status_code, 400) class InviteOnlyStreamTest(AuthedTestCase): def test_must_be_subbed_to_send(self): """ If you try to send a message to an invite-only stream to which you aren't subscribed, you'll get a 400. """ self.login("hamlet@zulip.com") # Create Saxony as an invite-only stream. self.assert_json_success( self.common_subscribe_to_streams("hamlet@zulip.com", ["Saxony"], invite_only=True)) email = "cordelia@zulip.com" with self.assertRaises(JsonableError): self.send_message(email, "Saxony", Recipient.STREAM) def test_list_respects_invite_only_bit(self): """ Make sure that /api/v1/users/me/subscriptions properly returns the invite-only bit for streams that are invite-only """ email = 'hamlet@zulip.com' self.login(email) result1 = self.common_subscribe_to_streams(email, ["Saxony"], invite_only=True) self.assert_json_success(result1) result2 = self.common_subscribe_to_streams(email, ["Normandy"], invite_only=False) self.assert_json_success(result2) result = self.client.get("/api/v1/users/me/subscriptions", **self.api_auth(email)) self.assert_json_success(result) json = ujson.loads(result.content) self.assertIn("subscriptions", json) for sub in json["subscriptions"]: if sub['name'] == "Normandy": self.assertEqual(sub['invite_only'], False, "Normandy was mistakenly marked invite-only") if sub['name'] == "Saxony": self.assertEqual(sub['invite_only'], True, "Saxony was not properly marked invite-only") @slow(0.15, "lots of queries") def test_inviteonly(self): # Creating an invite-only stream is allowed email = 'hamlet@zulip.com' stream_name = "Saxony" result = self.common_subscribe_to_streams(email, [stream_name], invite_only=True) 
self.assert_json_success(result) json = ujson.loads(result.content) self.assertEqual(json["subscribed"], {email: [stream_name]}) self.assertEqual(json["already_subscribed"], {}) # Subscribing oneself to an invite-only stream is not allowed email = "othello@zulip.com" self.login(email) result = self.common_subscribe_to_streams(email, [stream_name]) self.assert_json_error(result, 'Unable to access stream (Saxony).') # authorization_errors_fatal=False works email = "othello@zulip.com" self.login(email) result = self.common_subscribe_to_streams(email, [stream_name], extra_post_data={'authorization_errors_fatal': ujson.dumps(False)}) self.assert_json_success(result) json = ujson.loads(result.content) self.assertEqual(json["unauthorized"], [stream_name]) self.assertEqual(json["subscribed"], {}) self.assertEqual(json["already_subscribed"], {}) # Inviting another user to an invite-only stream is allowed email = 'hamlet@zulip.com' self.login(email) result = self.common_subscribe_to_streams( email, [stream_name], extra_post_data={'principals': ujson.dumps(["othello@zulip.com"])}) self.assert_json_success(result) json = ujson.loads(result.content) self.assertEqual(json["subscribed"], {"othello@zulip.com": [stream_name]}) self.assertEqual(json["already_subscribed"], {}) # Make sure both users are subscribed to this stream result = self.client.get("/api/v1/streams/%s/members" % (stream_name,), **self.api_auth(email)) self.assert_json_success(result) json = ujson.loads(result.content) self.assertTrue('othello@zulip.com' in json['subscribers']) self.assertTrue('hamlet@zulip.com' in json['subscribers']) class GetSubscribersTest(AuthedTestCase): def setUp(self): self.email = "hamlet@zulip.com" self.user_profile = get_user_profile_by_email(self.email) self.login(self.email) def check_well_formed_result(self, result, stream_name, domain): """ A successful call to get_subscribers returns the list of subscribers in the form: {"msg": "", "result": "success", "subscribers": 
["hamlet@zulip.com", "prospero@zulip.com"]} """ self.assertIn("subscribers", result) self.assertIsInstance(result["subscribers"], list) true_subscribers = [user_profile.email for user_profile in self.users_subscribed_to_stream( stream_name, domain)] self.assertItemsEqual(result["subscribers"], true_subscribers) def make_subscriber_request(self, stream_name, email=None): if email is None: email = self.email return self.client.get("/api/v1/streams/%s/members" % (stream_name,), **self.api_auth(email)) def make_successful_subscriber_request(self, stream_name): result = self.make_subscriber_request(stream_name) self.assert_json_success(result) self.check_well_formed_result(ujson.loads(result.content), stream_name, self.user_profile.realm.domain) def test_subscriber(self): """ get_subscribers returns the list of subscribers. """ stream_name = gather_subscriptions(self.user_profile)[0][0]['name'] self.make_successful_subscriber_request(stream_name) @slow(0.15, "common_subscribe_to_streams is slow") def test_gather_subscriptions(self): """ gather_subscriptions returns correct results with only 3 queries """ realm = Realm.objects.get(domain="zulip.com") streams = ["stream_%s" % i for i in xrange(10)] for stream in streams: create_stream_if_needed(realm, stream) users_to_subscribe = [self.email, "othello@zulip.com", "cordelia@zulip.com"] ret = self.common_subscribe_to_streams( self.email, streams, dict(principals=ujson.dumps(users_to_subscribe))) self.assert_json_success(ret) ret = self.common_subscribe_to_streams( self.email, ["stream_invite_only_1"], dict(principals=ujson.dumps(users_to_subscribe)), invite_only=True) self.assert_json_success(ret) with queries_captured() as queries: subscriptions = gather_subscriptions(self.user_profile) self.assertTrue(len(subscriptions[0]) >= 11) for sub in subscriptions[0]: if not sub["name"].startswith("stream_"): continue self.assertTrue(len(sub["subscribers"]) == len(users_to_subscribe)) self.assert_length(queries, 4, exact=True) 
@slow(0.15, "common_subscribe_to_streams is slow") def test_gather_subscriptions_mit(self): """ gather_subscriptions returns correct results with only 3 queries """ # Subscribe only ourself because invites are disabled on mit.edu users_to_subscribe = ["starnine@mit.edu", "espuser@mit.edu"] for email in users_to_subscribe: self.subscribe_to_stream(email, "mit_stream") ret = self.common_subscribe_to_streams( "starnine@mit.edu", ["mit_invite_only"], dict(principals=ujson.dumps(users_to_subscribe)), invite_only=True) self.assert_json_success(ret) with queries_captured() as queries: subscriptions = gather_subscriptions(get_user_profile_by_email("starnine@mit.edu")) self.assertTrue(len(subscriptions[0]) >= 2) for sub in subscriptions[0]: if not sub["name"].startswith("mit_"): continue if sub["name"] == "mit_invite_only": self.assertTrue(len(sub["subscribers"]) == len(users_to_subscribe)) else: self.assertTrue(len(sub["subscribers"]) == 0) self.assert_length(queries, 4, exact=True) def test_nonsubscriber(self): """ Even a non-subscriber to a public stream can query a stream's membership with get_subscribers. """ # Create a stream for which Hamlet is the only subscriber. stream_name = "Saxony" self.common_subscribe_to_streams(self.email, [stream_name]) other_email = "othello@zulip.com" # Fetch the subscriber list as a non-member. self.login(other_email) self.make_successful_subscriber_request(stream_name) def test_subscriber_private_stream(self): """ A subscriber to a private stream can query that stream's membership. """ stream_name = "Saxony" self.common_subscribe_to_streams(self.email, [stream_name], invite_only=True) self.make_successful_subscriber_request(stream_name) def test_nonsubscriber_private_stream(self): """ A non-subscriber to a private stream can't query that stream's membership. """ # Create a private stream for which Hamlet is the only subscriber. 
stream_name = "NewStream" self.common_subscribe_to_streams(self.email, [stream_name], invite_only=True) other_email = "othello@zulip.com" # Try to fetch the subscriber list as a non-member. result = self.make_subscriber_request(stream_name, email=other_email) self.assert_json_error(result, "Unable to retrieve subscribers for invite-only stream")
apache-2.0
nakato/AuthKit
authkit/authorize/pylons_adaptors.py
2
3015
"""Pylons specific code to facilitate using AuthKit with Pylons There is a full Pylons manual in addition to the AuthKit manual which you should read if you plan to use AuthKit with Pylons .. Note :: In addition to the authorize methods described here, you can also use the default ``authkit.authorize.middleware`` function to add WSGI middleware authorization checks to your Pylons application since Pylons has a full WSGI middleware stack. Just add the middleware to your project's ``config/middleware.py`` file. """ from decorator import decorator from pylons import request from authkit.authorize import PermissionSetupError from authkit.authorize import NotAuthenticatedError, NotAuthorizedError from authkit.authorize import authorize_request as authkit_authorize_request def authorize(permission): """ This is a decorator which can be used to decorate a Pylons controller action. It takes the permission to check as the only argument and can be used with all types of permission objects. """ def validate(func, self, *args, **kwargs): all_conf = request.environ.get('authkit.config') if all_conf is None: raise Exception('Authentication middleware not present') if all_conf.get('setup.enable', True) is True: def app(environ, start_response): return func(self, *args, **kwargs) return permission.check(app, request.environ, self.start_response) else: return func(self, *args, **kwargs) return decorator(validate) def authorize_request(permission): """ This function can be used within a controller action to ensure that no code after the function call is executed if the user doesn't pass the permission check specified by ``permission``. .. Note :: Unlike the ``authorize()`` decorator or ``authkit.authorize.middleware`` middleware, this function has no access to the WSGI response so cannot be used to check response-based permissions. Since almost all AuthKit permissions are request-based this shouldn't be a big problem unless you are defining your own advanced permission checks. 
""" authkit_authorize_request(request.environ, permission) def authorized(permission): """ Similar to the ``authorize_request()`` function with no access to the request but rather than raising an exception to stop the request if a permission check fails, this function simply returns ``False`` so that you can test permissions in your code without triggering a sign in. It can therefore be used in a controller action or template. Use like this:: if authorized(permission): return Response('You are authorized') else: return Response('Access denied') """ try: authorize_request(permission) except (NotAuthorizedError, NotAuthenticatedError): return False else: return True
mit
hujingguang/OpsSystem
ops_system/utils.py
1
1182
import commands import os def get_system_info(): cmd_get_cpu_sockect=r"lscpu|grep Socket|awk '{print $NF}'" cmd_get_cpu_cores=r"cat /proc/cpuinfo|grep processor|wc -l" cmd_get_mem_total=r"free -m |head -n 2|tail -n 1|awk '{print $2}'" cmd_get_mem_used=r"free -m |tail -n 2|head -n 1|awk '{print $3}'" cmd_get_users_num=r"w -h |wc -l" cmd_get_uptime=r"uptime |awk -F',' '{print $1}'" cpu_sockets=commands.getoutput(cmd_get_cpu_sockect) res=os.system('lscpu &>/dev/null') if res != 0: cpu_sockets=1 cpu_cores=commands.getoutput(cmd_get_cpu_cores) mem_total=commands.getoutput(cmd_get_mem_total) mem_used=commands.getoutput(cmd_get_mem_used) users=commands.getoutput(cmd_get_users_num) uptime=commands.getoutput(cmd_get_uptime) mem_percentage=round(float(mem_used)/float(mem_total)*100,2) mem_left_percentage=100-mem_percentage info_dict={'cpu_sockets':cpu_sockets,'cpu_cores':cpu_cores,'mem_total':mem_total,'mem_used':mem_used,'mem_percentage':mem_percentage,'users':users,'uptime':uptime,'mem_left_percentage':mem_left_percentage} return info_dict if __name__=='__main__': print get_system_info()
mit
gregdek/ansible
lib/ansible/modules/cloud/rackspace/rax_mon_entity.py
77
5795
#!/usr/bin/python # Copyright: Ansible Project # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: rax_mon_entity short_description: Create or delete a Rackspace Cloud Monitoring entity description: - Create or delete a Rackspace Cloud Monitoring entity, which represents a device to monitor. Entities associate checks and alarms with a target system and provide a convenient, centralized place to store IP addresses. Rackspace monitoring module flow | *rax_mon_entity* -> rax_mon_check -> rax_mon_notification -> rax_mon_notification_plan -> rax_mon_alarm version_added: "2.0" options: label: description: - Defines a name for this entity. Must be a non-empty string between 1 and 255 characters long. required: true state: description: - Ensure that an entity with this C(name) exists or does not exist. choices: ["present", "absent"] agent_id: description: - Rackspace monitoring agent on the target device to which this entity is bound. Necessary to collect C(agent.) rax_mon_checks against this entity. named_ip_addresses: description: - Hash of IP addresses that may be referenced by name by rax_mon_checks added to this entity. Must be a dictionary of with keys that are names between 1 and 64 characters long, and values that are valid IPv4 or IPv6 addresses. metadata: description: - Hash of arbitrary C(name), C(value) pairs that are passed to associated rax_mon_alarms. Names and values must all be between 1 and 255 characters long. 
author: Ash Wilson (@smashwilson) extends_documentation_fragment: rackspace.openstack ''' EXAMPLES = ''' - name: Entity example gather_facts: False hosts: local connection: local tasks: - name: Ensure an entity exists rax_mon_entity: credentials: ~/.rax_pub state: present label: my_entity named_ip_addresses: web_box: 192.0.2.4 db_box: 192.0.2.5 meta: hurf: durf register: the_entity ''' try: import pyrax HAS_PYRAX = True except ImportError: HAS_PYRAX = False from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.rax import rax_argument_spec, rax_required_together, setup_rax_module def cloud_monitoring(module, state, label, agent_id, named_ip_addresses, metadata): if len(label) < 1 or len(label) > 255: module.fail_json(msg='label must be between 1 and 255 characters long') changed = False cm = pyrax.cloud_monitoring if not cm: module.fail_json(msg='Failed to instantiate client. This typically ' 'indicates an invalid region or an incorrectly ' 'capitalized region name.') existing = [] for entity in cm.list_entities(): if label == entity.label: existing.append(entity) entity = None if existing: entity = existing[0] if state == 'present': should_update = False should_delete = False should_create = False if len(existing) > 1: module.fail_json(msg='%s existing entities have the label %s.' % (len(existing), label)) if entity: if named_ip_addresses and named_ip_addresses != entity.ip_addresses: should_delete = should_create = True # Change an existing Entity, unless there's nothing to do. should_update = agent_id and agent_id != entity.agent_id or \ (metadata and metadata != entity.metadata) if should_update and not should_delete: entity.update(agent_id, metadata) changed = True if should_delete: entity.delete() else: should_create = True if should_create: # Create a new Entity. entity = cm.create_entity(label=label, agent=agent_id, ip_addresses=named_ip_addresses, metadata=metadata) changed = True else: # Delete the existing Entities. 
for e in existing: e.delete() changed = True if entity: entity_dict = { "id": entity.id, "name": entity.name, "agent_id": entity.agent_id, } module.exit_json(changed=changed, entity=entity_dict) else: module.exit_json(changed=changed) def main(): argument_spec = rax_argument_spec() argument_spec.update( dict( state=dict(default='present', choices=['present', 'absent']), label=dict(required=True), agent_id=dict(), named_ip_addresses=dict(type='dict', default={}), metadata=dict(type='dict', default={}) ) ) module = AnsibleModule( argument_spec=argument_spec, required_together=rax_required_together() ) if not HAS_PYRAX: module.fail_json(msg='pyrax is required for this module') state = module.params.get('state') label = module.params.get('label') agent_id = module.params.get('agent_id') named_ip_addresses = module.params.get('named_ip_addresses') metadata = module.params.get('metadata') setup_rax_module(module, pyrax) cloud_monitoring(module, state, label, agent_id, named_ip_addresses, metadata) if __name__ == '__main__': main()
gpl-3.0
dbertha/odoo
addons/base_report_designer/__init__.py
421
1136
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## import wizard import base_report_designer import installer import openerp_sxw2rml # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
meomancer/field-campaigner
flask_project/campaign_manager/insights_functions/feature_attribute_completeness.py
1
5735
__author__ = 'Irwan Fathurrahman <irwan@kartoza.com>' __date__ = '17/05/17' import json from campaign_manager.insights_functions._abstract_overpass_insight_function \ import AbstractOverpassInsightFunction class FeatureAttributeCompleteness(AbstractOverpassInsightFunction): function_name = "Feature completeness" tags_capitalizaition_checks = ['name'] icon = 'list' _function_good_data = None # cleaned data nodes = {} # attribute of insight function need_feature = True need_required_attributes = True def get_ui_html_file(self): """ Get ui name in templates :return: string name of html :rtype: str """ return "feature_completeness" def get_summary_html_file(self): """ Get summary name in templates :return: string name of html :rtype: str """ return "" def get_details_html_file(self): """ Get summary name in templates :return: string name of html :rtype: str """ return "" def process_data(self, raw_data): """ Get geometry of campaign. :param raw_data: Raw data that returns by function provider :type raw_data: dict :return: list good data :rtype: dict """ list_good_data = [] self._function_good_data = [] if not raw_data or not self.feature_type: return [] try: required_attributes = self.get_required_attributes() for value in raw_data: if 'tags' not in value: continue feature_key = self.feature.split('=')[0] feature_key_found_in_tags = False for key, tag in value['tags'].items(): if key == feature_key: feature_key_found_in_tags = True if not feature_key_found_in_tags: continue self._function_good_data.append(value) self.check_feature_completeness( value, required_attributes) if value['error'] == 'False': list_good_data.append(value) except KeyError: pass return list_good_data def check_capitalization(self, key, value): # Check all uppercase or lowercase if value.isupper(): return '%s value is all uppercase' % key elif value.islower(): return '%s value is all lowercase' % key # Check mixed case for index, name in enumerate(value.split()): if name[0].islower() and not 
self.is_string_int(name[0]): # e.g : name of Feature return '%s value is mixed case' % key return None def check_feature_completeness( self, feature_data, required_attributes): """Check feature completeness. :param feature_data: Feature data :type feature_data: dict :param required_attributes: Required attributes :type required_attributes: dict """ warning_message = [] error_message = [] tags = feature_data['tags'] if isinstance(required_attributes, str): return for required_attribute, survey_values in required_attributes.items(): if required_attribute not in tags: error_message.append( '%s not found' % required_attribute) else: value_in_tag = tags[required_attribute] if survey_values: if value_in_tag not in survey_values: error_message.append( '%s is not allowed as value %s' % (value_in_tag, required_attribute)) if required_attribute in self.tags_capitalizaition_checks: warning = self.check_capitalization( required_attribute, value_in_tag) if warning: warning_message.append(warning) feature_data['error'] = 'False' feature_data['warning'] = 'False' if warning_message or error_message: feature_data['error'] = 'True' if not error_message and warning_message: feature_data['warning'] = 'True' feature_data['error_message'] = ', '.join(error_message) feature_data['warning_message'] = ', '.join(warning_message) feature_data['completeness'] = \ (len(error_message) / len(required_attribute)) * 100 def is_string_int(self, text): """Check whether the text is int or not.""" try: int(text) except ValueError: return False return True def post_process_data(self, data): """Process data regarding output. This needed for processing data for counting or grouping. 
:param data: Data that received from open street map :type data: dict :return: Processed data :rtype: dict """ percentage = '0.0' if len(self._function_good_data) > 0: percentage = '%.1f' % ( (len(data) / len(self._function_good_data)) * 100 ) output = { 'attributes': self.get_required_attributes(), 'data': self._function_good_data, 'percentage': percentage, 'complete': len(data), 'total': len(self._function_good_data), 'last_update': self.last_update, 'updating': self.is_updating, 'raw_data': self.get_function_raw_data() } return output
bsd-3-clause
commaai/openpilot
selfdrive/debug/disable_ecu.py
2
1433
#!/usr/bin/env python3 import traceback import cereal.messaging as messaging from selfdrive.car.isotp_parallel_query import IsoTpParallelQuery from selfdrive.swaglog import cloudlog EXT_DIAG_REQUEST = b'\x10\x03' EXT_DIAG_RESPONSE = b'\x50\x03' COM_CONT_REQUEST = b'\x28\x83\x03' COM_CONT_RESPONSE = b'' def disable_ecu(ecu_addr, logcan, sendcan, bus, timeout=0.1, retry=5, debug=False): print(f"ecu disable {hex(ecu_addr)} ...") for i in range(retry): try: # enter extended diagnostic session query = IsoTpParallelQuery(sendcan, logcan, bus, [ecu_addr], [EXT_DIAG_REQUEST], [EXT_DIAG_RESPONSE], debug=debug) for addr, dat in query.get_data(timeout).items(): # pylint: disable=unused-variable print("ecu communication control disable tx/rx ...") # communication control disable tx and rx query = IsoTpParallelQuery(sendcan, logcan, bus, [ecu_addr], [COM_CONT_REQUEST], [COM_CONT_RESPONSE], debug=debug) query.get_data(0) return True print(f"ecu disable retry ({i+1}) ...") except Exception: cloudlog.warning(f"ecu disable exception: {traceback.format_exc()}") return False if __name__ == "__main__": import time sendcan = messaging.pub_sock('sendcan') logcan = messaging.sub_sock('can') time.sleep(1) # honda bosch radar disable disabled = disable_ecu(0x18DAB0F1, logcan, sendcan, 1, debug=False) print(f"disabled: {disabled}")
mit
jseabold/statsmodels
statsmodels/tools/tests/test_transform_model.py
5
1784
# -*- coding: utf-8 -*- """ Created on Tue May 27 13:26:01 2014 Author: Josef Perktold License: BSD-3 """ import numpy as np from numpy.testing import assert_allclose, assert_equal from scipy import stats from statsmodels.regression.linear_model import OLS from statsmodels.tools.transform_model import StandardizeTransform def test_standardize1(): np.random.seed(123) x = 1 + np.random.randn(5, 4) transf = StandardizeTransform(x) xs1 = transf(x) assert_allclose(transf.mean, x.mean(0), rtol=1e-13) assert_allclose(transf.scale, x.std(0, ddof=1), rtol=1e-13) xs2 = stats.zscore(x, ddof=1) assert_allclose(xs1, xs2, rtol=1e-13, atol=1e-20) # check we use stored transformation xs4 = transf(2 * x) assert_allclose(xs4, (2*x - transf.mean) / transf.scale, rtol=1e-13, atol=1e-20) # affine transform does not change standardized x2 = 2 * x + np.random.randn(4) transf2 = StandardizeTransform(x2) xs3 = transf2(x2) assert_allclose(xs3, xs1, rtol=1e-13, atol=1e-20) # check constant x5 = np.column_stack((np.ones(x.shape[0]), x)) transf5 = StandardizeTransform(x5) xs5 = transf5(x5) assert_equal(transf5.const_idx, 0) assert_equal(xs5[:, 0], np.ones(x.shape[0])) assert_allclose(xs5[:, 1:], xs1, rtol=1e-13, atol=1e-20) def test_standardize_ols(): np.random.seed(123) nobs = 20 x = 1 + np.random.randn(nobs, 4) exog = np.column_stack((np.ones(nobs), x)) endog = exog.sum(1) + np.random.randn(nobs) res2 = OLS(endog, exog).fit() transf = StandardizeTransform(exog) exog_st = transf(exog) res1 = OLS(endog, exog_st).fit() params = transf.transform_params(res1.params) assert_allclose(params, res2.params, rtol=1e-13)
bsd-3-clause
Connor-R/nba_shot_charts
processing/shots_Relative_Career.py
1
3244
import requests import urllib import csv import os import sys from time import time from py_data_getter import data_getter from py_db import db db = db('nba_shots') def initiate(): print "-------------------------" print "shots_Relative_Career.py" start_time = time() process() end_time = time() elapsed_time = float(end_time - start_time) print "time elapsed (in seconds): " + str(elapsed_time) print "time elapsed (in minutes): " + str(elapsed_time/60.0) print "shots_Relative_Career.py" print "-------------------------" def process(): for _type in ('Player', 'Team'): print '\t' + _type if _type == 'Player': _join = 'JOIN players USING (player_id)\n' _career = 'CONCAT(GREATEST(1996, from_year),to_year)' else: _join = '' _career = "'1'" query = """SELECT %s_id, %s AS career, season_type, c.shot_zone_basic, c.shot_zone_area, all_games AS games, SUM(a.attempts) AS attempts, IFNULL(100*(SUM(b.attempts*b.zone_pct/c.zone_pct)/SUM(b.attempts)),0) AS zone_pct_plus, IFNULL(100*(SUM(b.attempts*b.efg/c.efg)/SUM(b.attempts)),0) AS ZONE_efg_plus, IFNULL(SUM(ZONE_paa),0) as ZONE_paa, IFNULL(SUM(ZONE_paa)/all_games,0) as ZONE_paa_per_game, IFNULL(100*(SUM(b.attempts*b.efg/d.efg)/SUM(b.attempts)),0) AS efg_plus, IFNULL(SUM(paa),0) AS paa, IFNULL(SUM(paa)/all_games,0) AS paa_per_game, IFNULL(SUM(par),0) AS par, IFNULL(SUM(par)/all_games,0) AS par_per_game FROM shots_%s_Relative_Year a %sJOIN shots_%s_Distribution_Year b USING (%s_id, season_id, season_type, shot_zone_basic, shot_zone_area) JOIN shots_League_Distribution_Year c USING (season_id, season_type, shot_zone_basic, shot_zone_area) JOIN shots_League_Distribution_Year d USING (season_id, season_type) JOIN( SELECT %s_id, season_type, SUM(games) AS all_games FROM shots_%s_Breakdown WHERE shot_zone_basic = 'all' AND shot_zone_area = 'all' GROUP BY %s_id, season_type ) g USING (%s_id, season_type) WHERE d.shot_zone_basic = 'all' AND d.shot_zone_area = 'all' GROUP BY %s_id, season_type, shot_zone_basic, shot_zone_area """ q = query 
% (_type, _career, _type, _join, _type, _type, _type, _type, _type, _type, _type) # raw_input(q) res = db.query(q) entries = [] _id = '%s_id' % (_type.lower()) for row in res: type_id, season_id, season_type, z_basic, z_area, games, attempts, z_plus, ZONE_efg, ZONE_paa, ZONE_paag, efg, paa, paag, par, parg = row entry = {_id:type_id, "season_id":season_id, "season_type":season_type, "shot_zone_basic":z_basic, "shot_zone_area":z_area, "games":games, "attempts":attempts, "zone_pct_plus":z_plus, "ZONE_efg_plus":ZONE_efg, "ZONE_paa":ZONE_paa, "ZONE_paa_per_game":ZONE_paag, "efg_plus":efg, "paa":paa, "paa_per_game":paag, "par":par, "par_per_game":parg} entries.append(entry) table = "shots_%s_Relative_Career" % (_type) if entries != []: for i in range(0, len(entries), 1000): db.insertRowDict(entries[i: i + 1000], table, insertMany=True, replace=True, rid=0,debug=1) db.conn.commit() if __name__ == "__main__": initiate()
mit
qvicksilver/ansible
lib/ansible/runner/lookup_plugins/together.py
174
2135
# (c) 2013, Bradley Young <young.bradley@gmail.com> # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. import ansible.utils as utils from ansible.utils import safe_eval import ansible.errors as errors from itertools import izip_longest def flatten(terms): ret = [] for term in terms: if isinstance(term, list): ret.extend(term) elif isinstance(term, tuple): ret.extend(term) else: ret.append(term) return ret class LookupModule(object): """ Transpose a list of arrays: [1, 2, 3], [4, 5, 6] -> [1, 4], [2, 5], [3, 6] Replace any empty spots in 2nd array with None: [1, 2], [3] -> [1, 3], [2, None] """ def __init__(self, basedir=None, **kwargs): self.basedir = basedir def __lookup_injects(self, terms, inject): results = [] for x in terms: intermediate = utils.listify_lookup_plugin_terms(x, self.basedir, inject) results.append(intermediate) return results def run(self, terms, inject=None, **kwargs): # this code is common with 'items.py' consider moving to utils if we need it again terms = utils.listify_lookup_plugin_terms(terms, self.basedir, inject) terms = self.__lookup_injects(terms, inject) my_list = terms[:] if len(my_list) == 0: raise errors.AnsibleError("with_together requires at least one element in each list") return [flatten(x) for x in izip_longest(*my_list, fillvalue=None)]
gpl-3.0
paran0ids0ul/pupy
pupy/packages/windows/x86/psutil/_psbsd.py
72
15012
#!/usr/bin/env python # Copyright (c) 2009, Giampaolo Rodola'. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """FreeBSD platform implementation.""" import errno import functools import os import xml.etree.ElementTree as ET from collections import namedtuple from . import _common from . import _psposix from . import _psutil_bsd as cext from . import _psutil_posix as cext_posix from ._common import conn_tmap, usage_percent, sockfam_to_enum from ._common import socktype_to_enum __extra__all__ = [] # --- constants PROC_STATUSES = { cext.SSTOP: _common.STATUS_STOPPED, cext.SSLEEP: _common.STATUS_SLEEPING, cext.SRUN: _common.STATUS_RUNNING, cext.SIDL: _common.STATUS_IDLE, cext.SWAIT: _common.STATUS_WAITING, cext.SLOCK: _common.STATUS_LOCKED, cext.SZOMB: _common.STATUS_ZOMBIE, } TCP_STATUSES = { cext.TCPS_ESTABLISHED: _common.CONN_ESTABLISHED, cext.TCPS_SYN_SENT: _common.CONN_SYN_SENT, cext.TCPS_SYN_RECEIVED: _common.CONN_SYN_RECV, cext.TCPS_FIN_WAIT_1: _common.CONN_FIN_WAIT1, cext.TCPS_FIN_WAIT_2: _common.CONN_FIN_WAIT2, cext.TCPS_TIME_WAIT: _common.CONN_TIME_WAIT, cext.TCPS_CLOSED: _common.CONN_CLOSE, cext.TCPS_CLOSE_WAIT: _common.CONN_CLOSE_WAIT, cext.TCPS_LAST_ACK: _common.CONN_LAST_ACK, cext.TCPS_LISTEN: _common.CONN_LISTEN, cext.TCPS_CLOSING: _common.CONN_CLOSING, cext.PSUTIL_CONN_NONE: _common.CONN_NONE, } PAGESIZE = os.sysconf("SC_PAGE_SIZE") AF_LINK = cext_posix.AF_LINK # extend base mem ntuple with BSD-specific memory metrics svmem = namedtuple( 'svmem', ['total', 'available', 'percent', 'used', 'free', 'active', 'inactive', 'buffers', 'cached', 'shared', 'wired']) scputimes = namedtuple( 'scputimes', ['user', 'nice', 'system', 'idle', 'irq']) pextmem = namedtuple('pextmem', ['rss', 'vms', 'text', 'data', 'stack']) pmmap_grouped = namedtuple( 'pmmap_grouped', 'path rss, private, ref_count, shadow_count') pmmap_ext = namedtuple( 'pmmap_ext', 'addr, perms path rss, private, ref_count, 
shadow_count') # set later from __init__.py NoSuchProcess = None ZombieProcess = None AccessDenied = None TimeoutExpired = None def virtual_memory(): """System virtual memory as a namedtuple.""" mem = cext.virtual_mem() total, free, active, inactive, wired, cached, buffers, shared = mem avail = inactive + cached + free used = active + wired + cached percent = usage_percent((total - avail), total, _round=1) return svmem(total, avail, percent, used, free, active, inactive, buffers, cached, shared, wired) def swap_memory(): """System swap memory as (total, used, free, sin, sout) namedtuple.""" total, used, free, sin, sout = [x * PAGESIZE for x in cext.swap_mem()] percent = usage_percent(used, total, _round=1) return _common.sswap(total, used, free, percent, sin, sout) def cpu_times(): """Return system per-CPU times as a namedtuple""" user, nice, system, idle, irq = cext.cpu_times() return scputimes(user, nice, system, idle, irq) if hasattr(cext, "per_cpu_times"): def per_cpu_times(): """Return system CPU times as a namedtuple""" ret = [] for cpu_t in cext.per_cpu_times(): user, nice, system, idle, irq = cpu_t item = scputimes(user, nice, system, idle, irq) ret.append(item) return ret else: # XXX # Ok, this is very dirty. # On FreeBSD < 8 we cannot gather per-cpu information, see: # https://github.com/giampaolo/psutil/issues/226 # If num cpus > 1, on first call we return single cpu times to avoid a # crash at psutil import time. 
# Next calls will fail with NotImplementedError def per_cpu_times(): if cpu_count_logical() == 1: return [cpu_times()] if per_cpu_times.__called__: raise NotImplementedError("supported only starting from FreeBSD 8") per_cpu_times.__called__ = True return [cpu_times()] per_cpu_times.__called__ = False def cpu_count_logical(): """Return the number of logical CPUs in the system.""" return cext.cpu_count_logical() def cpu_count_physical(): """Return the number of physical CPUs in the system.""" # From the C module we'll get an XML string similar to this: # http://manpages.ubuntu.com/manpages/precise/man4/smp.4freebsd.html # We may get None in case "sysctl kern.sched.topology_spec" # is not supported on this BSD version, in which case we'll mimic # os.cpu_count() and return None. ret = None s = cext.cpu_count_phys() if s is not None: # get rid of padding chars appended at the end of the string index = s.rfind("</groups>") if index != -1: s = s[:index + 9] root = ET.fromstring(s) try: ret = len(root.findall('group/children/group/cpu')) or None finally: # needed otherwise it will memleak root.clear() if not ret: # If logical CPUs are 1 it's obvious we'll have only 1 # physical CPU. 
if cpu_count_logical() == 1: return 1 return ret def boot_time(): """The system boot time expressed in seconds since the epoch.""" return cext.boot_time() def disk_partitions(all=False): retlist = [] partitions = cext.disk_partitions() for partition in partitions: device, mountpoint, fstype, opts = partition if device == 'none': device = '' if not all: if not os.path.isabs(device) or not os.path.exists(device): continue ntuple = _common.sdiskpart(device, mountpoint, fstype, opts) retlist.append(ntuple) return retlist def users(): retlist = [] rawlist = cext.users() for item in rawlist: user, tty, hostname, tstamp = item if tty == '~': continue # reboot or shutdown nt = _common.suser(user, tty or None, hostname, tstamp) retlist.append(nt) return retlist def net_connections(kind): if kind not in _common.conn_tmap: raise ValueError("invalid %r kind argument; choose between %s" % (kind, ', '.join([repr(x) for x in conn_tmap]))) families, types = conn_tmap[kind] ret = set() rawlist = cext.net_connections() for item in rawlist: fd, fam, type, laddr, raddr, status, pid = item # TODO: apply filter at C level if fam in families and type in types: try: status = TCP_STATUSES[status] except KeyError: # XXX: Not sure why this happens. I saw this occurring # with IPv6 sockets opened by 'vim'. Those sockets # have a very short lifetime so maybe the kernel # can't initialize their status? 
status = TCP_STATUSES[cext.PSUTIL_CONN_NONE] fam = sockfam_to_enum(fam) type = socktype_to_enum(type) nt = _common.sconn(fd, fam, type, laddr, raddr, status, pid) ret.add(nt) return list(ret) def net_if_stats(): """Get NIC stats (isup, duplex, speed, mtu).""" names = net_io_counters().keys() ret = {} for name in names: isup, duplex, speed, mtu = cext_posix.net_if_stats(name) if hasattr(_common, 'NicDuplex'): duplex = _common.NicDuplex(duplex) ret[name] = _common.snicstats(isup, duplex, speed, mtu) return ret pids = cext.pids pid_exists = _psposix.pid_exists disk_usage = _psposix.disk_usage net_io_counters = cext.net_io_counters disk_io_counters = cext.disk_io_counters net_if_addrs = cext_posix.net_if_addrs def wrap_exceptions(fun): """Decorator which translates bare OSError exceptions into NoSuchProcess and AccessDenied. """ @functools.wraps(fun) def wrapper(self, *args, **kwargs): try: return fun(self, *args, **kwargs) except OSError as err: # support for private module import if (NoSuchProcess is None or AccessDenied is None or ZombieProcess is None): raise if err.errno == errno.ESRCH: if not pid_exists(self.pid): raise NoSuchProcess(self.pid, self._name) else: raise ZombieProcess(self.pid, self._name, self._ppid) if err.errno in (errno.EPERM, errno.EACCES): raise AccessDenied(self.pid, self._name) raise return wrapper class Process(object): """Wrapper class around underlying C implementation.""" __slots__ = ["pid", "_name", "_ppid"] def __init__(self, pid): self.pid = pid self._name = None self._ppid = None @wrap_exceptions def name(self): return cext.proc_name(self.pid) @wrap_exceptions def exe(self): return cext.proc_exe(self.pid) @wrap_exceptions def cmdline(self): return cext.proc_cmdline(self.pid) @wrap_exceptions def terminal(self): tty_nr = cext.proc_tty_nr(self.pid) tmap = _psposix._get_terminal_map() try: return tmap[tty_nr] except KeyError: return None @wrap_exceptions def ppid(self): return cext.proc_ppid(self.pid) @wrap_exceptions def uids(self): 
real, effective, saved = cext.proc_uids(self.pid) return _common.puids(real, effective, saved) @wrap_exceptions def gids(self): real, effective, saved = cext.proc_gids(self.pid) return _common.pgids(real, effective, saved) @wrap_exceptions def cpu_times(self): user, system = cext.proc_cpu_times(self.pid) return _common.pcputimes(user, system) @wrap_exceptions def memory_info(self): rss, vms = cext.proc_memory_info(self.pid)[:2] return _common.pmem(rss, vms) @wrap_exceptions def memory_info_ex(self): return pextmem(*cext.proc_memory_info(self.pid)) @wrap_exceptions def create_time(self): return cext.proc_create_time(self.pid) @wrap_exceptions def num_threads(self): return cext.proc_num_threads(self.pid) @wrap_exceptions def num_ctx_switches(self): return _common.pctxsw(*cext.proc_num_ctx_switches(self.pid)) @wrap_exceptions def threads(self): rawlist = cext.proc_threads(self.pid) retlist = [] for thread_id, utime, stime in rawlist: ntuple = _common.pthread(thread_id, utime, stime) retlist.append(ntuple) return retlist @wrap_exceptions def connections(self, kind='inet'): if kind not in conn_tmap: raise ValueError("invalid %r kind argument; choose between %s" % (kind, ', '.join([repr(x) for x in conn_tmap]))) families, types = conn_tmap[kind] rawlist = cext.proc_connections(self.pid, families, types) ret = [] for item in rawlist: fd, fam, type, laddr, raddr, status = item fam = sockfam_to_enum(fam) type = socktype_to_enum(type) status = TCP_STATUSES[status] nt = _common.pconn(fd, fam, type, laddr, raddr, status) ret.append(nt) return ret @wrap_exceptions def wait(self, timeout=None): try: return _psposix.wait_pid(self.pid, timeout) except _psposix.TimeoutExpired: # support for private module import if TimeoutExpired is None: raise raise TimeoutExpired(timeout, self.pid, self._name) @wrap_exceptions def nice_get(self): return cext_posix.getpriority(self.pid) @wrap_exceptions def nice_set(self, value): return cext_posix.setpriority(self.pid, value) @wrap_exceptions def 
status(self): code = cext.proc_status(self.pid) if code in PROC_STATUSES: return PROC_STATUSES[code] # XXX is this legit? will we even ever get here? return "?" @wrap_exceptions def io_counters(self): rc, wc, rb, wb = cext.proc_io_counters(self.pid) return _common.pio(rc, wc, rb, wb) nt_mmap_grouped = namedtuple( 'mmap', 'path rss, private, ref_count, shadow_count') nt_mmap_ext = namedtuple( 'mmap', 'addr, perms path rss, private, ref_count, shadow_count') # FreeBSD < 8 does not support functions based on kinfo_getfile() # and kinfo_getvmmap() if hasattr(cext, 'proc_open_files'): @wrap_exceptions def open_files(self): """Return files opened by process as a list of namedtuples.""" rawlist = cext.proc_open_files(self.pid) return [_common.popenfile(path, fd) for path, fd in rawlist] @wrap_exceptions def cwd(self): """Return process current working directory.""" # sometimes we get an empty string, in which case we turn # it into None return cext.proc_cwd(self.pid) or None @wrap_exceptions def memory_maps(self): return cext.proc_memory_maps(self.pid) @wrap_exceptions def num_fds(self): """Return the number of file descriptors opened by this process.""" return cext.proc_num_fds(self.pid) else: def _not_implemented(self): raise NotImplementedError("supported only starting from FreeBSD 8") open_files = _not_implemented proc_cwd = _not_implemented memory_maps = _not_implemented num_fds = _not_implemented @wrap_exceptions def cpu_affinity_get(self): return cext.proc_cpu_affinity_get(self.pid) @wrap_exceptions def cpu_affinity_set(self, cpus): # Pre-emptively check if CPUs are valid because the C # function has a weird behavior in case of invalid CPUs, # see: https://github.com/giampaolo/psutil/issues/586 allcpus = tuple(range(len(per_cpu_times()))) for cpu in cpus: if cpu not in allcpus: raise ValueError("invalid CPU #%i (choose between %s)" % (cpu, allcpus)) try: cext.proc_cpu_affinity_set(self.pid, cpus) except OSError as err: # 'man cpuset_setaffinity' about EDEADLK: # 
<<the call would leave a thread without a valid CPU to run # on because the set does not overlap with the thread's # anonymous mask>> if err.errno in (errno.EINVAL, errno.EDEADLK): for cpu in cpus: if cpu not in allcpus: raise ValueError("invalid CPU #%i (choose between %s)" % (cpu, allcpus)) raise
bsd-3-clause
txm/make-good
django/contrib/gis/utils/ogrinspect.py
321
8939
""" This module is for inspecting OGR data sources and generating either models for GeoDjango and/or mapping dictionaries for use with the `LayerMapping` utility. Author: Travis Pinney, Dane Springmeyer, & Justin Bronn """ from itertools import izip # Requires GDAL to use. from django.contrib.gis.gdal import DataSource from django.contrib.gis.gdal.field import OFTDate, OFTDateTime, OFTInteger, OFTReal, OFTString, OFTTime def mapping(data_source, geom_name='geom', layer_key=0, multi_geom=False): """ Given a DataSource, generates a dictionary that may be used for invoking the LayerMapping utility. Keyword Arguments: `geom_name` => The name of the geometry field to use for the model. `layer_key` => The key for specifying which layer in the DataSource to use; defaults to 0 (the first layer). May be an integer index or a string identifier for the layer. `multi_geom` => Boolean (default: False) - specify as multigeometry. """ if isinstance(data_source, basestring): # Instantiating the DataSource from the string. data_source = DataSource(data_source) elif isinstance(data_source, DataSource): pass else: raise TypeError('Data source parameter must be a string or a DataSource object.') # Creating the dictionary. _mapping = {} # Generating the field name for each field in the layer. for field in data_source[layer_key].fields: mfield = field.lower() if mfield[-1:] == '_': mfield += 'field' _mapping[mfield] = field gtype = data_source[layer_key].geom_type if multi_geom and gtype.num in (1, 2, 3): prefix = 'MULTI' else: prefix = '' _mapping[geom_name] = prefix + str(gtype).upper() return _mapping def ogrinspect(*args, **kwargs): """ Given a data source (either a string or a DataSource object) and a string model name this function will generate a GeoDjango model. 
Usage: >>> from django.contrib.gis.utils import ogrinspect >>> ogrinspect('/path/to/shapefile.shp','NewModel') ...will print model definition to stout or put this in a python script and use to redirect the output to a new model like: $ python generate_model.py > myapp/models.py # generate_model.py from django.contrib.gis.utils import ogrinspect shp_file = 'data/mapping_hacks/world_borders.shp' model_name = 'WorldBorders' print ogrinspect(shp_file, model_name, multi_geom=True, srid=4326, geom_name='shapes', blank=True) Required Arguments `datasource` => string or DataSource object to file pointer `model name` => string of name of new model class to create Optional Keyword Arguments `geom_name` => For specifying the model name for the Geometry Field. Otherwise will default to `geom` `layer_key` => The key for specifying which layer in the DataSource to use; defaults to 0 (the first layer). May be an integer index or a string identifier for the layer. `srid` => The SRID to use for the Geometry Field. If it can be determined, the SRID of the datasource is used. `multi_geom` => Boolean (default: False) - specify as multigeometry. `name_field` => String - specifies a field name to return for the `__unicode__` function (which will be generated if specified). `imports` => Boolean (default: True) - set to False to omit the `from django.contrib.gis.db import models` code from the autogenerated models thus avoiding duplicated imports when building more than one model by batching ogrinspect() `decimal` => Boolean or sequence (default: False). When set to True all generated model fields corresponding to the `OFTReal` type will be `DecimalField` instead of `FloatField`. A sequence of specific field names to generate as `DecimalField` may also be used. `blank` => Boolean or sequence (default: False). When set to True all generated model fields will have `blank=True`. If the user wants to give specific fields to have blank, then a list/tuple of OGR field names may be used. 
`null` => Boolean (default: False) - When set to True all generated model fields will have `null=True`. If the user wants to specify give specific fields to have null, then a list/tuple of OGR field names may be used. Note: This routine calls the _ogrinspect() helper to do the heavy lifting. """ return '\n'.join(s for s in _ogrinspect(*args, **kwargs)) def _ogrinspect(data_source, model_name, geom_name='geom', layer_key=0, srid=None, multi_geom=False, name_field=None, imports=True, decimal=False, blank=False, null=False): """ Helper routine for `ogrinspect` that generates GeoDjango models corresponding to the given data source. See the `ogrinspect` docstring for more details. """ # Getting the DataSource if isinstance(data_source, str): data_source = DataSource(data_source) elif isinstance(data_source, DataSource): pass else: raise TypeError('Data source parameter must be a string or a DataSource object.') # Getting the layer corresponding to the layer key and getting # a string listing of all OGR fields in the Layer. layer = data_source[layer_key] ogr_fields = layer.fields # Creating lists from the `null`, `blank`, and `decimal` # keyword arguments. def process_kwarg(kwarg): if isinstance(kwarg, (list, tuple)): return [s.lower() for s in kwarg] elif kwarg: return [s.lower() for s in ogr_fields] else: return [] null_fields = process_kwarg(null) blank_fields = process_kwarg(blank) decimal_fields = process_kwarg(decimal) # Gets the `null` and `blank` keywords for the given field name. def get_kwargs_str(field_name): kwlist = [] if field_name.lower() in null_fields: kwlist.append('null=True') if field_name.lower() in blank_fields: kwlist.append('blank=True') if kwlist: return ', ' + ', '.join(kwlist) else: return '' # For those wishing to disable the imports. if imports: yield '# This is an auto-generated Django model module created by ogrinspect.' 
yield 'from django.contrib.gis.db import models' yield '' yield 'class %s(models.Model):' % model_name for field_name, width, precision, field_type in izip(ogr_fields, layer.field_widths, layer.field_precisions, layer.field_types): # The model field name. mfield = field_name.lower() if mfield[-1:] == '_': mfield += 'field' # Getting the keyword args string. kwargs_str = get_kwargs_str(field_name) if field_type is OFTReal: # By default OFTReals are mapped to `FloatField`, however, they # may also be mapped to `DecimalField` if specified in the # `decimal` keyword. if field_name.lower() in decimal_fields: yield ' %s = models.DecimalField(max_digits=%d, decimal_places=%d%s)' % (mfield, width, precision, kwargs_str) else: yield ' %s = models.FloatField(%s)' % (mfield, kwargs_str[2:]) elif field_type is OFTInteger: yield ' %s = models.IntegerField(%s)' % (mfield, kwargs_str[2:]) elif field_type is OFTString: yield ' %s = models.CharField(max_length=%s%s)' % (mfield, width, kwargs_str) elif field_type is OFTDate: yield ' %s = models.DateField(%s)' % (mfield, kwargs_str[2:]) elif field_type is OFTDateTime: yield ' %s = models.DateTimeField(%s)' % (mfield, kwargs_str[2:]) elif field_type is OFTDate: yield ' %s = models.TimeField(%s)' % (mfield, kwargs_str[2:]) else: raise TypeError('Unknown field type %s in %s' % (field_type, mfield)) # TODO: Autodetection of multigeometry types (see #7218). gtype = layer.geom_type if multi_geom and gtype.num in (1, 2, 3): geom_field = 'Multi%s' % gtype.django else: geom_field = gtype.django # Setting up the SRID keyword string. if srid is None: if layer.srs is None: srid_str = 'srid=-1' else: srid = layer.srs.srid if srid is None: srid_str = 'srid=-1' elif srid == 4326: # WGS84 is already the default. 
srid_str = '' else: srid_str = 'srid=%s' % srid else: srid_str = 'srid=%s' % srid yield ' %s = models.%s(%s)' % (geom_name, geom_field, srid_str) yield ' objects = models.GeoManager()' if name_field: yield '' yield ' def __unicode__(self): return self.%s' % name_field
bsd-3-clause
bradleyayers/django-attest
django_attest/runner.py
1
1875
# coding: utf-8 __all__ = () try: # Django 1.2 doesn't have this from django.utils import unittest except ImportError: pass else: from attest import capture_output, TestFailure, TestResult, get_reporter_by_name from django.test.simple import DjangoTestSuiteRunner __all__ += ("Runner", ) class DummyProgress(object): start = update = finish = lambda *a, **k: None class TextResult(unittest.TextTestResult): def _exc_info_to_string(self, err, test): if err[0] is TestFailure: # retrieve stdout/stderr if self.buffer: stdout = sys.stdout.getvalue().splitlines() stderr = sys.stderr.getvalue().splitlines() else: stdout, stderr = [], [] # pull the test function out of the TestCase. test_func = getattr(test, test._testMethodName) result = TestResult(test=test_func, exc_info=err, time=0, error=err[1], stdout=stdout, stderr=stderr) reporter = get_reporter_by_name('auto')() reporter.begin(()) reporter.progress = DummyProgress() reporter.failure(result) with capture_output() as (out, err): try: reporter.finished() except SystemExit: pass return '\n'.join(out + err) return super(TextResult, self)._exc_info_to_string(err, test) class TextRunner(unittest.TextTestRunner): resultclass = TextResult class Runner(DjangoTestSuiteRunner): def run_suite(self, suite, **kwargs): runner = TextRunner(verbosity=self.verbosity, failfast=self.failfast) return runner.run(suite)
bsd-2-clause
tushar7795/MicroBlog
flask/lib/python2.7/site-packages/whoosh/lang/snowball/italian.py
96
9125
from .bases import _StandardStemmer

from whoosh.compat import u


class ItalianStemmer(_StandardStemmer):
    """
    The Italian Snowball stemmer.

    :cvar __vowels: The Italian vowels.
    :type __vowels: unicode
    :cvar __step0_suffixes: Suffixes to be deleted in step 0 of the algorithm.
    :type __step0_suffixes: tuple
    :cvar __step1_suffixes: Suffixes to be deleted in step 1 of the algorithm.
    :type __step1_suffixes: tuple
    :cvar __step2_suffixes: Suffixes to be deleted in step 2 of the algorithm.
    :type __step2_suffixes: tuple
    :note: A detailed description of the Italian
           stemming algorithm can be found under
           http://snowball.tartarus.org/algorithms/italian/stemmer.html
    """

    __vowels = u("aeiou\xE0\xE8\xEC\xF2\xF9")
    # Suffix tuples list longer suffixes before shorter ones, so the first
    # ``endswith`` match found in a loop is the longest applicable suffix.
    __step0_suffixes = ('gliela', 'gliele', 'glieli', 'glielo', 'gliene',
                        'sene', 'mela', 'mele', 'meli', 'melo', 'mene',
                        'tela', 'tele', 'teli', 'telo', 'tene', 'cela',
                        'cele', 'celi', 'celo', 'cene', 'vela', 'vele',
                        'veli', 'velo', 'vene', 'gli', 'ci', 'la', 'le',
                        'li', 'lo', 'mi', 'ne', 'si', 'ti', 'vi')
    __step1_suffixes = ('atrice', 'atrici', 'azione', 'azioni', 'uzione',
                        'uzioni', 'usione', 'usioni', 'amento', 'amenti',
                        'imento', 'imenti', 'amente', 'abile', 'abili',
                        'ibile', 'ibili', 'mente', 'atore', 'atori',
                        'logia', 'logie', 'anza', 'anze', 'iche', 'ichi',
                        'ismo', 'ismi', 'ista', 'iste', 'isti',
                        u('ist\xE0'), u('ist\xE8'), u('ist\xEC'), 'ante',
                        'anti', 'enza', 'enze', 'ico', 'ici', 'ica', 'ice',
                        'oso', 'osi', 'osa', 'ose', u('it\xE0'), 'ivo',
                        'ivi', 'iva', 'ive')
    __step2_suffixes = ('erebbero', 'irebbero', 'assero', 'assimo',
                        'eranno', 'erebbe', 'eremmo', 'ereste', 'eresti',
                        'essero', 'iranno', 'irebbe', 'iremmo', 'ireste',
                        'iresti', 'iscano', 'iscono', 'issero', 'arono',
                        'avamo', 'avano', 'avate', 'eremo', 'erete',
                        'erono', 'evamo', 'evano', 'evate', 'iremo',
                        'irete', 'irono', 'ivamo', 'ivano', 'ivate',
                        'ammo', 'ando', 'asse', 'assi', 'emmo', 'enda',
                        'ende', 'endi', 'endo', 'erai', 'erei', 'Yamo',
                        'iamo', 'immo', 'irai', 'irei', 'isca', 'isce',
                        'isci', 'isco', 'ano', 'are', 'ata', 'ate', 'ati',
                        'ato', 'ava', 'avi', 'avo', u('er\xE0'), 'ere',
                        u('er\xF2'), 'ete', 'eva', 'evi', 'evo',
                        u('ir\xE0'), 'ire', u('ir\xF2'), 'ita', 'ite',
                        'iti', 'ito', 'iva', 'ivi', 'ivo', 'ono', 'uta',
                        'ute', 'uti', 'uto', 'ar', 'ir')

    def stem(self, word):
        """
        Stem an Italian word and return the stemmed form.

        :param word: The word that is stemmed.
        :type word: str or unicode
        :return: The stemmed form.
        :rtype: unicode
        """
        word = word.lower()
        step1_success = False

        # All acute accents are replaced by grave accents.
        word = (word.replace(u("\xE1"), u("\xE0"))
                    .replace(u("\xE9"), u("\xE8"))
                    .replace(u("\xED"), u("\xEC"))
                    .replace(u("\xF3"), u("\xF2"))
                    .replace(u("\xFA"), u("\xF9")))

        # Every occurrence of 'u' after 'q'
        # is put into upper case.
        for i in range(1, len(word)):
            if word[i - 1] == "q" and word[i] == "u":
                word = "".join((word[:i], "U", word[i + 1:]))

        # Every occurrence of 'u' and 'i'
        # between vowels is put into upper case.
        # (Upper case marks the letter as a non-vowel for the region
        # computations below; step 3b lowercases them again.)
        for i in range(1, len(word) - 1):
            if word[i - 1] in self.__vowels and word[i + 1] in self.__vowels:
                if word[i] == "u":
                    word = "".join((word[:i], "U", word[i + 1:]))
                elif word[i] == "i":
                    word = "".join((word[:i], "I", word[i + 1:]))

        # r1/r2/rv are the standard Snowball word regions; every truncation
        # of ``word`` below is mirrored on the regions that still matter.
        r1, r2 = self._r1r2_standard(word, self.__vowels)
        rv = self._rv_standard(word, self.__vowels)

        # STEP 0: Attached pronoun
        for suffix in self.__step0_suffixes:
            if rv.endswith(suffix):
                if rv[-len(suffix) - 4:-len(suffix)] in ("ando", "endo"):
                    word = word[:-len(suffix)]
                    r1 = r1[:-len(suffix)]
                    r2 = r2[:-len(suffix)]
                    rv = rv[:-len(suffix)]
                elif (rv[-len(suffix) - 2:-len(suffix)] in
                      ("ar", "er", "ir")):
                    word = "".join((word[:-len(suffix)], "e"))
                    r1 = "".join((r1[:-len(suffix)], "e"))
                    r2 = "".join((r2[:-len(suffix)], "e"))
                    rv = "".join((rv[:-len(suffix)], "e"))
                break

        # STEP 1: Standard suffix removal
        for suffix in self.__step1_suffixes:
            if word.endswith(suffix):
                if suffix == "amente" and r1.endswith(suffix):
                    step1_success = True
                    word = word[:-6]
                    r2 = r2[:-6]
                    rv = rv[:-6]

                    if r2.endswith("iv"):
                        word = word[:-2]
                        r2 = r2[:-2]
                        rv = rv[:-2]

                        if r2.endswith("at"):
                            word = word[:-2]
                            rv = rv[:-2]

                    elif r2.endswith(("os", "ic")):
                        word = word[:-2]
                        rv = rv[:-2]

                    elif r2.endswith("abil"):
                        word = word[:-4]
                        rv = rv[:-4]

                elif (suffix in ("amento", "amenti",
                                 "imento", "imenti") and
                      rv.endswith(suffix)):
                    step1_success = True
                    word = word[:-6]
                    rv = rv[:-6]

                elif r2.endswith(suffix):
                    step1_success = True
                    if suffix in ("azione", "azioni", "atore", "atori"):
                        word = word[:-len(suffix)]
                        r2 = r2[:-len(suffix)]
                        rv = rv[:-len(suffix)]

                        if r2.endswith("ic"):
                            word = word[:-2]
                            rv = rv[:-2]

                    elif suffix in ("logia", "logie"):
                        word = word[:-2]
                        # NOTE(review): rv is rebuilt from ``word`` here, not
                        # truncated from ``rv`` as in the sibling branches;
                        # this matches the upstream port — verify against the
                        # Snowball reference before changing.
                        rv = word[:-2]

                    elif suffix in ("uzione", "uzioni",
                                    "usione", "usioni"):
                        word = word[:-5]
                        rv = rv[:-5]

                    elif suffix in ("enza", "enze"):
                        word = "".join((word[:-2], "te"))
                        rv = "".join((rv[:-2], "te"))

                    elif suffix == u("it\xE0"):
                        word = word[:-3]
                        r2 = r2[:-3]
                        rv = rv[:-3]

                        if r2.endswith(("ic", "iv")):
                            word = word[:-2]
                            rv = rv[:-2]

                        elif r2.endswith("abil"):
                            word = word[:-4]
                            rv = rv[:-4]

                    elif suffix in ("ivo", "ivi", "iva", "ive"):
                        word = word[:-3]
                        r2 = r2[:-3]
                        rv = rv[:-3]

                        if r2.endswith("at"):
                            word = word[:-2]
                            r2 = r2[:-2]
                            rv = rv[:-2]

                            if r2.endswith("ic"):
                                word = word[:-2]
                                rv = rv[:-2]
                    else:
                        word = word[:-len(suffix)]
                        rv = rv[:-len(suffix)]
                break

        # STEP 2: Verb suffixes
        if not step1_success:
            for suffix in self.__step2_suffixes:
                if rv.endswith(suffix):
                    word = word[:-len(suffix)]
                    rv = rv[:-len(suffix)]
                    break

        # STEP 3a
        if rv.endswith(("a", "e", "i", "o", u("\xE0"), u("\xE8"),
                        u("\xEC"), u("\xF2"))):
            word = word[:-1]
            rv = rv[:-1]

            if rv.endswith("i"):
                word = word[:-1]
                rv = rv[:-1]

        # STEP 3b
        if rv.endswith(("ch", "gh")):
            word = word[:-1]

        word = word.replace("I", "i").replace("U", "u")

        return word
bsd-3-clause
nebril/fuel-web
fuel_agent/fuel_agent/utils/grub.py
2
9231
# Copyright 2014 Mirantis, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import re
import shutil

from oslo.config import cfg

from fuel_agent import errors
from fuel_agent.openstack.common import log as logging
from fuel_agent.utils import utils

LOG = logging.getLogger(__name__)

gu_opts = [
    cfg.IntOpt(
        'grub_timeout',
        default=5,
        help='Timeout in secs for GRUB'
    ),
]

CONF = cfg.CONF
CONF.register_opts(gu_opts)


def guess_grub2_conf(chroot=''):
    """Return the grub2 config path whose parent directory exists."""
    for filename in ('/boot/grub/grub.cfg', '/boot/grub2/grub.cfg'):
        if os.path.isdir(os.path.dirname(chroot + filename)):
            return filename


def guess_grub2_default(chroot=''):
    """Return the grub2 defaults file path (Debian or RH layout)."""
    for filename in ('/etc/default/grub', '/etc/sysconfig/grub'):
        if os.path.isfile(chroot + filename):
            return filename


def guess_grub2_mkconfig(chroot=''):
    """Return the first grub(2)-mkconfig binary found inside chroot."""
    for grub_mkconfig in \
            ('/sbin/grub-mkconfig', '/sbin/grub2-mkconfig',
             '/usr/sbin/grub-mkconfig', '/usr/sbin/grub2-mkconfig'):
        if os.path.isfile(chroot + grub_mkconfig):
            return grub_mkconfig


def guess_grub_version(chroot=''):
    """Return 1 for legacy GRUB (0.97), otherwise 2.

    Runs ``grub-install --version`` (inside chroot when given) and looks
    for the legacy version marker in its output.
    """
    grub_install = guess_grub_install(chroot=chroot)
    LOG.debug('Trying to run %s --version' % grub_install)
    cmd = [grub_install, '--version']
    if chroot:
        cmd[:0] = ['chroot', chroot]
    result = utils.execute(*cmd)
    # Membership test instead of the previous ``find('0.97') > 0``, which
    # would misreport version 2 if '0.97' appeared at offset 0.
    version = 1 if '0.97' in result[0] else 2
    LOG.debug('Looks like grub version is %s' % version)
    return version


def guess_grub(chroot=''):
    """Return the path of the legacy grub shell, raising if absent."""
    for grub in ('/sbin/grub', '/usr/sbin/grub'):
        LOG.debug('Looking for grub: trying %s' % grub)
        if os.path.isfile(chroot + grub):
            LOG.debug('grub found: %s' % grub)
            return grub
    raise errors.GrubUtilsError('grub not found')


def guess_grub_install(chroot=''):
    """Return the path of grub-install/grub2-install, raising if absent."""
    for grub_install in ('/sbin/grub-install', '/sbin/grub2-install',
                         '/usr/sbin/grub-install', '/usr/sbin/grub2-install'):
        LOG.debug('Looking for grub-install: trying %s' % grub_install)
        if os.path.isfile(chroot + grub_install):
            LOG.debug('grub-install found: %s' % grub_install)
            return grub_install
    raise errors.GrubUtilsError('grub-install not found')


def guess_grub1_datadir(chroot='', arch='x86_64'):
    """Return the legacy grub data dir matching ``arch`` under chroot."""
    LOG.debug('Looking for grub data directory')
    for d in os.listdir(chroot + '/usr/share/grub'):
        if arch in d:
            LOG.debug('Looks like grub data directory '
                      'is /usr/share/grub/%s' % d)
            return '/usr/share/grub/' + d


def guess_kernel(chroot='', regexp=None):
    """Tries to guess kernel by regexp

    :param chroot: Path to chroot
    :param regexp: (String) Regular expression (must have python syntax).
        Default is r'^vmlinuz.*'
    """
    kernel = utils.guess_filename(
        path=os.path.join(chroot, 'boot'),
        regexp=(regexp or r'^vmlinuz.*'))

    if kernel:
        return kernel

    raise errors.GrubUtilsError('Error while trying to find kernel: '
                                'regexp=%s' % regexp)


def guess_initrd(chroot='', regexp=None):
    """Tries to guess initrd by regexp

    :param chroot: Path to chroot
    :param regexp: (String) Regular expression (must have python syntax).
        Default is r'^(initrd|initramfs).*'
    """
    initrd = utils.guess_filename(
        path=os.path.join(chroot, 'boot'),
        regexp=(regexp or r'^(initrd|initramfs).*'))

    if initrd:
        return initrd

    raise errors.GrubUtilsError('Error while trying to find initrd: '
                                'regexp=%s' % regexp)


def grub1_install(install_devices, boot_device, chroot=''):
    """Install legacy grub stage1 on every device in ``install_devices``.

    :param boot_device: partition device holding /boot (must be a
        partition, not a whole disk).
    """
    match = re.search(r'(.+?)(p?)(\d*)$', boot_device)
    # Checking whether boot device is a partition
    # !!! It must be a partition not a whole disk. !!!
    if not match.group(3):
        raise errors.GrubUtilsError(
            'Error while installing legacy grub: '
            'boot device must be a partition')
    boot_disk = match.group(1)
    # grub counts partitions from 0, the kernel from 1.
    boot_part = str(int(match.group(3)) - 1)
    grub1_stage1(chroot=chroot)
    for install_device in install_devices:
        grub1_mbr(install_device, boot_disk, boot_part, chroot=chroot)


def grub1_mbr(install_device, boot_disk, boot_part, chroot=''):
    """Write legacy grub into the MBR of ``install_device`` via grub shell."""
    # The device on which we are going to install
    # stage1 needs to be mapped as hd0, otherwise system won't be able to
    # boot.
    batch = 'device (hd0) {0}\n'.format(install_device)
    # That is much easier to use grub-install, but unfortunately
    # it is not able to install bootloader on huge disks.
    # Instead we set drive geometry manually to avoid grub register
    # overlapping. We set it so as to make grub
    # thinking that disk size is equal to 1G.
    # 130 cylinders * (16065 * 512 = 8225280 bytes) = 1G
    # We also assume that boot partition is in the beginning
    # of disk between 0 and 1G.
    batch += 'geometry (hd0) 130 255 63\n'

    if boot_disk != install_device:
        batch += 'device (hd1) {0}\n'.format(boot_disk)
        batch += 'geometry (hd1) 130 255 63\n'
        batch += 'root (hd1,{0})\n'.format(boot_part)
    else:
        batch += 'root (hd0,{0})\n'.format(boot_part)

    batch += 'setup (hd0)\n'
    batch += 'quit\n'

    with open(chroot + '/tmp/grub.batch', 'wb') as f:
        LOG.debug('Grub batch content: \n%s' % batch)
        f.write(batch)

    script = 'cat /tmp/grub.batch | {0} --no-floppy --batch'.format(
        guess_grub(chroot=chroot))
    with open(chroot + '/tmp/grub.sh', 'wb') as f:
        LOG.debug('Grub script content: \n%s' % script)
        f.write(script)
    os.chmod(chroot + '/tmp/grub.sh', 0o755)
    cmd = ['/tmp/grub.sh']
    if chroot:
        cmd[:0] = ['chroot', chroot]
    stdout, stderr = utils.execute(*cmd, run_as_root=True,
                                   check_exit_code=[0])
    LOG.debug('Grub script stdout: \n%s' % stdout)
    LOG.debug('Grub script stderr: \n%s' % stderr)


def grub1_stage1(chroot=''):
    """Refresh stage1/stage1_5/stage2 files in /boot/grub from the data dir."""
    LOG.debug('Installing grub stage1 files')
    for f in os.listdir(chroot + '/boot/grub'):
        if f in ('stage1', 'stage2') or 'stage1_5' in f:
            LOG.debug('Removing: %s' % chroot + os.path.join('/boot/grub', f))
            os.remove(chroot + os.path.join('/boot/grub', f))
    grub1_datadir = guess_grub1_datadir(chroot=chroot)
    for f in os.listdir(chroot + grub1_datadir):
        if f in ('stage1', 'stage2') or 'stage1_5' in f:
            LOG.debug('Copying %s from %s to /boot/grub' % (f, grub1_datadir))
            shutil.copy(chroot + os.path.join(grub1_datadir, f),
                        chroot + os.path.join('/boot/grub', f))


def grub1_cfg(kernel=None, initrd=None,
              kernel_params='', chroot='', grub_timeout=CONF.grub_timeout):
    """Write a minimal legacy grub.conf with a single default entry."""
    if not kernel:
        kernel = guess_kernel(chroot=chroot)

    if not initrd:
        initrd = guess_initrd(chroot=chroot)

    config = """
default=0
timeout={grub_timeout}
title Default ({kernel})
        kernel /{kernel} {kernel_params}
        initrd /{initrd}
    """.format(kernel=kernel, initrd=initrd,
               kernel_params=kernel_params,
               grub_timeout=grub_timeout)

    with open(chroot + '/boot/grub/grub.conf', 'wb') as f:
        f.write(config)


def grub2_install(install_devices, chroot=''):
    """Run grub(2)-install for every device in ``install_devices``."""
    grub_install = guess_grub_install(chroot=chroot)
    for install_device in install_devices:
        cmd = [grub_install, install_device]
        if chroot:
            cmd[:0] = ['chroot', chroot]
        utils.execute(*cmd, run_as_root=True, check_exit_code=[0])


def grub2_cfg(kernel_params='', chroot='', grub_timeout=CONF.grub_timeout):
    """Patch the grub2 defaults file and regenerate grub.cfg."""
    grub_defaults = chroot + guess_grub2_default(chroot=chroot)
    rekerparams = re.compile(r'^.*GRUB_CMDLINE_LINUX=.*')
    retimeout = re.compile(r'^.*GRUB_HIDDEN_TIMEOUT=.*')
    new_content = ''
    with open(grub_defaults) as f:
        for line in f:
            line = rekerparams.sub(
                'GRUB_CMDLINE_LINUX="{kernel_params}"'.
                format(kernel_params=kernel_params), line)
            line = retimeout.sub('GRUB_HIDDEN_TIMEOUT={grub_timeout}'.
                                 format(grub_timeout=grub_timeout), line)
            new_content += line
    # NOTE(agordeev): explicitly add record fail timeout, in order to
    # prevent user confirmation appearing if unexpected reboot occured.
    new_content += '\nGRUB_RECORDFAIL_TIMEOUT={grub_timeout}\n'.\
        format(grub_timeout=grub_timeout)
    with open(grub_defaults, 'wb') as f:
        f.write(new_content)
    cmd = [guess_grub2_mkconfig(chroot), '-o', guess_grub2_conf(chroot)]
    if chroot:
        cmd[:0] = ['chroot', chroot]
    utils.execute(*cmd, run_as_root=True)
apache-2.0
SEMT2Group1/Node.js_SmartCar_API_Server
server/restful.py
1
1389
from flask import Flask, request
from flask_restful import Resource, Api, reqparse
from json import dumps

import account

app = Flask(__name__)
api = Api(app)

# Parser that parses incoming messages.  All arguments are optional at the
# parser level; each endpoint validates the fields it actually needs.
userParser = reqparse.RequestParser()
userParser.add_argument('username', help='The username the user wants to register.')
userParser.add_argument('password', help='The password the user wants to register.')
userParser.add_argument('token', help='User token to be verified by the system')


def _credentials():
    """Parse username/password from the current request.

    Returns ``(username, bytePassword, None)`` on success, or
    ``(None, None, (body, 400))`` when either field is missing, the last
    element being ready to return from a flask-restful method.
    """
    args = userParser.parse_args()
    username = args['username']
    password = args['password']
    if username is None or password is None:
        # Previously a missing field crashed on password.encode() and
        # produced a 500; reject it explicitly with a 400 instead.
        return None, None, ({'message': 'username and password are required'}, 400)
    return username, password.encode('utf-8'), None


# The Login endpoint
class Login(Resource):
    def post(self):
        username, bytePassword, error = _credentials()
        if error is not None:
            return error
        return account.login(username, bytePassword)


# The Create User endpoint.
class CreateAccount(Resource):
    def post(self):
        username, bytePassword, error = _credentials()
        if error is not None:
            return error
        return account.createAccount(username, bytePassword)


api.add_resource(Login, '/login')
api.add_resource(CreateAccount, '/create-account')

if __name__ == '__main__':
    # Listen on all interfaces, port 10000 (was int("10000")).
    # NOTE(review): debug=True must not be enabled in production.
    app.run("0.0.0.0", port=10000, debug=True)
mit
nzavagli/UnrealPy
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/construct-2.5.2/construct/protocols/layer3/ipv6.py
9
1239
""" Internet Protocol version 6 (TCP/IP protocol stack) """ from construct import * from ipv4 import ProtocolEnum from binascii import unhexlify import six class Ipv6AddressAdapter(Adapter): def _encode(self, obj, context): if bytes is str: return "".join(part.decode("hex") for part in obj.split(":")) else: return bytes(int(part, 16) for part in obj.split(":")) def _decode(self, obj, context): if bytes is str: return ":".join(b.encode("hex") for b in obj) else: return ":".join("%02x" % (b,) for b in obj) def Ipv6Address(name): return Ipv6AddressAdapter(Bytes(name, 16)) ipv6_header = Struct("ip_header", EmbeddedBitStruct( OneOf(Bits("version", 4), [6]), Bits("traffic_class", 8), Bits("flow_label", 20), ), UBInt16("payload_length"), ProtocolEnum(UBInt8("protocol")), UBInt8("hoplimit"), Alias("ttl", "hoplimit"), Ipv6Address("source"), Ipv6Address("destination"), ) if __name__ == "__main__": o = ipv6_header.parse(six.b("\x6f\xf0\x00\x00\x01\x02\x06\x80" "0123456789ABCDEF" "FEDCBA9876543210" )) print (o) print (repr(ipv6_header.build(o)))
mit
nzavagli/UnrealPy
UnrealPyEmbed/Development/Python/2015.08.07-Python2710-x64-Source-vs2015/Python27/Source/boto-2.38.0/boto/glacier/writer.py
153
9668
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Thomas Parslow http://almostobsolete.net/
# Copyright (c) 2012 Robie Basak <robie@justgohome.co.uk>
# Tree hash implementation from Aaron Brady bradya@gmail.com
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import hashlib

from boto.glacier.utils import chunk_hashes, tree_hash, bytes_to_hex
# This import is provided for backwards compatibility.  This function is
# now in boto.glacier.utils, but any existing code can still import
# this directly from this module.
from boto.glacier.utils import compute_hashes_from_fileobj


_ONE_MEGABYTE = 1024 * 1024


class _Partitioner(object):
    """Convert variable-size writes into part-sized writes

    Call write(data) with variable sized data as needed to write all data.
    Call flush() after all data is written.

    This instance will call send_fn(part_data) as needed in part_size
    pieces, except for the final part which may be shorter than part_size.
    Make sure to call flush() to ensure that a short final part results in
    a final send_fn call.
    """

    def __init__(self, part_size, send_fn):
        self.part_size = part_size
        self.send_fn = send_fn
        self._buffer = []
        self._buffer_size = 0

    def write(self, data):
        if data == b'':
            return
        self._buffer.append(data)
        self._buffer_size += len(data)
        # A buffer of exactly part_size bytes is deliberately not sent
        # here; flush() (or a later write) takes care of it.
        while self._buffer_size > self.part_size:
            self._send_part()

    def _send_part(self):
        data = b''.join(self._buffer)
        # Put back any data remaining over the part size into the
        # buffer
        if len(data) > self.part_size:
            self._buffer = [data[self.part_size:]]
            self._buffer_size = len(self._buffer[0])
        else:
            self._buffer = []
            self._buffer_size = 0
        # The part we will send
        part = data[:self.part_size]
        self.send_fn(part)

    def flush(self):
        if self._buffer_size > 0:
            self._send_part()


class _Uploader(object):
    """Upload to a Glacier upload_id.

    Call upload_part for each part (in any order) and then close to
    complete the upload.
    """

    def __init__(self, vault, upload_id, part_size, chunk_size=_ONE_MEGABYTE):
        self.vault = vault
        self.upload_id = upload_id
        self.part_size = part_size
        self.chunk_size = chunk_size
        self.archive_id = None

        self._uploaded_size = 0
        self._tree_hashes = []

        self.closed = False

    def _insert_tree_hash(self, index, raw_tree_hash):
        """Record ``raw_tree_hash`` at ``index``, growing the list as needed.

        Parts may be uploaded in any order, so the list is padded with
        None placeholders up to ``index``.
        """
        list_length = len(self._tree_hashes)
        if index >= list_length:
            # Pad with enough None slots to make ``index`` addressable.
            # (Was ``list_length - index + 1``, which is negative -- so it
            # pads nothing and the assignment below raises IndexError --
            # whenever a part arrives more than one position ahead of the
            # parts seen so far.  Sequential uploads are unaffected.)
            self._tree_hashes.extend([None] * (index - list_length + 1))
        self._tree_hashes[index] = raw_tree_hash

    def upload_part(self, part_index, part_data):
        """Upload a part to Glacier.

        :param part_index: part number where 0 is the first part
        :param part_data: data to upload corresponding to this part
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")
        # Create a request and sign it
        part_tree_hash = tree_hash(chunk_hashes(part_data, self.chunk_size))
        self._insert_tree_hash(part_index, part_tree_hash)

        hex_tree_hash = bytes_to_hex(part_tree_hash)
        linear_hash = hashlib.sha256(part_data).hexdigest()
        start = self.part_size * part_index
        content_range = (start,
                         (start + len(part_data)) - 1)
        response = self.vault.layer1.upload_part(self.vault.name,
                                                 self.upload_id,
                                                 linear_hash,
                                                 hex_tree_hash,
                                                 content_range, part_data)
        response.read()
        self._uploaded_size += len(part_data)

    def skip_part(self, part_index, part_tree_hash, part_length):
        """Skip uploading of a part.

        The final close call needs to calculate the tree hash and total size
        of all uploaded data, so this is the mechanism for resume
        functionality to provide it without actually uploading the data
        again.

        :param part_index: part number where 0 is the first part
        :param part_tree_hash: binary tree_hash of part being skipped
        :param part_length: length of part being skipped
        """
        if self.closed:
            raise ValueError("I/O operation on closed file")
        self._insert_tree_hash(part_index, part_tree_hash)
        self._uploaded_size += part_length

    def close(self):
        if self.closed:
            return
        if None in self._tree_hashes:
            raise RuntimeError("Some parts were not uploaded.")
        # Complete the multiplart glacier upload
        hex_tree_hash = bytes_to_hex(tree_hash(self._tree_hashes))
        response = self.vault.layer1.complete_multipart_upload(
            self.vault.name, self.upload_id, hex_tree_hash,
            self._uploaded_size)
        self.archive_id = response['ArchiveId']
        self.closed = True


def generate_parts_from_fobj(fobj, part_size):
    """Yield successive ``part_size`` chunks read from ``fobj``."""
    data = fobj.read(part_size)
    while data:
        # NOTE(review): encode() presumes fobj yields text; a file opened
        # in binary mode on Python 3 would fail here (bytes has no
        # encode) -- confirm the intended input mode before changing.
        yield data.encode('utf-8')
        data = fobj.read(part_size)


def resume_file_upload(vault, upload_id, part_size, fobj, part_hash_map,
                       chunk_size=_ONE_MEGABYTE):
    """Resume upload of a file already part-uploaded to Glacier.

    The resumption of an upload where the part-uploaded section is empty
    is a valid degenerate case that this function can handle.

    In this case, part_hash_map should be an empty dict.

    :param vault: boto.glacier.vault.Vault object.
    :param upload_id: existing Glacier upload id of upload being resumed.
    :param part_size: part size of existing upload.
    :param fobj: file object containing local data to resume. This must
        read from the start of the entire upload, not just from the point
        being resumed. Use fobj.seek(0) to achieve this if necessary.
    :param part_hash_map: {part_index: part_tree_hash, ...} of data already
        uploaded. Each supplied part_tree_hash will be verified and the part
        re-uploaded if there is a mismatch.
    :param chunk_size: chunk size of tree hash calculation. This must be
        1 MiB for Amazon.
    """
    uploader = _Uploader(vault, upload_id, part_size, chunk_size)
    for part_index, part_data in enumerate(
            generate_parts_from_fobj(fobj, part_size)):
        part_tree_hash = tree_hash(chunk_hashes(part_data, chunk_size))
        if (part_index not in part_hash_map or
                part_hash_map[part_index] != part_tree_hash):
            uploader.upload_part(part_index, part_data)
        else:
            uploader.skip_part(part_index, part_tree_hash, len(part_data))
    uploader.close()
    return uploader.archive_id


class Writer(object):
    """
    Presents a file-like object for writing to a Amazon Glacier
    Archive. The data is written using the multi-part upload API.
    """

    def __init__(self, vault, upload_id, part_size, chunk_size=_ONE_MEGABYTE):
        self.uploader = _Uploader(vault, upload_id, part_size, chunk_size)
        self.partitioner = _Partitioner(part_size, self._upload_part)
        self.closed = False
        self.next_part_index = 0

    def write(self, data):
        if self.closed:
            raise ValueError("I/O operation on closed file")
        self.partitioner.write(data)

    def _upload_part(self, part_data):
        self.uploader.upload_part(self.next_part_index, part_data)
        self.next_part_index += 1

    def close(self):
        if self.closed:
            return
        self.partitioner.flush()
        self.uploader.close()
        self.closed = True

    def get_archive_id(self):
        self.close()
        return self.uploader.archive_id

    @property
    def current_tree_hash(self):
        """
        Returns the current tree hash for the data that's been written
        **so far**.

        Only once the writing is complete is the final tree hash returned.
        """
        return tree_hash(self.uploader._tree_hashes)

    @property
    def current_uploaded_size(self):
        """
        Returns the current uploaded size for the data that's been written
        **so far**.

        Only once the writing is complete is the final uploaded size
        returned.
        """
        return self.uploader._uploaded_size

    @property
    def upload_id(self):
        return self.uploader.upload_id

    @property
    def vault(self):
        return self.uploader.vault
mit
Incoming5643/-tg-station
SQL/admin_import_2018-02-03.py
61
4853
#Python 3+ Script for importing admins.txt and admin_ranks.txt made by Jordie0608
#
#Before starting ensure you have installed the mysqlclient package https://github.com/PyMySQL/mysqlclient-python
#It can be downloaded from command line with pip:
#pip install mysqlclient
#And that you have run the most recent commands listed in database_changelog.txt
#
#To view the parameters for this script, execute it with the argument --help
#All the positional arguments are required, remember to include prefixes in your table names if you use them
#An example of the command used to execute this script from powershell:
#python admin_import_2018-02-03.py "localhost" "root" "password" "feedback" "SS13_admin" "SS13_admin_ranks"
#
#This script performs no error-correction, improper configurations of admins.txt or admin_ranks.txt will cause either breaking exceptions or invalid table rows
#It's safe to run this script with your game server(s) active.

import argparse
import re
import sys
import string

if sys.version_info[0] < 3:
    raise Exception("Python must be at least version 3 for this script.")


def parse_text_flags(text, previous):
    """Convert a textual flag spec (e.g. ``"+ADMIN -FUN *@"``) into the
    tuple ``(flags, exclude_flags, can_edit_flags)``.

    ``previous`` is the tuple parsed for the previous rank; the ``@`` /
    ``prev`` pseudo-flags copy the corresponding value from it.  Each
    result is clamped to the 16-bit range 0..65535.
    """
    flag_values = {"BUILDMODE": 1, "BUILD": 1, "ADMIN": 2, "REJUVINATE": 2,
                   "REJUV": 2, "BAN": 4, "FUN": 8, "SERVER": 16, "DEBUG": 32,
                   "POSSESS": 64, "PERMISSIONS": 128, "RIGHTS": 128,
                   "STEALTH": 256, "POLL": 512, "VAREDIT": 1024,
                   "SOUNDS": 2048, "SOUND": 2048, "SPAWN": 4096,
                   "CREATE": 4096, "AUTOLOGIN": 8192, "AUTOADMIN": 8192,
                   "DBRANKS": 16384}
    flags_int = 8192  # AUTOLOGIN is granted by default
    exclude_flags_int = 0
    can_edit_flags_int = 0
    flags = text.split(" ")
    if flags:
        for flag in flags:
            # ``==`` instead of the previous ``is`` comparisons: identity
            # checks on strings only worked by CPython interning accident.
            sign = flag[:1]
            name = flag[1:]
            if name in ("@", "prev"):
                # Copy the matching value from the previously parsed rank.
                if sign == "+":
                    flags_int = previous[0]
                elif sign == "-":
                    exclude_flags_int = previous[1]
                elif sign == "*":
                    can_edit_flags_int = previous[2]
                continue
            if name in ("EVERYTHING", "HOST", "ALL"):
                if sign == "+":
                    flags_int = 65535
                elif sign == "-":
                    exclude_flags_int = 65535
                elif sign == "*":
                    can_edit_flags_int = 65535
                continue
            if name in flag_values:
                if sign == "+":
                    flags_int += flag_values[name]
                elif sign == "-":
                    exclude_flags_int += flag_values[name]
                elif sign == "*":
                    can_edit_flags_int += flag_values[name]
    flags_int = max(min(65535, flags_int), 0)
    exclude_flags_int = max(min(65535, exclude_flags_int), 0)
    can_edit_flags_int = max(min(65535, can_edit_flags_int), 0)
    return flags_int, exclude_flags_int, can_edit_flags_int


def main():
    """Parse CLI arguments, read admin_ranks.txt and admins.txt, and insert
    the corresponding rows into the configured tables.
    """
    # Imported here so the pure parser above is usable without the driver.
    import MySQLdb

    parser = argparse.ArgumentParser()
    parser.add_argument("address", help="MySQL server address (use localhost for the current computer)")
    parser.add_argument("username", help="MySQL login username")
    parser.add_argument("password", help="MySQL login password")
    parser.add_argument("database", help="Database name")
    parser.add_argument("admintable", help="Name of the current admin table (remember prefixes if you use them)")
    parser.add_argument("rankstable", help="Name of the current admin ranks (remember prefixes)")
    args = parser.parse_args()

    db = MySQLdb.connect(host=args.address, user=args.username,
                         passwd=args.password, db=args.database)
    cursor = db.cursor()
    ranks_table = args.rankstable
    admin_table = args.admintable
    # Punctuation to strip from rank names; @, - and _ stay allowed.
    ckeyExformat = re.sub("@|-|_", " ", string.punctuation)

    with open("..\\config\\admin_ranks.txt") as rank_file:
        previous = 0
        for line in rank_file:
            if not line.strip() or line.startswith("#"):
                continue
            matches = re.match(r"(.+)\b\s+=\s*(.*)", line)
            rank = "".join(c for c in matches.group(1) if c not in ckeyExformat)
            flags = parse_text_flags(matches.group(2), previous)
            previous = flags
            # Table names cannot be parameterized; the values go through
            # the driver so config contents cannot inject SQL.
            cursor.execute(
                "INSERT INTO {0} (rank, flags, exclude_flags, can_edit_flags) "
                "VALUES (%s, %s, %s, %s)".format(ranks_table),
                (rank, flags[0], flags[1], flags[2]))

    with open("..\\config\\admins.txt") as admins_file:
        # Punctuation to strip from ckeys; only @ stays allowed.
        ckeyformat = string.punctuation.replace("@", " ")
        for line in admins_file:
            if not line.strip() or line.startswith("#"):
                continue
            matches = re.match(r"(.+)\b\s+=\s+(.+)", line)
            ckey = "".join(c for c in matches.group(1) if c not in ckeyformat).lower()
            rank = "".join(c for c in matches.group(2) if c not in ckeyExformat)
            cursor.execute(
                "INSERT INTO {0} (ckey, rank) VALUES (%s, %s)".format(admin_table),
                (ckey, rank))

    db.commit()
    cursor.close()
    print("Import complete.")


if __name__ == "__main__":
    main()
agpl-3.0
XMPPwocky/rust
src/etc/snapshot.py
37
8006
# Copyright 2011-2015 The Rust Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution and at
# http://rust-lang.org/COPYRIGHT.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.

import re
import os
import sys
import glob
import tarfile
import shutil
import subprocess
import distutils.spawn

try:
    import hashlib
    sha_func = hashlib.sha1
except ImportError:
    import sha
    sha_func = sha.new


def scrub(b):
    """Decode ASCII bytes to str on Python 3; pass anything else through."""
    if sys.version_info >= (3,) and type(b) == bytes:
        return b.decode('ascii')
    else:
        return b

src_dir = scrub(os.getenv("CFG_SRC_DIR"))
if not src_dir:
    raise Exception("missing env var CFG_SRC_DIR")

snapshotfile = os.path.join(src_dir, "src", "snapshots.txt")
download_url_base = "https://static.rust-lang.org/stage0-snapshots"
download_dir_base = "dl"
download_unpack_base = os.path.join(download_dir_base, "unpack")

snapshot_files = {
    "linux": ["bin/rustc"],
    "macos": ["bin/rustc"],
    "winnt": ["bin/rustc.exe"],
    "freebsd": ["bin/rustc"],
    "dragonfly": ["bin/rustc"],
    "bitrig": ["bin/rustc"],
    "openbsd": ["bin/rustc"],
}

winnt_runtime_deps_32 = ["libgcc_s_dw2-1.dll", "libstdc++-6.dll"]
winnt_runtime_deps_64 = ["libgcc_s_seh-1.dll", "libstdc++-6.dll"]


def parse_line(n, line):
    """Parse line ``n`` of snapshots.txt.

    Returns None for blank/'T' lines, a {"type": "file", ...} dict for
    per-platform hash lines, or a {"type": "snapshot", ...} dict for
    snapshot header lines.  Raises on anything else.
    """
    global snapshotfile

    if re.match(r"\s*$", line):
        return None

    if re.match(r"^T\s*$", line):
        return None

    match = re.match(r"\s+([\w_-]+) ([a-fA-F\d]{40})\s*$", line)
    if match:
        return {"type": "file",
                "platform": match.group(1),
                "hash": match.group(2).lower()}

    match = re.match(r"([ST]) (\d{4}-\d{2}-\d{2}) ([a-fA-F\d]+)\s*$", line)
    if not match:
        raise Exception("%s:%d:E syntax error: " % (snapshotfile, n))
    return {"type": "snapshot",
            "date": match.group(2),
            "rev": match.group(3)}


def partial_snapshot_name(date, rev, platform):
    """Snapshot tarball name without the content hash (pre-hash stage)."""
    return ("rust-stage0-%s-%s-%s.tar.bz2" %
            (date, rev, platform))


def full_snapshot_name(date, rev, platform, hsh):
    """Final snapshot tarball name, including the content hash."""
    return ("rust-stage0-%s-%s-%s-%s.tar.bz2" %
            (date, rev, platform, hsh))


def get_kernel(triple):
    """Map a target triple's OS component to a snapshot kernel name."""
    t = triple.split('-')
    if len(t) == 2:
        os_name = t[1]
    else:
        os_name = t[2]
    if os_name == "windows":
        return "winnt"
    if os_name == "darwin":
        return "macos"
    if os_name == "freebsd":
        return "freebsd"
    if os_name == "dragonfly":
        return "dragonfly"
    if os_name == "bitrig":
        return "bitrig"
    if os_name == "openbsd":
        return "openbsd"
    return "linux"


def get_cpu(triple):
    """Map a target triple's arch component to a snapshot CPU name."""
    arch = triple.split('-')[0]
    if arch == "i686":
        return "i386"
    return arch


def get_platform(triple):
    """Combine kernel and CPU into the snapshot platform identifier."""
    return "%s-%s" % (get_kernel(triple), get_cpu(triple))


def cmd_out(cmdline):
    """Run ``cmdline`` and return its stripped stdout as str."""
    p = subprocess.Popen(cmdline, stdout=subprocess.PIPE)
    return scrub(p.communicate()[0].strip())


def local_rev_info(field):
    """Return a git log format field for HEAD of the source checkout."""
    return cmd_out(["git", "--git-dir=" + os.path.join(src_dir, ".git"),
                    "log", "-n", "1",
                    "--format=%%%s" % field, "HEAD"])


def local_rev_full_sha():
    return local_rev_info("H").split()[0]


def local_rev_short_sha():
    return local_rev_info("h").split()[0]


def local_rev_committer_date():
    return local_rev_info("ci")


def get_url_to_file(u, f):
    """Download url ``u`` to path ``f`` using curl or wget."""
    # no security issue, just to stop partial download leaving a stale file
    tmpf = f + '.tmp'

    returncode = -1
    if distutils.spawn.find_executable("curl"):
        returncode = subprocess.call(["curl", "-o", tmpf, u])
    elif distutils.spawn.find_executable("wget"):
        returncode = subprocess.call(["wget", "-O", tmpf, u])

    if returncode != 0:
        try:
            os.unlink(tmpf)
        except OSError:
            pass
        raise Exception("failed to fetch url")
    os.rename(tmpf, f)


def snap_filename_hash_part(snap):
    """Extract the 40-hex-digit hash embedded in a snapshot filename."""
    match = re.match(r".*([a-fA-F\d]{40}).tar.bz2$", snap)
    if not match:
        raise Exception("unable to find hash in filename: " + snap)
    return match.group(1)


def hash_file(x):
    """Return the SHA-1 hex digest of file ``x``."""
    h = sha_func()
    # ``with`` so the handle is closed deterministically (was left open).
    with open(x, "rb") as f:
        h.update(f.read())
    return scrub(h.hexdigest())


def get_winnt_runtime_deps(platform):
    """Returns a list of paths of Rust's system runtime dependencies"""
    if platform == "winnt-x86_64":
        deps = winnt_runtime_deps_64
    else:
        deps = winnt_runtime_deps_32
    runtime_deps = []
    path_dirs = os.environ["PATH"].split(os.pathsep)
    for name in deps:
        for dir in path_dirs:
            filepath = os.path.join(dir, name)
            if os.path.isfile(filepath):
                runtime_deps.append(filepath)
                break
        else:
            raise Exception("Could not find runtime dependency: %s" % name)
    return runtime_deps


def make_snapshot(stage, triple):
    """Build the stage0 snapshot tarball for ``triple`` and return its
    final (hash-stamped) filename.
    """
    kernel = get_kernel(triple)
    platform = get_platform(triple)
    rev = local_rev_short_sha()
    date = local_rev_committer_date().split()[0]

    file0 = partial_snapshot_name(date, rev, platform)

    def in_tar_name(fn):
        # NOTE(review): returns None when fn has fewer than two path
        # components — presumably all snapshot files are nested; confirm.
        cs = re.split(r"[\\/]", fn)
        if len(cs) >= 2:
            return os.sep.join(cs[-2:])

    tar = tarfile.open(file0, "w:bz2")
    for name in snapshot_files[kernel]:
        dir = stage
        if stage == "stage1" and re.match(r"^lib/(lib)?std.*", name):
            dir = "stage0"
        fn_glob = os.path.join(triple, dir, name)
        matches = glob.glob(fn_glob)
        if not matches:
            raise Exception("Not found file with name like " + fn_glob)
        if len(matches) == 1:
            tar.add(matches[0], "rust-stage0/" + in_tar_name(matches[0]))
        else:
            raise Exception("Found stale files: \n  %s\n"
                            "Please make a clean build." % "\n  ".join(matches))

    if kernel == "winnt":
        for path in get_winnt_runtime_deps(platform):
            tar.add(path, "rust-stage0/bin/" + os.path.basename(path))
        tar.add(os.path.join(os.path.dirname(__file__), "third-party"),
                "rust-stage0/bin/third-party")

    tar.close()

    h = hash_file(file0)
    file1 = full_snapshot_name(date, rev, platform, h)
    shutil.move(file0, file1)

    return file1


def curr_snapshot_rev():
    """Return (date, rev) of the first snapshot entry in snapshots.txt."""
    i = 0
    found_snap = False
    date = None
    rev = None

    with open(snapshotfile) as f:
        for line in f.readlines():
            i += 1
            parsed = parse_line(i, line)
            if not parsed:
                continue

            if parsed["type"] == "snapshot":
                date = parsed["date"]
                rev = parsed["rev"]
                found_snap = True
                break

    if not found_snap:
        raise Exception("no snapshot entries in file")

    return (date, rev)


def determine_curr_snapshot(triple):
    """Return the full snapshot filename for ``triple``'s platform, using
    the most recent snapshot entry in snapshots.txt.
    """
    i = 0
    platform = get_platform(triple)

    found_file = False
    found_snap = False
    hsh = None
    date = None
    rev = None

    with open(snapshotfile) as f:
        for line in f.readlines():
            i += 1
            parsed = parse_line(i, line)
            if not parsed:
                continue

            if found_snap and parsed["type"] == "file":
                if parsed["platform"] == platform:
                    hsh = parsed["hash"]
                    found_file = True
                    break
            elif parsed["type"] == "snapshot":
                date = parsed["date"]
                rev = parsed["rev"]
                found_snap = True

    if not found_snap:
        raise Exception("no snapshot entries in file")

    if not found_file:
        raise Exception("no snapshot file found for platform %s, rev %s" %
                        (platform, rev))

    return full_snapshot_name(date, rev, platform, hsh)
apache-2.0
julienmalard/Tikon
tikon/datos/obs.py
1
2572
import numpy as np import pandas as pd import xarray as xr from tikon.utils import EJE_TIEMPO, EJE_PARC class Obs(object): def __init__(símismo, datos): símismo.datos = datos def fechas(símismo): tiempos = símismo.datos[EJE_TIEMPO].values if np.issubdtype(tiempos.dtype, np.datetime64): return (pd.Timestamp(tiempos.min()), pd.Timestamp(tiempos.max())), 0 else: return (None, None), tiempos.max() def proc_res(símismo, res): if not np.issubdtype(símismo.datos[EJE_TIEMPO].values.dtype, np.datetime64): res = res.copy() res[EJE_TIEMPO] = np.array( [x.days for x in pd.to_datetime(res[EJE_TIEMPO].values) - pd.to_datetime(res[EJE_TIEMPO].values[0])] ) return res @property def mód(símismo): raise NotImplementedError @property def var(símismo): raise NotImplementedError @classmethod def de_cuadro(cls, datos_pd, corresp, eje_principal, parc=None, tiempo=None, coords=None, factor=1, **argsll): if isinstance(datos_pd, str): datos_pd = pd.read_csv(datos_pd, encoding='utf8') corresp = corresp or {} for ll, v in corresp.items(): if isinstance(v, list): corresp[ll] = tuple(v) coords = { EJE_PARC: parc or EJE_PARC, EJE_TIEMPO: tiempo or EJE_TIEMPO, **(coords or {}) } coords_xr = coords.copy() for dim, crd in coords.items(): if isinstance(dim, str) and crd in datos_pd.columns: coords_xr[dim] = datos_pd[crd].unique() else: coords_xr[dim] = [crd] coords_xr[eje_principal] = list(corresp.values()) datos = xr.DataArray(np.nan, coords=coords_xr, dims=list(coords_xr)) for f in datos_pd.iterrows(): d = f[1] índs = { **{dim: d[vl] if isinstance(vl, str) and vl in d else vl for dim, vl in coords.items()}, **{eje_principal: [corresp[x] for x in list(d.axes[0]) if x in corresp]} } vals = d[[x for x in list(d.axes[0]) if x in corresp]] datos.loc[índs] = vals * factor datos.coords[EJE_PARC] = [str(prc) for prc in datos.coords[EJE_PARC].values] return cls(datos) def __contains__(símismo, itema): coords = {ll: v.values for ll, v in símismo.datos.coords.items()} return all(ll in coords and v in 
coords[ll] for ll, v in itema.items())
agpl-3.0
watonyweng/horizon
openstack_dashboard/enabled/_1020_project_overview_panel.py
39
1025
# Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # The slug of the panel to be added to HORIZON_CONFIG. Required. PANEL = 'overview' # The slug of the dashboard the PANEL associated with. Required. PANEL_DASHBOARD = 'project' # The slug of the panel group the PANEL is associated with. PANEL_GROUP = 'compute' # If set, it will update the default panel of the PANEL_DASHBOARD. DEFAULT_PANEL = 'overview' # Python panel class of the PANEL to be added. ADD_PANEL = 'openstack_dashboard.dashboards.project.overview.panel.Overview'
apache-2.0
Embisto/lightblue-0.4
build/lib/lightblue/_lightblue.py
52
19766
# Copyright (c) 2009 Bea Lam. All rights reserved. # # This file is part of LightBlue. # # LightBlue is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # LightBlue is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with LightBlue. If not, see <http://www.gnu.org/licenses/>. # Mac OS X main module implementation. import types import warnings import Foundation import AppKit import objc import _IOBluetooth import _LightAquaBlue import _lightbluecommon import _macutil import _bluetoothsockets # public attributes __all__ = ("finddevices", "findservices", "finddevicename", "selectdevice", "selectservice", "gethostaddr", "gethostclass", "socket", "advertise", "stopadvertise") # details of advertised services __advertised = {} def finddevices(getnames=True, length=10): inquiry = _SyncDeviceInquiry() inquiry.run(getnames, length) devices = inquiry.getfounddevices() return devices def findservices(addr=None, name=None, servicetype=None): if servicetype not in (_lightbluecommon.RFCOMM, _lightbluecommon.OBEX, None): raise ValueError("servicetype must be RFCOMM, OBEX or None, was %s" % \ servicetype) if addr is None: try: founddevices = finddevices() except _lightbluecommon.BluetoothError, e: msg = "findservices() failed, " +\ "error while finding devices: " + str(e) raise _lightbluecommon.BluetoothError(msg) #print founddevices addresses = [dev[0] for dev in founddevices] else: addresses = [addr] services = [] for devaddr in addresses: iobtdevice = _IOBluetooth.IOBluetoothDevice.withAddress_( _macutil.createbtdevaddr(devaddr)) try: lastseen = 
iobtdevice.getLastServicesUpdate() if lastseen is None or lastseen.timeIntervalSinceNow() < -2: # perform SDP query to update known services. # wait at least a few seconds between service discovery cos # sometimes it doesn't work if doing updates too often. # In future should have option to not do updates. serviceupdater = _SDPQueryRunner.alloc().init() try: serviceupdater.query(iobtdevice) # blocks until updated except _lightbluecommon.BluetoothError, e: msg = "findservices() couldn't get services for %s: %s" % \ (iobtdevice.getNameOrAddress(), str(e)) warnings.warn(msg) # or should I use cached services instead of warning? # but sometimes the cached ones are totally wrong. # if searching for RFCOMM, exclude OBEX services if servicetype == _lightbluecommon.RFCOMM: uuidbad = _macutil.PROTO_UUIDS.get(_lightbluecommon.OBEX) else: uuidbad = None filtered = _searchservices(iobtdevice, name=name, uuid=_macutil.PROTO_UUIDS.get(servicetype), uuidbad=uuidbad) #print "unfiltered:", iobtdevice.getServices() services.extend([_getservicetuple(s) for s in filtered]) finally: # close baseband connection (not sure if this is necessary, but # sometimes the transport connection seems to stay open?) 
iobtdevice.closeConnection() return services def finddevicename(address, usecache=True): if not _lightbluecommon._isbtaddr(address): raise TypeError("%s is not a valid bluetooth address" % str(address)) if address == gethostaddr(): return _gethostname() device = _IOBluetooth.IOBluetoothDevice.withAddress_( _macutil.createbtdevaddr(address)) if usecache: name = device.getName() if name is not None: return name # do name request with timeout of 10 seconds result = device.remoteNameRequest_withPageTimeout_(None, 10000) if result == _macutil.kIOReturnSuccess: return device.getName() raise _lightbluecommon.BluetoothError( "Could not find device name for %s" % address) ### local device ### def gethostaddr(): addr = _LightAquaBlue.BBLocalDevice.getAddressString() if addr is not None: # PyObjC returns all strings as unicode, but the address doesn't need # to be unicode cos it's just hex values return _macutil.formatdevaddr(addr) raise _lightbluecommon.BluetoothError("Cannot read local device address") def gethostclass(): cod = _LightAquaBlue.BBLocalDevice.getClassOfDevice() if cod != -1: return int(cod) raise _lightbluecommon.BluetoothError("Cannot read local device class") def _gethostname(): name = _LightAquaBlue.BBLocalDevice.getName() if name is not None: return name raise _lightbluecommon.BluetoothError("Cannot read local device name") ### socket ### def socket(proto=_lightbluecommon.RFCOMM): return _bluetoothsockets._getsocketobject(proto) ### advertising services ### def advertise(name, sock, servicetype): if not isinstance(name, types.StringTypes): raise TypeError("name must be string, was %s" % \ type(name)) # raises exception if socket is not bound boundchannelID = sock._getport() # advertise the service if servicetype == _lightbluecommon.RFCOMM: try: result, finalchannelID, servicerecordhandle = _LightAquaBlue.BBServiceAdvertiser.addRFCOMMServiceDictionary_withName_UUID_channelID_serviceRecordHandle_( 
_LightAquaBlue.BBServiceAdvertiser.serialPortProfileDictionary(), name, None, None, None) except: result, finalchannelID, servicerecordhandle = _LightAquaBlue.BBServiceAdvertiser.addRFCOMMServiceDictionary_withName_UUID_channelID_serviceRecordHandle_( _LightAquaBlue.BBServiceAdvertiser.serialPortProfileDictionary(), name, None) elif servicetype == _lightbluecommon.OBEX: try: result, finalchannelID, servicerecordhandle = _LightAquaBlue.BBServiceAdvertiser.addRFCOMMServiceDictionary_withName_UUID_channelID_serviceRecordHandle_( _LightAquaBlue.BBServiceAdvertiser.objectPushProfileDictionary(), name, None, None, None) except: result, finalchannelID, servicerecordhandle = _LightAquaBlue.BBServiceAdvertiser.addRFCOMMServiceDictionary_withName_UUID_channelID_serviceRecordHandle_( _LightAquaBlue.BBServiceAdvertiser.objectPushProfileDictionary(), name, None) else: raise ValueError("servicetype must be either RFCOMM or OBEX") if result != _macutil.kIOReturnSuccess: raise _lightbluecommon.BluetoothError( result, "Error advertising service") if boundchannelID != finalchannelID: msg = "socket bound to unavailable channel (%d), " % boundchannelID +\ "use channel value of 0 to bind to dynamically assigned channel" raise _lightbluecommon.BluetoothError(msg) # note service record handle, so that the service can be stopped later __advertised[id(sock)] = servicerecordhandle def stopadvertise(sock): if sock is None: raise TypeError("Given socket is None") servicerecordhandle = __advertised.get(id(sock)) if servicerecordhandle is None: raise _lightbluecommon.BluetoothError("no service advertised") result = _LightAquaBlue.BBServiceAdvertiser.removeService_(servicerecordhandle) if result != _macutil.kIOReturnSuccess: raise _lightbluecommon.BluetoothError( result, "Error stopping advertising of service") ### GUI ### def selectdevice(): import _IOBluetoothUI gui = _IOBluetoothUI.IOBluetoothDeviceSelectorController.deviceSelector() # try to bring GUI to foreground by setting it as floating 
panel # (if this is called from pyobjc app, it would automatically be in foreground) try: gui.window().setFloatingPanel_(True) except: pass # show the window and wait for user's selection response = gui.runModal() # problems here if transferring a lot of data?? if response == AppKit.NSRunStoppedResponse: results = gui.getResults() if len(results) > 0: # should always be > 0, but check anyway devinfo = _getdevicetuple(results[0]) # sometimes the baseband connection stays open which causes # problems with connections w so close it here, see if this fixes # it dev = _IOBluetooth.IOBluetoothDevice.withAddress_( _macutil.createbtdevaddr(devinfo[0])) if dev.isConnected(): dev.closeConnection() return devinfo # user cancelled selection return None def selectservice(): import _IOBluetoothUI gui = _IOBluetoothUI.IOBluetoothServiceBrowserController.serviceBrowserController_( _macutil.kIOBluetoothServiceBrowserControllerOptionsNone) # try to bring GUI to foreground by setting it as floating panel # (if this is called from pyobjc app, it would automatically be in foreground) try: gui.window().setFloatingPanel_(True) except: pass # show the window and wait for user's selection response = gui.runModal() if response == AppKit.NSRunStoppedResponse: results = gui.getResults() if len(results) > 0: # should always be > 0, but check anyway serviceinfo = _getservicetuple(results[0]) # sometimes the baseband connection stays open which causes # problems with connections ... so close it here, see if this fixes # it dev = _IOBluetooth.IOBluetoothDevice.deviceWithAddressString_(serviceinfo[0]) if dev.isConnected(): dev.closeConnection() return serviceinfo # user cancelled selection return None ### classes ### class _SDPQueryRunner(Foundation.NSObject): """ Convenience class for performing a synchronous or asynchronous SDP query on an IOBluetoothDevice. 
""" def query(self, device, timeout=10.0): # do SDP query err = device.performSDPQuery_(self) if err != _macutil.kIOReturnSuccess: raise _lightbluecommon.BluetoothError(err, self._errmsg(device)) # performSDPQuery_ is async, so block-wait self._queryresult = None if not _macutil.waituntil(lambda: self._queryresult is not None, timeout): raise _lightbluecommon.BluetoothError( "Timed out getting services for %s" % \ device.getNameOrAddress()) # query is now complete if self._queryresult != _macutil.kIOReturnSuccess: raise _lightbluecommon.BluetoothError( self._queryresult, self._errmsg(device)) def sdpQueryComplete_status_(self, device, status): # can't raise exception during a callback, so just keep the err value self._queryresult = status _macutil.interruptwait() sdpQueryComplete_status_ = objc.selector( sdpQueryComplete_status_, signature="v@:@i") # accept object, int def _errmsg(self, device): return "Error getting services for %s" % device.getNameOrAddress() class _SyncDeviceInquiry(object): def __init__(self): super(_SyncDeviceInquiry, self).__init__() self._inquiry = _AsyncDeviceInquiry.alloc().init() self._inquiry.cb_completed = self._inquirycomplete self._inquiring = False def run(self, getnames, duration): if self._inquiring: raise _lightbluecommon.BluetoothError( "Another inquiry in progress") # set inquiry attributes self._inquiry.updatenames = getnames self._inquiry.length = duration # start the inquiry err = self._inquiry.start() if err != _macutil.kIOReturnSuccess: raise _lightbluecommon.BluetoothError( err, "Error starting device inquiry") # if error occurs during inquiry, set _inquiryerr to the error code self._inquiryerr = _macutil.kIOReturnSuccess # wait until the inquiry is complete self._inquiring = True _macutil.waituntil(lambda: not self._inquiring) # if error occured during inquiry, raise exception if self._inquiryerr != _macutil.kIOReturnSuccess: raise _lightbluecommon.BluetoothError(self._inquiryerr, "Error during device inquiry") def 
getfounddevices(self): # return as list of device-info tuples return [_getdevicetuple(device) for device in \ self._inquiry.getfounddevices()] def _inquirycomplete(self, err, aborted): if err != 188: # no devices found self._inquiryerr = err self._inquiring = False _macutil.interruptwait() def __del__(self): self._inquiry.__del__() super(_SyncDeviceInquiry, self).__del__() # Wrapper around IOBluetoothDeviceInquiry, with python callbacks that you can # set to receive callbacks when the inquiry is started or stopped, or when it # finds a device. # # This discovery doesn't block, so it could be used in a PyObjC application # that is running an event loop. # # Properties: # - 'length': the inquiry length (seconds) # - 'updatenames': whether to update device names during the inquiry # (i.e. perform remote name requests, which will take a little longer) # class _AsyncDeviceInquiry(Foundation.NSObject): # NSObject init, not python __init__ def init(self): try: attr = _IOBluetooth.IOBluetoothDeviceInquiry except AttributeError: raise ImportError("Cannot find IOBluetoothDeviceInquiry class " +\ "to perform device discovery. 
This class was introduced in " +\ "Mac OS X 10.4, are you running an earlier version?") self = super(_AsyncDeviceInquiry, self).init() self._inquiry = \ _IOBluetooth.IOBluetoothDeviceInquiry.inquiryWithDelegate_(self) # callbacks self.cb_started = None self.cb_completed = None self.cb_founddevice = None return self # length property def _setlength(self, length): self._inquiry.setInquiryLength_(length) length = property( lambda self: self._inquiry.inquiryLength(), _setlength) # updatenames property def _setupdatenames(self, update): self._inquiry.setUpdateNewDeviceNames_(update) updatenames = property( lambda self: self._inquiry.updateNewDeviceNames(), _setupdatenames) # returns error code def start(self): return self._inquiry.start() # returns error code def stop(self): return self._inquiry.stop() # returns list of IOBluetoothDevice objects def getfounddevices(self): return self._inquiry.foundDevices() def __del__(self): super(_AsyncDeviceInquiry, self).dealloc() # # delegate methods follow (these are called by the internal # IOBluetoothDeviceInquiry object when inquiry events occur) # # - (void)deviceInquiryDeviceFound:(IOBluetoothDeviceInquiry*)sender # device:(IOBluetoothDevice*)device; def deviceInquiryDeviceFound_device_(self, inquiry, device): if self.cb_founddevice: self.cb_founddevice(device) deviceInquiryDeviceFound_device_ = objc.selector( deviceInquiryDeviceFound_device_, signature="v@:@@") # - (void)deviceInquiryComplete:error:aborted; def deviceInquiryComplete_error_aborted_(self, inquiry, err, aborted): if self.cb_completed: self.cb_completed(err, aborted) deviceInquiryComplete_error_aborted_ = objc.selector( deviceInquiryComplete_error_aborted_, signature="v@:@iB") # - (void)deviceInquiryStarted:(IOBluetoothDeviceInquiry*)sender; def deviceInquiryStarted_(self, inquiry): if self.cb_started: self.cb_started() ### utility methods ### def _searchservices(device, name=None, uuid=None, uuidbad=None): """ Searches the given IOBluetoothDevice using the 
specified parameters. Returns an empty list if the device has no services. uuid should be IOBluetoothSDPUUID object. """ if not isinstance(device, _IOBluetooth.IOBluetoothDevice): raise ValueError("device must be IOBluetoothDevice, was %s" % \ type(device)) services = [] allservices = device.getServices() if uuid: gooduuids = (uuid, ) else: gooduuids = () if uuidbad: baduuids = (uuidbad, ) else: baduuids = () if allservices is not None: for s in allservices: if gooduuids and not s.hasServiceFromArray_(gooduuids): continue if baduuids and s.hasServiceFromArray_(baduuids): continue if name is None or s.getServiceName() == name: services.append(s) return services def _getdevicetuple(iobtdevice): """ Returns an (addr, name, COD) device tuple from a IOBluetoothDevice object. """ addr = _macutil.formatdevaddr(iobtdevice.getAddressString()) name = iobtdevice.getName() cod = iobtdevice.getClassOfDevice() return (addr, name, cod) def _getservicetuple(servicerecord): """ Returns a (device-addr, service-channel, service-name) tuple from the given IOBluetoothSDPServiceRecord. """ addr = _macutil.formatdevaddr(servicerecord.getDevice().getAddressString()) name = servicerecord.getServiceName() try: result, channel = servicerecord.getRFCOMMChannelID_(None) # pyobjc 2.0 except TypeError: result, channel = servicerecord.getRFCOMMChannelID_() if result != _macutil.kIOReturnSuccess: try: result, channel = servicerecord.getL2CAPPSM_(None) # pyobjc 2.0 except: result, channel = servicerecord.getL2CAPPSM_() if result != _macutil.kIOReturnSuccess: channel = None return (addr, channel, name)
gpl-3.0
cchurch/ansible
lib/ansible/modules/windows/win_partition.py
48
4629
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2018, Varun Chopra (@chopraaa) <v@chopraaa.com> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) ANSIBLE_METADATA = { 'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community' } DOCUMENTATION = r''' --- module: win_partition version_added: '2.8' short_description: Creates, changes and removes partitions on Windows Server description: - The M(win_partition) module can create, modify or delete a partition on a disk options: state: description: - Used to specify the state of the partition. Use C(absent) to specify if a partition should be removed and C(present) to specify if the partition should be created or updated. type: str choices: [ absent, present] default: present drive_letter: description: - Used for accessing partitions if I(disk_number) and I(partition_number) are not provided. - Use C(auto) for automatically assigning a drive letter, or a letter A-Z for manually assigning a drive letter to a new partition. If not specified, no drive letter is assigned when creating a new partition. type: str disk_number: description: - Disk number is mandatory for creating new partitions. - A combination of I(disk_number) and I(partition_number) can be used to specify the partition instead of I(drive_letter) if required. type: int partition_number: description: - Used in conjunction with I(disk_number) to uniquely identify a partition. type: int partition_size: description: - Specify size of the partition in B, KB, KiB, MB, MiB, GB, GiB, TB or TiB. Use -1 to specify maximum supported size. - Partition size is mandatory for creating a new partition but not for updating or deleting a partition. - The decimal SI prefixes kilo, mega, giga, tera, etc., are powers of 10^3 = 1000. The binary prefixes kibi, mebi, gibi, tebi, etc. respectively refer to the corresponding power of 2^10 = 1024. 
Thus, a gigabyte (GB) is 1000000000 (1000^3) bytes while 1 gibibyte (GiB) is 1073741824 (1024^3) bytes. type: str read_only: description: - Make the partition read only, restricting changes from being made to the partition. type: bool active: description: - Specifies if the partition is active and can be used to start the system. This property is only valid when the disk's partition style is MBR. type: bool hidden: description: - Hides the target partition, making it undetectable by the mount manager. type: bool offline: description: - Sets the partition offline. - Adding a mount point (such as a drive letter) will cause the partition to go online again. type: bool required: no mbr_type: description: - Specify the partition's MBR type if the disk's partition style is MBR. - This only applies to new partitions. - This does not relate to the partitions file system formatting. type: str choices: [ fat12, fat16, extended, huge, ifs, fat32 ] gpt_type: description: - Specify the partition's GPT type if the disk's partition style is GPT. - This only applies to new partitions. - This does not relate to the partitions file system formatting. type: str choices: [ system_partition, microsoft_reserved, basic_data, microsoft_recovery ] notes: - A minimum Operating System Version of 6.2 is required to use this module. To check if your OS is compatible, see U(https://docs.microsoft.com/en-us/windows/desktop/sysinfo/operating-system-version). - This module cannot be used for removing the drive letter associated with a partition, initializing a disk or, file system formatting. - Idempotence works only if you're specifying a drive letter or other unique attributes such as a combination of disk number and partition number. - For more information, see U(https://msdn.microsoft.com/en-us/library/windows/desktop/hh830524.aspx). 
author: - Varun Chopra (@chopraaa) <v@chopraaa.com> ''' EXAMPLES = r''' - name: Create a partition with drive letter D and size 5 GiB win_partition: drive_letter: D partition_size: 5 GiB disk_number: 1 - name: Resize previously created partition to it's maximum size and change it's drive letter to E win_partition: drive_letter: E partition_size: -1 partition_number: 1 disk_number: 1 - name: Delete partition win_partition: disk_number: 1 partition_number: 1 state: absent ''' RETURN = r''' # '''
gpl-3.0
dulton/mpv
waftools/waf_customizations.py
22
1574
from waflib.Configure import conf @conf def get_config_header(self, defines=True, headers=False, define_prefix=''): """ Only difference is it outputs `#define VAR 0` or `#define VAR value` instead of `#undef VAR` or `#define VAR val`. """ from waflib.Tools.c_config import DEFKEYS, INCKEYS lst = [] if headers: for x in self.env[INCKEYS]: lst.append('#include <%s>' % x) if defines: for x in self.env[DEFKEYS]: val = self.is_defined(x) and self.get_define(x) or "0" lst.append('#define %s %s' % (define_prefix + x, val)) return "\n".join(lst) from waflib import TaskGen @TaskGen.extension('.m') def m_hook(self, node): """ Makes waf call the c compiler for objective-c files """ return self.create_compiled_task('c', node) def build(ctx): from waflib import Task cls = Task.classes['cprogram'] class cprogram(cls): try: run_str = cls.orig_run_str + '${LAST_LINKFLAGS}' except AttributeError: try: run_str = cls.hcode + '${LAST_LINKFLAGS}' except TypeError: run_str = cls.hcode.decode('iso8859-1') + '${LAST_LINKFLAGS}' cls = Task.classes['macplist'] class macplist(cls): def run(self): from waflib import Utils if getattr(self, 'code', None): txt = self.code else: txt = self.inputs[0].read() txt = Utils.subst_vars(txt, self.env) self.outputs[0].write(txt)
gpl-2.0
krash86/android_kernel_google_pixel
tools/perf/scripts/python/sctop.py
1996
2102
# system call top # (c) 2010, Tom Zanussi <tzanussi@gmail.com> # Licensed under the terms of the GNU GPL License version 2 # # Periodically displays system-wide system call totals, broken down by # syscall. If a [comm] arg is specified, only syscalls called by # [comm] are displayed. If an [interval] arg is specified, the display # will be refreshed every [interval] seconds. The default interval is # 3 seconds. import os, sys, thread, time sys.path.append(os.environ['PERF_EXEC_PATH'] + \ '/scripts/python/Perf-Trace-Util/lib/Perf/Trace') from perf_trace_context import * from Core import * from Util import * usage = "perf script -s sctop.py [comm] [interval]\n"; for_comm = None default_interval = 3 interval = default_interval if len(sys.argv) > 3: sys.exit(usage) if len(sys.argv) > 2: for_comm = sys.argv[1] interval = int(sys.argv[2]) elif len(sys.argv) > 1: try: interval = int(sys.argv[1]) except ValueError: for_comm = sys.argv[1] interval = default_interval syscalls = autodict() def trace_begin(): thread.start_new_thread(print_syscall_totals, (interval,)) pass def raw_syscalls__sys_enter(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, common_callchain, id, args): if for_comm is not None: if common_comm != for_comm: return try: syscalls[id] += 1 except TypeError: syscalls[id] = 1 def syscalls__sys_enter(event_name, context, common_cpu, common_secs, common_nsecs, common_pid, common_comm, id, args): raw_syscalls__sys_enter(**locals()) def print_syscall_totals(interval): while 1: clear_term() if for_comm is not None: print "\nsyscall events for %s:\n\n" % (for_comm), else: print "\nsyscall events:\n\n", print "%-40s %10s\n" % ("event", "count"), print "%-40s %10s\n" % ("----------------------------------------", \ "----------"), for id, val in sorted(syscalls.iteritems(), key = lambda(k, v): (v, k), \ reverse = True): try: print "%-40s %10d\n" % (syscall_name(id), val), except TypeError: pass syscalls.clear() time.sleep(interval)
gpl-2.0
bhargav2408/python-for-android
python3-alpha/python3-src/Lib/encodings/cp1140.py
272
13105
""" Python Character Mapping Codec cp1140 generated from 'python-mappings/CP1140.TXT' with gencodec.py. """#" import codecs ### Codec APIs class Codec(codecs.Codec): def encode(self,input,errors='strict'): return codecs.charmap_encode(input,errors,encoding_table) def decode(self,input,errors='strict'): return codecs.charmap_decode(input,errors,decoding_table) class IncrementalEncoder(codecs.IncrementalEncoder): def encode(self, input, final=False): return codecs.charmap_encode(input,self.errors,encoding_table)[0] class IncrementalDecoder(codecs.IncrementalDecoder): def decode(self, input, final=False): return codecs.charmap_decode(input,self.errors,decoding_table)[0] class StreamWriter(Codec,codecs.StreamWriter): pass class StreamReader(Codec,codecs.StreamReader): pass ### encodings module API def getregentry(): return codecs.CodecInfo( name='cp1140', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter, ) ### Decoding Table decoding_table = ( '\x00' # 0x00 -> NULL '\x01' # 0x01 -> START OF HEADING '\x02' # 0x02 -> START OF TEXT '\x03' # 0x03 -> END OF TEXT '\x9c' # 0x04 -> CONTROL '\t' # 0x05 -> HORIZONTAL TABULATION '\x86' # 0x06 -> CONTROL '\x7f' # 0x07 -> DELETE '\x97' # 0x08 -> CONTROL '\x8d' # 0x09 -> CONTROL '\x8e' # 0x0A -> CONTROL '\x0b' # 0x0B -> VERTICAL TABULATION '\x0c' # 0x0C -> FORM FEED '\r' # 0x0D -> CARRIAGE RETURN '\x0e' # 0x0E -> SHIFT OUT '\x0f' # 0x0F -> SHIFT IN '\x10' # 0x10 -> DATA LINK ESCAPE '\x11' # 0x11 -> DEVICE CONTROL ONE '\x12' # 0x12 -> DEVICE CONTROL TWO '\x13' # 0x13 -> DEVICE CONTROL THREE '\x9d' # 0x14 -> CONTROL '\x85' # 0x15 -> CONTROL '\x08' # 0x16 -> BACKSPACE '\x87' # 0x17 -> CONTROL '\x18' # 0x18 -> CANCEL '\x19' # 0x19 -> END OF MEDIUM '\x92' # 0x1A -> CONTROL '\x8f' # 0x1B -> CONTROL '\x1c' # 0x1C -> FILE SEPARATOR '\x1d' # 0x1D -> GROUP SEPARATOR '\x1e' # 0x1E -> RECORD SEPARATOR '\x1f' # 0x1F -> 
UNIT SEPARATOR '\x80' # 0x20 -> CONTROL '\x81' # 0x21 -> CONTROL '\x82' # 0x22 -> CONTROL '\x83' # 0x23 -> CONTROL '\x84' # 0x24 -> CONTROL '\n' # 0x25 -> LINE FEED '\x17' # 0x26 -> END OF TRANSMISSION BLOCK '\x1b' # 0x27 -> ESCAPE '\x88' # 0x28 -> CONTROL '\x89' # 0x29 -> CONTROL '\x8a' # 0x2A -> CONTROL '\x8b' # 0x2B -> CONTROL '\x8c' # 0x2C -> CONTROL '\x05' # 0x2D -> ENQUIRY '\x06' # 0x2E -> ACKNOWLEDGE '\x07' # 0x2F -> BELL '\x90' # 0x30 -> CONTROL '\x91' # 0x31 -> CONTROL '\x16' # 0x32 -> SYNCHRONOUS IDLE '\x93' # 0x33 -> CONTROL '\x94' # 0x34 -> CONTROL '\x95' # 0x35 -> CONTROL '\x96' # 0x36 -> CONTROL '\x04' # 0x37 -> END OF TRANSMISSION '\x98' # 0x38 -> CONTROL '\x99' # 0x39 -> CONTROL '\x9a' # 0x3A -> CONTROL '\x9b' # 0x3B -> CONTROL '\x14' # 0x3C -> DEVICE CONTROL FOUR '\x15' # 0x3D -> NEGATIVE ACKNOWLEDGE '\x9e' # 0x3E -> CONTROL '\x1a' # 0x3F -> SUBSTITUTE ' ' # 0x40 -> SPACE '\xa0' # 0x41 -> NO-BREAK SPACE '\xe2' # 0x42 -> LATIN SMALL LETTER A WITH CIRCUMFLEX '\xe4' # 0x43 -> LATIN SMALL LETTER A WITH DIAERESIS '\xe0' # 0x44 -> LATIN SMALL LETTER A WITH GRAVE '\xe1' # 0x45 -> LATIN SMALL LETTER A WITH ACUTE '\xe3' # 0x46 -> LATIN SMALL LETTER A WITH TILDE '\xe5' # 0x47 -> LATIN SMALL LETTER A WITH RING ABOVE '\xe7' # 0x48 -> LATIN SMALL LETTER C WITH CEDILLA '\xf1' # 0x49 -> LATIN SMALL LETTER N WITH TILDE '\xa2' # 0x4A -> CENT SIGN '.' # 0x4B -> FULL STOP '<' # 0x4C -> LESS-THAN SIGN '(' # 0x4D -> LEFT PARENTHESIS '+' # 0x4E -> PLUS SIGN '|' # 0x4F -> VERTICAL LINE '&' # 0x50 -> AMPERSAND '\xe9' # 0x51 -> LATIN SMALL LETTER E WITH ACUTE '\xea' # 0x52 -> LATIN SMALL LETTER E WITH CIRCUMFLEX '\xeb' # 0x53 -> LATIN SMALL LETTER E WITH DIAERESIS '\xe8' # 0x54 -> LATIN SMALL LETTER E WITH GRAVE '\xed' # 0x55 -> LATIN SMALL LETTER I WITH ACUTE '\xee' # 0x56 -> LATIN SMALL LETTER I WITH CIRCUMFLEX '\xef' # 0x57 -> LATIN SMALL LETTER I WITH DIAERESIS '\xec' # 0x58 -> LATIN SMALL LETTER I WITH GRAVE '\xdf' # 0x59 -> LATIN SMALL LETTER SHARP S (GERMAN) '!' 
# 0x5A -> EXCLAMATION MARK '$' # 0x5B -> DOLLAR SIGN '*' # 0x5C -> ASTERISK ')' # 0x5D -> RIGHT PARENTHESIS ';' # 0x5E -> SEMICOLON '\xac' # 0x5F -> NOT SIGN '-' # 0x60 -> HYPHEN-MINUS '/' # 0x61 -> SOLIDUS '\xc2' # 0x62 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX '\xc4' # 0x63 -> LATIN CAPITAL LETTER A WITH DIAERESIS '\xc0' # 0x64 -> LATIN CAPITAL LETTER A WITH GRAVE '\xc1' # 0x65 -> LATIN CAPITAL LETTER A WITH ACUTE '\xc3' # 0x66 -> LATIN CAPITAL LETTER A WITH TILDE '\xc5' # 0x67 -> LATIN CAPITAL LETTER A WITH RING ABOVE '\xc7' # 0x68 -> LATIN CAPITAL LETTER C WITH CEDILLA '\xd1' # 0x69 -> LATIN CAPITAL LETTER N WITH TILDE '\xa6' # 0x6A -> BROKEN BAR ',' # 0x6B -> COMMA '%' # 0x6C -> PERCENT SIGN '_' # 0x6D -> LOW LINE '>' # 0x6E -> GREATER-THAN SIGN '?' # 0x6F -> QUESTION MARK '\xf8' # 0x70 -> LATIN SMALL LETTER O WITH STROKE '\xc9' # 0x71 -> LATIN CAPITAL LETTER E WITH ACUTE '\xca' # 0x72 -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX '\xcb' # 0x73 -> LATIN CAPITAL LETTER E WITH DIAERESIS '\xc8' # 0x74 -> LATIN CAPITAL LETTER E WITH GRAVE '\xcd' # 0x75 -> LATIN CAPITAL LETTER I WITH ACUTE '\xce' # 0x76 -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX '\xcf' # 0x77 -> LATIN CAPITAL LETTER I WITH DIAERESIS '\xcc' # 0x78 -> LATIN CAPITAL LETTER I WITH GRAVE '`' # 0x79 -> GRAVE ACCENT ':' # 0x7A -> COLON '#' # 0x7B -> NUMBER SIGN '@' # 0x7C -> COMMERCIAL AT "'" # 0x7D -> APOSTROPHE '=' # 0x7E -> EQUALS SIGN '"' # 0x7F -> QUOTATION MARK '\xd8' # 0x80 -> LATIN CAPITAL LETTER O WITH STROKE 'a' # 0x81 -> LATIN SMALL LETTER A 'b' # 0x82 -> LATIN SMALL LETTER B 'c' # 0x83 -> LATIN SMALL LETTER C 'd' # 0x84 -> LATIN SMALL LETTER D 'e' # 0x85 -> LATIN SMALL LETTER E 'f' # 0x86 -> LATIN SMALL LETTER F 'g' # 0x87 -> LATIN SMALL LETTER G 'h' # 0x88 -> LATIN SMALL LETTER H 'i' # 0x89 -> LATIN SMALL LETTER I '\xab' # 0x8A -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK '\xbb' # 0x8B -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK '\xf0' # 0x8C -> LATIN SMALL LETTER ETH (ICELANDIC) '\xfd' # 0x8D 
-> LATIN SMALL LETTER Y WITH ACUTE '\xfe' # 0x8E -> LATIN SMALL LETTER THORN (ICELANDIC) '\xb1' # 0x8F -> PLUS-MINUS SIGN '\xb0' # 0x90 -> DEGREE SIGN 'j' # 0x91 -> LATIN SMALL LETTER J 'k' # 0x92 -> LATIN SMALL LETTER K 'l' # 0x93 -> LATIN SMALL LETTER L 'm' # 0x94 -> LATIN SMALL LETTER M 'n' # 0x95 -> LATIN SMALL LETTER N 'o' # 0x96 -> LATIN SMALL LETTER O 'p' # 0x97 -> LATIN SMALL LETTER P 'q' # 0x98 -> LATIN SMALL LETTER Q 'r' # 0x99 -> LATIN SMALL LETTER R '\xaa' # 0x9A -> FEMININE ORDINAL INDICATOR '\xba' # 0x9B -> MASCULINE ORDINAL INDICATOR '\xe6' # 0x9C -> LATIN SMALL LIGATURE AE '\xb8' # 0x9D -> CEDILLA '\xc6' # 0x9E -> LATIN CAPITAL LIGATURE AE '\u20ac' # 0x9F -> EURO SIGN '\xb5' # 0xA0 -> MICRO SIGN '~' # 0xA1 -> TILDE 's' # 0xA2 -> LATIN SMALL LETTER S 't' # 0xA3 -> LATIN SMALL LETTER T 'u' # 0xA4 -> LATIN SMALL LETTER U 'v' # 0xA5 -> LATIN SMALL LETTER V 'w' # 0xA6 -> LATIN SMALL LETTER W 'x' # 0xA7 -> LATIN SMALL LETTER X 'y' # 0xA8 -> LATIN SMALL LETTER Y 'z' # 0xA9 -> LATIN SMALL LETTER Z '\xa1' # 0xAA -> INVERTED EXCLAMATION MARK '\xbf' # 0xAB -> INVERTED QUESTION MARK '\xd0' # 0xAC -> LATIN CAPITAL LETTER ETH (ICELANDIC) '\xdd' # 0xAD -> LATIN CAPITAL LETTER Y WITH ACUTE '\xde' # 0xAE -> LATIN CAPITAL LETTER THORN (ICELANDIC) '\xae' # 0xAF -> REGISTERED SIGN '^' # 0xB0 -> CIRCUMFLEX ACCENT '\xa3' # 0xB1 -> POUND SIGN '\xa5' # 0xB2 -> YEN SIGN '\xb7' # 0xB3 -> MIDDLE DOT '\xa9' # 0xB4 -> COPYRIGHT SIGN '\xa7' # 0xB5 -> SECTION SIGN '\xb6' # 0xB6 -> PILCROW SIGN '\xbc' # 0xB7 -> VULGAR FRACTION ONE QUARTER '\xbd' # 0xB8 -> VULGAR FRACTION ONE HALF '\xbe' # 0xB9 -> VULGAR FRACTION THREE QUARTERS '[' # 0xBA -> LEFT SQUARE BRACKET ']' # 0xBB -> RIGHT SQUARE BRACKET '\xaf' # 0xBC -> MACRON '\xa8' # 0xBD -> DIAERESIS '\xb4' # 0xBE -> ACUTE ACCENT '\xd7' # 0xBF -> MULTIPLICATION SIGN '{' # 0xC0 -> LEFT CURLY BRACKET 'A' # 0xC1 -> LATIN CAPITAL LETTER A 'B' # 0xC2 -> LATIN CAPITAL LETTER B 'C' # 0xC3 -> LATIN CAPITAL LETTER C 'D' # 0xC4 -> LATIN CAPITAL 
LETTER D 'E' # 0xC5 -> LATIN CAPITAL LETTER E 'F' # 0xC6 -> LATIN CAPITAL LETTER F 'G' # 0xC7 -> LATIN CAPITAL LETTER G 'H' # 0xC8 -> LATIN CAPITAL LETTER H 'I' # 0xC9 -> LATIN CAPITAL LETTER I '\xad' # 0xCA -> SOFT HYPHEN '\xf4' # 0xCB -> LATIN SMALL LETTER O WITH CIRCUMFLEX '\xf6' # 0xCC -> LATIN SMALL LETTER O WITH DIAERESIS '\xf2' # 0xCD -> LATIN SMALL LETTER O WITH GRAVE '\xf3' # 0xCE -> LATIN SMALL LETTER O WITH ACUTE '\xf5' # 0xCF -> LATIN SMALL LETTER O WITH TILDE '}' # 0xD0 -> RIGHT CURLY BRACKET 'J' # 0xD1 -> LATIN CAPITAL LETTER J 'K' # 0xD2 -> LATIN CAPITAL LETTER K 'L' # 0xD3 -> LATIN CAPITAL LETTER L 'M' # 0xD4 -> LATIN CAPITAL LETTER M 'N' # 0xD5 -> LATIN CAPITAL LETTER N 'O' # 0xD6 -> LATIN CAPITAL LETTER O 'P' # 0xD7 -> LATIN CAPITAL LETTER P 'Q' # 0xD8 -> LATIN CAPITAL LETTER Q 'R' # 0xD9 -> LATIN CAPITAL LETTER R '\xb9' # 0xDA -> SUPERSCRIPT ONE '\xfb' # 0xDB -> LATIN SMALL LETTER U WITH CIRCUMFLEX '\xfc' # 0xDC -> LATIN SMALL LETTER U WITH DIAERESIS '\xf9' # 0xDD -> LATIN SMALL LETTER U WITH GRAVE '\xfa' # 0xDE -> LATIN SMALL LETTER U WITH ACUTE '\xff' # 0xDF -> LATIN SMALL LETTER Y WITH DIAERESIS '\\' # 0xE0 -> REVERSE SOLIDUS '\xf7' # 0xE1 -> DIVISION SIGN 'S' # 0xE2 -> LATIN CAPITAL LETTER S 'T' # 0xE3 -> LATIN CAPITAL LETTER T 'U' # 0xE4 -> LATIN CAPITAL LETTER U 'V' # 0xE5 -> LATIN CAPITAL LETTER V 'W' # 0xE6 -> LATIN CAPITAL LETTER W 'X' # 0xE7 -> LATIN CAPITAL LETTER X 'Y' # 0xE8 -> LATIN CAPITAL LETTER Y 'Z' # 0xE9 -> LATIN CAPITAL LETTER Z '\xb2' # 0xEA -> SUPERSCRIPT TWO '\xd4' # 0xEB -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX '\xd6' # 0xEC -> LATIN CAPITAL LETTER O WITH DIAERESIS '\xd2' # 0xED -> LATIN CAPITAL LETTER O WITH GRAVE '\xd3' # 0xEE -> LATIN CAPITAL LETTER O WITH ACUTE '\xd5' # 0xEF -> LATIN CAPITAL LETTER O WITH TILDE '0' # 0xF0 -> DIGIT ZERO '1' # 0xF1 -> DIGIT ONE '2' # 0xF2 -> DIGIT TWO '3' # 0xF3 -> DIGIT THREE '4' # 0xF4 -> DIGIT FOUR '5' # 0xF5 -> DIGIT FIVE '6' # 0xF6 -> DIGIT SIX '7' # 0xF7 -> DIGIT SEVEN '8' # 0xF8 
-> DIGIT EIGHT '9' # 0xF9 -> DIGIT NINE '\xb3' # 0xFA -> SUPERSCRIPT THREE '\xdb' # 0xFB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX '\xdc' # 0xFC -> LATIN CAPITAL LETTER U WITH DIAERESIS '\xd9' # 0xFD -> LATIN CAPITAL LETTER U WITH GRAVE '\xda' # 0xFE -> LATIN CAPITAL LETTER U WITH ACUTE '\x9f' # 0xFF -> CONTROL ) ### Encoding table encoding_table=codecs.charmap_build(decoding_table)
apache-2.0
skyfielders/python-skyfield
skyfield/tests/test_earth_deflection.py
1
1520
"""Hand-crafted tests against specific NOVAS behaviors. The tests in the neighboring `tests_against_novas.py` are automatically generated and in general demonstrate close agreement with NOVAS. But the hand-crafted tests here are aimed at specific edge conditions that we want to make sure we get correct. """ from numpy import arange, diff from skyfield.api import Topos, load def test_earth_deflection(): # The NOVAS library includes the Earth's gravitational deflection of # light for both topocentric observers and observers in Earth orbit, # but shuts this effect off once the object is behind the Earth 20% # of the way from its limb towards its center. This test determines # whether Skyfield puts the resulting discontinuity in the same # place as the NOVAS library does. # # For more details see: # https://github.com/skyfielders/astronomy-notebooks/blob/master/Skyfield-Notes/Fixing-earth-deflection.ipynb t = load.timescale(delta_t=0.0) t = t.tt(2016, 7, 2, arange(10.5628, 10.5639, 0.0002)) planets = load('de405.bsp') earth = planets['earth'] mars = planets['mars'] lowell = earth + Topos(latitude_degrees=35.2029, longitude_degrees=-111.6646) ra, dec, distance = lowell.at(t).observe(mars).apparent().radec() h = ra.hours hprime = diff(h) assert hprime[0] > 1.8e-8 assert hprime[1] > 1.8e-8 assert hprime[2] < 1.3e-8 # moment when nadir angle crosses 0.8 assert hprime[3] > 1.8e-8 assert hprime[4] > 1.8e-8
mit
Percona-QA/percona-qa
pxc-tests/percona-xtradb-cluster-tests/sst/python/testtools/tests/test_content.py
42
7772
# Copyright (c) 2008-2011 testtools developers. See LICENSE for details.

# Tests for testtools' attachment-content machinery: the Content object,
# the content_from_file/content_from_stream/text_content factories,
# TracebackContent, and the attach_file test helper.
#
# NOTE(review): this is a vendored, Python-2-era copy — native ``str``
# literals are passed where bytes are expected (e.g. ``os.write(fd,
# 'some data')``), which is only valid on Python 2.

import os
import tempfile
import unittest

from testtools import TestCase
from testtools.compat import (
    _b,
    _u,
    StringIO,
    )
from testtools.content import (
    attach_file,
    Content,
    content_from_file,
    content_from_stream,
    TracebackContent,
    text_content,
    )
from testtools.content_type import (
    ContentType,
    UTF8_TEXT,
    )
from testtools.matchers import (
    Equals,
    MatchesException,
    Raises,
    raises,
    )
from testtools.tests.helpers import an_exc_info


# Shared matcher: "calling this raises ValueError".
raises_value_error = Raises(MatchesException(ValueError))


class TestContent(TestCase):
    """Tests for ``Content`` and the content factory functions."""

    def test___init___None_errors(self):
        # Both the content type and the chunk callable are mandatory.
        self.assertThat(lambda: Content(None, None), raises_value_error)
        self.assertThat(
            lambda: Content(None, lambda: ["traceback"]), raises_value_error)
        self.assertThat(
            lambda: Content(ContentType("text", "traceback"), None),
            raises_value_error)

    def test___init___sets_ivars(self):
        content_type = ContentType("foo", "bar")
        content = Content(content_type, lambda: ["bytes"])
        self.assertEqual(content_type, content.content_type)
        self.assertEqual(["bytes"], list(content.iter_bytes()))

    def test___eq__(self):
        # Equality compares content type and the *joined* bytes, so
        # differently-chunked but identical data still compares equal.
        content_type = ContentType("foo", "bar")
        one_chunk = lambda: [_b("bytes")]
        two_chunk = lambda: [_b("by"), _b("tes")]
        content1 = Content(content_type, one_chunk)
        content2 = Content(content_type, one_chunk)
        content3 = Content(content_type, two_chunk)
        content4 = Content(content_type, lambda: [_b("by"), _b("te")])
        content5 = Content(ContentType("f", "b"), two_chunk)
        self.assertEqual(content1, content2)
        self.assertEqual(content1, content3)
        self.assertNotEqual(content1, content4)
        self.assertNotEqual(content1, content5)

    def test___repr__(self):
        # repr() must show escaped bytes, not raw binary.
        content = Content(ContentType("application", "octet-stream"),
            lambda: [_b("\x00bin"), _b("ary\xff")])
        self.assertIn("\\x00binary\\xff", repr(content))

    def test_iter_text_not_text_errors(self):
        # iter_text() is only defined for "text/*" content types.
        content_type = ContentType("foo", "bar")
        content = Content(content_type, lambda: ["bytes"])
        self.assertThat(content.iter_text, raises_value_error)

    def test_iter_text_decodes(self):
        content_type = ContentType("text", "strange", {"charset": "utf8"})
        content = Content(
            content_type, lambda: [_u("bytes\xea").encode("utf8")])
        self.assertEqual([_u("bytes\xea")], list(content.iter_text()))

    def test_iter_text_default_charset_iso_8859_1(self):
        # Without an explicit charset parameter, ISO-8859-1 is assumed.
        content_type = ContentType("text", "strange")
        text = _u("bytes\xea")
        iso_version = text.encode("ISO-8859-1")
        content = Content(content_type, lambda: [iso_version])
        self.assertEqual([text], list(content.iter_text()))

    def test_from_file(self):
        fd, path = tempfile.mkstemp()
        self.addCleanup(os.remove, path)
        os.write(fd, 'some data')
        os.close(fd)
        # chunk_size=2 forces multiple reads so chunking is exercised.
        content = content_from_file(path, UTF8_TEXT, chunk_size=2)
        self.assertThat(
            list(content.iter_bytes()),
            Equals(['so', 'me', ' d', 'at', 'a']))

    def test_from_nonexistent_file(self):
        # The factory itself must not touch the file; the IOError is
        # deferred until the bytes are actually iterated.
        directory = tempfile.mkdtemp()
        nonexistent = os.path.join(directory, 'nonexistent-file')
        content = content_from_file(nonexistent)
        self.assertThat(content.iter_bytes, raises(IOError))

    def test_from_file_default_type(self):
        content = content_from_file('/nonexistent/path')
        self.assertThat(content.content_type, Equals(UTF8_TEXT))

    def test_from_file_eager_loading(self):
        fd, path = tempfile.mkstemp()
        os.write(fd, 'some data')
        os.close(fd)
        content = content_from_file(path, UTF8_TEXT, buffer_now=True)
        # buffer_now=True read the file at construction time, so deleting
        # it afterwards must not lose the data.
        os.remove(path)
        self.assertThat(
            _b('').join(content.iter_bytes()), Equals('some data'))

    def test_from_stream(self):
        data = StringIO('some data')
        content = content_from_stream(data, UTF8_TEXT, chunk_size=2)
        self.assertThat(
            list(content.iter_bytes()), Equals(['so', 'me', ' d', 'at', 'a']))

    def test_from_stream_default_type(self):
        data = StringIO('some data')
        content = content_from_stream(data)
        self.assertThat(content.content_type, Equals(UTF8_TEXT))

    def test_from_stream_eager_loading(self):
        fd, path = tempfile.mkstemp()
        self.addCleanup(os.remove, path)
        os.write(fd, 'some data')
        stream = open(path, 'rb')
        content = content_from_stream(stream, UTF8_TEXT, buffer_now=True)
        # Data appended after construction must not appear in the
        # eagerly buffered content.
        os.write(fd, 'more data')
        os.close(fd)
        self.assertThat(
            _b('').join(content.iter_bytes()), Equals('some data'))

    def test_from_text(self):
        data = _u("some data")
        expected = Content(UTF8_TEXT, lambda: [data.encode('utf8')])
        self.assertEqual(expected, text_content(data))


class TestTracebackContent(TestCase):
    """Tests for ``TracebackContent``."""

    def test___init___None_errors(self):
        self.assertThat(
            lambda: TracebackContent(None, None), raises_value_error)

    def test___init___sets_ivars(self):
        content = TracebackContent(an_exc_info, self)
        content_type = ContentType("text", "x-traceback",
            {"language": "python", "charset": "utf8"})
        self.assertEqual(content_type, content.content_type)
        # The rendered text must match unittest's own traceback formatting.
        result = unittest.TestResult()
        expected = result._exc_info_to_string(an_exc_info, self)
        self.assertEqual(expected, ''.join(list(content.iter_text())))


class TestAttachFile(TestCase):
    """Tests for the ``attach_file`` helper."""

    def make_file(self, data):
        # Create a temp file holding `data`; removed automatically on
        # cleanup.  Returns the path.
        fd, path = tempfile.mkstemp()
        self.addCleanup(os.remove, path)
        os.write(fd, data)
        os.close(fd)
        return path

    def test_simple(self):
        class SomeTest(TestCase):
            def test_foo(self):
                pass
        test = SomeTest('test_foo')
        data = 'some data'
        path = self.make_file(data)
        my_content = text_content(data)
        attach_file(test, path, name='foo')
        self.assertEqual({'foo': my_content}, test.getDetails())

    def test_optional_name(self):
        # If no name is provided, attach_file just uses the base name of the
        # file.
        class SomeTest(TestCase):
            def test_foo(self):
                pass
        test = SomeTest('test_foo')
        path = self.make_file('some data')
        base_path = os.path.basename(path)
        attach_file(test, path)
        self.assertEqual([base_path], list(test.getDetails()))

    def test_lazy_read(self):
        class SomeTest(TestCase):
            def test_foo(self):
                pass
        test = SomeTest('test_foo')
        path = self.make_file('some data')
        attach_file(test, path, name='foo', buffer_now=False)
        content = test.getDetails()['foo']
        # With buffer_now=False the file is read lazily, so a rewrite
        # before iteration is visible.
        content_file = open(path, 'w')
        content_file.write('new data')
        content_file.close()
        self.assertEqual(''.join(content.iter_bytes()), 'new data')

    def test_eager_read_by_default(self):
        class SomeTest(TestCase):
            def test_foo(self):
                pass
        test = SomeTest('test_foo')
        path = self.make_file('some data')
        attach_file(test, path, name='foo')
        content = test.getDetails()['foo']
        # Default is eager: a later rewrite must NOT change the content.
        content_file = open(path, 'w')
        content_file.write('new data')
        content_file.close()
        self.assertEqual(''.join(content.iter_bytes()), 'some data')


def test_suite():
    from unittest import TestLoader
    return TestLoader().loadTestsFromName(__name__)
gpl-2.0
seamicro/python-seamicroclient
seamicroclient/tests/v2/test_SMCards.py
1
1137
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from seamicroclient.tests import utils
from seamicroclient.tests.v2 import fakes
from seamicroclient.v2 import smcards

# Fake client that records HTTP calls instead of performing them.
cs = fakes.FakeClient()


class SMCardstest(utils.TestCase):
    """Tests for the SMCard resource manager."""

    def test_list_smcards(self):
        """list() issues GET /chassis/smcard and yields SMCard objects."""
        pl = cs.smcards.list()
        cs.assert_called('GET', '/chassis/smcard')
        # Plain loop instead of the original side-effect list
        # comprehension, which built and discarded a list of
        # assertTrue() return values.
        for s in pl:
            self.assertTrue(isinstance(s, smcards.SMCard))

    def test_get_smcards(self):
        """get(id) issues GET /chassis/smcard/<id> and yields one SMCard."""
        p = cs.smcards.get(1)
        cs.assert_called('GET', '/chassis/smcard/1')
        self.assertTrue(isinstance(p, smcards.SMCard))
apache-2.0
sebrandon1/nova
nova/virt/libvirt/firewall.py
7
15320
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
# Copyright (c) 2010 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#    http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import uuid

from eventlet import greenthread
from lxml import etree
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import importutils

from nova.cloudpipe import pipelib
import nova.conf
from nova.i18n import _LI
from nova.i18n import _LW
import nova.virt.firewall as base_firewall
from nova.virt import netutils

LOG = logging.getLogger(__name__)

CONF = nova.conf.CONF

# Bound lazily in NWFilterFirewall.__init__ so this module can be
# imported on hosts that lack the libvirt python bindings.
libvirt = None


class NWFilterFirewall(base_firewall.FirewallDriver):
    """This class implements a network filtering mechanism by using
    libvirt's nwfilter.

    all instances get a filter ("nova-base") applied. This filter
    provides some basic security such as protection against MAC
    spoofing, IP spoofing, and ARP spoofing.
    """

    def __init__(self, host, **kwargs):
        """Create an NWFilter firewall driver

        :param host: nova.virt.libvirt.host.Host instance
        :param kwargs: currently unused
        """
        global libvirt
        if libvirt is None:
            try:
                libvirt = importutils.import_module('libvirt')
            except ImportError:
                # Keep going: the module stays importable, but filter
                # operations will fail later.
                LOG.warning(_LW("Libvirt module could not be loaded. "
                                "NWFilterFirewall will not work correctly."))
        self._host = host
        self.static_filters_configured = False
        self.handle_security_groups = False

    def apply_instance_filter(self, instance, network_info):
        """No-op. Everything is done in prepare_instance_filter."""
        pass

    def _get_connection(self):
        # Delegate to the Host object, which owns the libvirt connection.
        return self._host.get_connection()
    _conn = property(_get_connection)

    def nova_no_nd_reflection_filter(self):
        """This filter protects false positives on IPv6 Duplicate Address
        Detection(DAD).
        """
        # NOTE: local `uuid` shadows the module-level `uuid` import here
        # and in the other filter builders below.
        uuid = self._get_filter_uuid('nova-no-nd-reflection')
        return '''<filter name='nova-no-nd-reflection' chain='ipv6'>
                  <!-- no nd reflection -->
                  <!-- drop if destination mac is v6 mcast mac addr and
                       we sent it. -->
                  <uuid>%s</uuid>
                  <rule action='drop' direction='in'>
                      <mac dstmacaddr='33:33:00:00:00:00'
                           dstmacmask='ff:ff:00:00:00:00' srcmacaddr='$MAC'/>
                  </rule>
                  </filter>''' % uuid

    def nova_dhcp_filter(self):
        """The standard allow-dhcp-server filter is an <ip> one, so it uses
        ebtables to allow traffic through. Without a corresponding rule in
        iptables, it'll get blocked anyway.
        """
        uuid = self._get_filter_uuid('nova-allow-dhcp-server')
        return '''<filter name='nova-allow-dhcp-server' chain='ipv4'>
                    <uuid>%s</uuid>
                    <rule action='accept' direction='out'
                          priority='100'>
                      <udp srcipaddr='0.0.0.0'
                           dstipaddr='255.255.255.255'
                           srcportstart='68'
                           dstportstart='67'/>
                    </rule>
                    <rule action='accept' direction='in'
                          priority='100'>
                      <udp srcipaddr='$DHCPSERVER'
                           srcportstart='67'
                           dstportstart='68'/>
                    </rule>
                  </filter>''' % uuid

    def setup_basic_filtering(self, instance, network_info):
        """Set up basic filtering (MAC, IP, and ARP spoofing protection)."""
        LOG.info(_LI('Called setup_basic_filtering in nwfilter'),
                 instance=instance)

        if self.handle_security_groups:
            # No point in setting up a filter set that we'll be overriding
            # anyway.
            return

        LOG.info(_LI('Ensuring static filters'), instance=instance)
        self._ensure_static_filters()

        nodhcp_base_filter = self.get_base_filter_list(instance, False)
        dhcp_base_filter = self.get_base_filter_list(instance, True)

        for vif in network_info:
            # Use the DHCP-allowing base filter only for VIFs that have
            # at least one subnet with a DHCP server.
            _base_filter = nodhcp_base_filter
            for subnet in vif['network']['subnets']:
                if subnet.get_meta('dhcp_server'):
                    _base_filter = dhcp_base_filter
                    break
            self._define_filter(self._get_instance_filter_xml(instance,
                                                              _base_filter,
                                                              vif))

    def _get_instance_filter_parameters(self, vif):
        # Build the <parameter .../> fragments substituted into a VIF's
        # instance filter ($IP, $DHCPSERVER, $RASERVER, $PROJNET, ...).
        parameters = []

        def format_parameter(parameter, value):
            return ("<parameter name='%s' value='%s'/>" % (parameter, value))

        network = vif['network']
        if not vif['network'] or not vif['network']['subnets']:
            return parameters

        v4_subnets = [s for s in network['subnets'] if s['version'] == 4]
        v6_subnets = [s for s in network['subnets'] if s['version'] == 6]

        for subnet in v4_subnets:
            for ip in subnet['ips']:
                parameters.append(format_parameter('IP', ip['address']))

            dhcp_server = subnet.get_meta('dhcp_server')
            if dhcp_server:
                parameters.append(format_parameter('DHCPSERVER', dhcp_server))

        if CONF.use_ipv6:
            for subnet in v6_subnets:
                gateway = subnet.get('gateway')
                if gateway:
                    ra_server = gateway['address'] + "/128"
                    parameters.append(format_parameter('RASERVER', ra_server))

        if CONF.allow_same_net_traffic:
            for subnet in v4_subnets:
                ipv4_cidr = subnet['cidr']
                net, mask = netutils.get_net_and_mask(ipv4_cidr)
                parameters.append(format_parameter('PROJNET', net))
                parameters.append(format_parameter('PROJMASK', mask))

            if CONF.use_ipv6:
                for subnet in v6_subnets:
                    ipv6_cidr = subnet['cidr']
                    net, prefix = netutils.get_net_and_prefixlen(ipv6_cidr)
                    parameters.append(format_parameter('PROJNET6', net))
                    parameters.append(format_parameter('PROJMASK6', prefix))

        return parameters

    def _get_instance_filter_xml(self, instance, filters, vif):
        # Assemble the per-VIF root filter XML: a filterref per base
        # filter, each carrying the substitution parameters built above.
        nic_id = vif['address'].replace(':', '')
        instance_filter_name = self._instance_filter_name(instance, nic_id)
        parameters = self._get_instance_filter_parameters(vif)
        uuid = self._get_filter_uuid(instance_filter_name)
        xml = '''<filter name='%s' chain='root'>''' % instance_filter_name
        xml += '<uuid>%s</uuid>' % uuid
        for f in filters:
            xml += '''<filterref filter='%s'>''' % f
            xml += ''.join(parameters)
            xml += '</filterref>'
        xml += '</filter>'
        return xml

    def get_base_filter_list(self, instance, allow_dhcp):
        """Obtain a list of base filters to apply to an instance.
        The return value should be a list of strings, each
        specifying a filter name.  Subclasses can override this
        function to add additional filters as needed.  Additional
        filters added to the list must also be correctly defined
        within the subclass.
        """
        if pipelib.is_vpn_image(instance.image_ref):
            base_filter = 'nova-vpn'
        elif allow_dhcp:
            base_filter = 'nova-base'
        else:
            base_filter = 'nova-nodhcp'
        return [base_filter]

    def _ensure_static_filters(self):
        """Static filters are filters that have no need to be IP aware.

        There is no configuration or tuneability of these filters, so they
        can be set up once and forgotten about.

        """
        if self.static_filters_configured:
            return

        filter_set = ['no-mac-spoofing',
                      'no-ip-spoofing',
                      'no-arp-spoofing']

        self._define_filter(self.nova_no_nd_reflection_filter())
        filter_set.append('nova-no-nd-reflection')
        # 'nova-nodhcp' excludes the DHCP rule; 'nova-base' includes it.
        self._define_filter(self._filter_container('nova-nodhcp', filter_set))
        filter_set.append('allow-dhcp-server')
        self._define_filter(self._filter_container('nova-base', filter_set))
        self._define_filter(self._filter_container('nova-vpn',
                                                   ['allow-dhcp-server']))
        self._define_filter(self.nova_dhcp_filter())

        self.static_filters_configured = True

    def _filter_container(self, name, filters):
        # Wrap a list of existing filter names in a root-chain container
        # filter that simply references each of them.
        uuid = self._get_filter_uuid(name)
        xml = '''<filter name='%s' chain='root'>
                   <uuid>%s</uuid>
                   %s
                 </filter>''' % (name, uuid,
                 ''.join(["<filterref filter='%s'/>" % (f,) for f in filters]))
        return xml

    def _get_filter_uuid(self, name):
        # Reuse the UUID of an already-defined filter so redefinition
        # succeeds; fall back to a fresh random UUID when absent.
        try:
            flt = self._conn.nwfilterLookupByName(name)
            xml = flt.XMLDesc(0)
            doc = etree.fromstring(xml)
            u = doc.find("./uuid").text
        except Exception as e:
            LOG.debug(u"Cannot find UUID for filter '%(name)s': '%(e)s'",
                      {'name': name, 'e': e})
            u = uuid.uuid4().hex

        LOG.debug("UUID for filter '%s' is '%s'", name, u)
        return u

    def _define_filter(self, xml):
        # Accept either the XML string or a zero-arg callable producing it.
        if callable(xml):
            xml = xml()
        try:
            self._conn.nwfilterDefineXML(xml)
        except libvirt.libvirtError as ex:
            with excutils.save_and_reraise_exception() as ctxt:
                errcode = ex.get_error_code()
                if errcode == libvirt.VIR_ERR_OPERATION_FAILED:
                    # Since libvirt 1.2.7 this operation can fail if the filter
                    # with the same name already exists for the given uuid.
                    # Unfortunately there is not a specific error code for this
                    # so we have to parse the error message to see if that was
                    # the failure.
                    errmsg = ex.get_error_message()
                    if 'already exists with uuid' in errmsg:
                        ctxt.reraise = False

    def unfilter_instance(self, instance, network_info):
        """Clear out the nwfilter rules."""
        for vif in network_info:
            nic_id = vif['address'].replace(':', '')
            instance_filter_name = self._instance_filter_name(instance,
                                                              nic_id)

            # nwfilters may be defined in a separate thread in the case
            # of libvirt non-blocking mode, so we wait for completion
            max_retry = CONF.live_migration_retry_count
            for cnt in range(max_retry):
                try:
                    _nw = self._conn.nwfilterLookupByName(
                        instance_filter_name)
                    _nw.undefine()
                    break
                except libvirt.libvirtError as e:
                    if cnt == max_retry - 1:
                        raise
                    errcode = e.get_error_code()
                    if errcode == libvirt.VIR_ERR_OPERATION_INVALID:
                        # This happens when the instance filter is still in
                        # use (ie. when the instance has not terminated
                        # properly)
                        LOG.info(_LI('Failed to undefine network filter '
                                     '%(name)s. Try %(cnt)d of '
                                     '%(max_retry)d.'),
                                 {'name': instance_filter_name,
                                  'cnt': cnt + 1,
                                  'max_retry': max_retry},
                                 instance=instance)
                        # Back off briefly before retrying the undefine.
                        greenthread.sleep(1)
                    else:
                        # Filter not found: nothing left to clean up for
                        # this VIF.
                        LOG.debug('The nwfilter(%s) is not found.',
                                  instance_filter_name, instance=instance)
                        break

    @staticmethod
    def _instance_filter_name(instance, nic_id=None):
        # Per-instance (or per-instance-per-NIC) filter naming scheme.
        if not nic_id:
            return 'nova-instance-%s' % (instance.name)
        return 'nova-instance-%s-%s' % (instance.name, nic_id)

    def instance_filter_exists(self, instance, network_info):
        """Check nova-instance-instance-xxx exists."""
        for vif in network_info:
            nic_id = vif['address'].replace(':', '')
            instance_filter_name = self._instance_filter_name(instance,
                                                              nic_id)
            try:
                self._conn.nwfilterLookupByName(instance_filter_name)
            except libvirt.libvirtError:
                name = instance.name
                LOG.debug('The nwfilter(%(instance_filter_name)s) for'
                          '%(name)s is not found.',
                          {'instance_filter_name': instance_filter_name,
                           'name': name},
                          instance=instance)
                return False
        return True


class IptablesFirewallDriver(base_firewall.IptablesFirewallDriver):
    def __init__(self, execute=None, **kwargs):
        """Create an IP tables firewall driver instance

        :param execute: unused, pass None
        :param kwargs: extra arguments

        The @kwargs parameter must contain a key 'host' that
        maps to an instance of the nova.virt.libvirt.host.Host
        class.
        """
        super(IptablesFirewallDriver, self).__init__(**kwargs)
        # Basic anti-spoofing filtering is delegated to nwfilter.
        self.nwfilter = NWFilterFirewall(kwargs['host'])

    def setup_basic_filtering(self, instance, network_info):
        """Set up basic NWFilter."""
        self.nwfilter.setup_basic_filtering(instance, network_info)

    def apply_instance_filter(self, instance, network_info):
        """No-op. Everything is done in prepare_instance_filter."""
        pass

    def unfilter_instance(self, instance, network_info):
        # NOTE(salvatore-orlando):
        # Overriding base class method for applying nwfilter operation
        if self.instance_info.pop(instance.id, None):
            self.remove_filters_for_instance(instance)
            self.iptables.apply()
            self.nwfilter.unfilter_instance(instance, network_info)
        else:
            LOG.info(_LI('Attempted to unfilter instance which is not '
                         'filtered'), instance=instance)

    def instance_filter_exists(self, instance, network_info):
        """Check nova-instance-instance-xxx exists."""
        return self.nwfilter.instance_filter_exists(instance, network_info)
apache-2.0