repo_name
stringlengths
5
100
path
stringlengths
4
375
copies
stringclasses
991 values
size
stringlengths
4
7
content
stringlengths
666
1M
license
stringclasses
15 values
TeamExodus/kernel_cyanogen_msm8916
tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py
12980
5411
# SchedGui.py - Python extension for perf script, basic GUI code for # traces drawing and overview. # # Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com> # # This software is distributed under the terms of the GNU General # Public License ("GPL") version 2 as published by the Free Software # Foundation. try: import wx except ImportError: raise ImportError, "You need to install the wxpython lib for this script" class RootFrame(wx.Frame): Y_OFFSET = 100 RECT_HEIGHT = 100 RECT_SPACE = 50 EVENT_MARKING_WIDTH = 5 def __init__(self, sched_tracer, title, parent = None, id = -1): wx.Frame.__init__(self, parent, id, title) (self.screen_width, self.screen_height) = wx.GetDisplaySize() self.screen_width -= 10 self.screen_height -= 10 self.zoom = 0.5 self.scroll_scale = 20 self.sched_tracer = sched_tracer self.sched_tracer.set_root_win(self) (self.ts_start, self.ts_end) = sched_tracer.interval() self.update_width_virtual() self.nr_rects = sched_tracer.nr_rectangles() + 1 self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)) # whole window panel self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height)) # scrollable container self.scroll = wx.ScrolledWindow(self.panel) self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale) self.scroll.EnableScrolling(True, True) self.scroll.SetFocus() # scrollable drawing area self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2)) self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint) self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press) self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down) self.scroll.Bind(wx.EVT_PAINT, self.on_paint) self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press) self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down) self.scroll.Fit() self.Fit() self.scroll_panel.SetDimensions(-1, -1, 
self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING) self.txt = None self.Show(True) def us_to_px(self, val): return val / (10 ** 3) * self.zoom def px_to_us(self, val): return (val / self.zoom) * (10 ** 3) def scroll_start(self): (x, y) = self.scroll.GetViewStart() return (x * self.scroll_scale, y * self.scroll_scale) def scroll_start_us(self): (x, y) = self.scroll_start() return self.px_to_us(x) def paint_rectangle_zone(self, nr, color, top_color, start, end): offset_px = self.us_to_px(start - self.ts_start) width_px = self.us_to_px(end - self.ts_start) offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)) width_py = RootFrame.RECT_HEIGHT dc = self.dc if top_color is not None: (r, g, b) = top_color top_color = wx.Colour(r, g, b) brush = wx.Brush(top_color, wx.SOLID) dc.SetBrush(brush) dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH) width_py -= RootFrame.EVENT_MARKING_WIDTH offset_py += RootFrame.EVENT_MARKING_WIDTH (r ,g, b) = color color = wx.Colour(r, g, b) brush = wx.Brush(color, wx.SOLID) dc.SetBrush(brush) dc.DrawRectangle(offset_px, offset_py, width_px, width_py) def update_rectangles(self, dc, start, end): start += self.ts_start end += self.ts_start self.sched_tracer.fill_zone(start, end) def on_paint(self, event): dc = wx.PaintDC(self.scroll_panel) self.dc = dc width = min(self.width_virtual, self.screen_width) (x, y) = self.scroll_start() start = self.px_to_us(x) end = self.px_to_us(x + width) self.update_rectangles(dc, start, end) def rect_from_ypixel(self, y): y -= RootFrame.Y_OFFSET rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE) height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE) if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT: return -1 return rect def update_summary(self, txt): if self.txt: self.txt.Destroy() self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50)) def on_mouse_down(self, event): (x, y) = 
event.GetPositionTuple() rect = self.rect_from_ypixel(y) if rect == -1: return t = self.px_to_us(x) + self.ts_start self.sched_tracer.mouse_down(rect, t) def update_width_virtual(self): self.width_virtual = self.us_to_px(self.ts_end - self.ts_start) def __zoom(self, x): self.update_width_virtual() (xpos, ypos) = self.scroll.GetViewStart() xpos = self.us_to_px(x) / self.scroll_scale self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos) self.Refresh() def zoom_in(self): x = self.scroll_start_us() self.zoom *= 2 self.__zoom(x) def zoom_out(self): x = self.scroll_start_us() self.zoom /= 2 self.__zoom(x) def on_key_press(self, event): key = event.GetRawKeyCode() if key == ord("+"): self.zoom_in() return if key == ord("-"): self.zoom_out() return key = event.GetKeyCode() (x, y) = self.scroll.GetViewStart() if key == wx.WXK_RIGHT: self.scroll.Scroll(x + 1, y) elif key == wx.WXK_LEFT: self.scroll.Scroll(x - 1, y) elif key == wx.WXK_DOWN: self.scroll.Scroll(x, y + 1) elif key == wx.WXK_UP: self.scroll.Scroll(x, y - 1)
gpl-2.0
dariemp/odoo
addons/stock_dropshipping/__openerp__.py
260
2037
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2014 OpenERP S.A. (<http://www.openerp.com>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Drop Shipping', 'version': '1.0', 'category': 'Warehouse Management', 'summary': 'Drop Shipping', 'description': """ Manage drop shipping orders =========================== This module adds a pre-configured Drop Shipping picking type as well as a procurement route that allow configuring Drop Shipping products and orders. When drop shipping is used the goods are directly transferred from suppliers to customers (direct delivery) without going through the retailer's warehouse. In this case no internal transfer document is needed. """, 'author': 'OpenERP SA', 'website': 'https://www.odoo.com/page/warehouse', 'depends': ['purchase', 'sale_stock'], 'data': ['stock_dropshipping.xml'], 'test': [ 'test/cancellation_propagated.yml', 'test/crossdock.yml', 'test/dropship.yml', 'test/procurementexception.yml', 'test/lifo_price.yml' ], 'installable': True, 'auto_install': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
SRJ9/django-driver27
driver27/management/commands/update_results.py
1
1194
from django.core.management.base import BaseCommand, CommandError from driver27.models import Result import csv class Command(BaseCommand): help = 'Update results via csv' @staticmethod def _create_result(row): invalid_keywords = [] result_fields = [f.column for f in Result._meta.get_fields() if hasattr(f, 'column')] for x in row: if x in ['qualifying', 'finish', 'points', 'race_id', 'seat_id']: if row[x] == '': row[x] = None else: row[x] = int(row[x]) elif x in ['wildcard', 'fastest_lap', 'retired']: row[x] = bool(row[x]) elif x not in result_fields: invalid_keywords.append(x) for invalid_keyword in invalid_keywords: del row[invalid_keyword] return Result.objects.create(**row) def add_arguments(self, parser): parser.add_argument('csv',) def handle(self, *args, **options): with open(options['csv'], 'rb') as csvfile: reader = csv.DictReader(csvfile, delimiter=',') for row in reader: self._create_result(row)
mit
ahmetabdi/SickRage
autoProcessTV/lib/requests/packages/chardet/charsetgroupprober.py
2929
3791
######################## BEGIN LICENSE BLOCK ######################## # The Original Code is Mozilla Communicator client code. # # The Initial Developer of the Original Code is # Netscape Communications Corporation. # Portions created by the Initial Developer are Copyright (C) 1998 # the Initial Developer. All Rights Reserved. # # Contributor(s): # Mark Pilgrim - port to Python # # This library is free software; you can redistribute it and/or # modify it under the terms of the GNU Lesser General Public # License as published by the Free Software Foundation; either # version 2.1 of the License, or (at your option) any later version. # # This library is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # Lesser General Public License for more details. # # You should have received a copy of the GNU Lesser General Public # License along with this library; if not, write to the Free Software # Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA # 02110-1301 USA ######################### END LICENSE BLOCK ######################### from . 
import constants import sys from .charsetprober import CharSetProber class CharSetGroupProber(CharSetProber): def __init__(self): CharSetProber.__init__(self) self._mActiveNum = 0 self._mProbers = [] self._mBestGuessProber = None def reset(self): CharSetProber.reset(self) self._mActiveNum = 0 for prober in self._mProbers: if prober: prober.reset() prober.active = True self._mActiveNum += 1 self._mBestGuessProber = None def get_charset_name(self): if not self._mBestGuessProber: self.get_confidence() if not self._mBestGuessProber: return None # self._mBestGuessProber = self._mProbers[0] return self._mBestGuessProber.get_charset_name() def feed(self, aBuf): for prober in self._mProbers: if not prober: continue if not prober.active: continue st = prober.feed(aBuf) if not st: continue if st == constants.eFoundIt: self._mBestGuessProber = prober return self.get_state() elif st == constants.eNotMe: prober.active = False self._mActiveNum -= 1 if self._mActiveNum <= 0: self._mState = constants.eNotMe return self.get_state() return self.get_state() def get_confidence(self): st = self.get_state() if st == constants.eFoundIt: return 0.99 elif st == constants.eNotMe: return 0.01 bestConf = 0.0 self._mBestGuessProber = None for prober in self._mProbers: if not prober: continue if not prober.active: if constants._debug: sys.stderr.write(prober.get_charset_name() + ' not active\n') continue cf = prober.get_confidence() if constants._debug: sys.stderr.write('%s confidence = %s\n' % (prober.get_charset_name(), cf)) if bestConf < cf: bestConf = cf self._mBestGuessProber = prober if not self._mBestGuessProber: return 0.0 return bestConf # else: # self._mBestGuessProber = self._mProbers[0] # return self._mBestGuessProber.get_confidence()
gpl-3.0
JAOSP/aosp_platform_external_chromium_org
chromeos/ime/gen_input_methods.py
51
3330
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. """Generate a C++ header from ibus_input_methods.txt. This program generates a C++ header file containing the information on available input methods. It parses input_methods.txt, and then generates a static array definition from the information extracted. The input and output file names are specified on the command line. Run it like: gen_input_methods.py input_methods.txt input_methods.h It will produce output that looks like: // This file is automatically generated by gen_input_methods.py #ifndef CHROME_BROWSER_CHROMEOS_INPUT_METHOD_INPUT_METHODS_H_ #define CHROME_BROWSER_CHROMEOS_INPUT_METHOD_INPUT_METHODS_H_ namespace chromeos { namespace input_method { struct InputMethodsInfo { const char* input_method_id; const char* language_code; const char* xkb_keyboard_id; }; const InputMethodsInfo kInputMethods[] = { {"mozc-chewing", "zh-TW", "us"}, {"xkb:us::eng", "en-US", "us"}, {"xkb:us:dvorak:eng", "en-US", "us(dvorak)"}, {"xkb:be::fra", "fr", "be"}, {"xkb:br::por", "pt-BR", "br"}, }; } // namespace input_method } // namespace chromeos #endif // CHROME_BROWSER_CHROMEOS_INPUT_METHOD_INPUT_METHODS_H_ """ import fileinput import re import sys OUTPUT_HEADER = """// Automatically generated by gen_input_methods.py #ifndef CHROME_BROWSER_CHROMEOS_INPUT_METHOD_INPUT_METHODS_H_ #define CHROME_BROWSER_CHROMEOS_INPUT_METHOD_INPUT_METHODS_H_ namespace chromeos { namespace input_method { struct InputMethodsInfo { const char* input_method_id; const char* language_code; const char* xkb_layout_id; }; const InputMethodsInfo kInputMethods[] = { """ CPP_FORMAT = '#if %s\n' ENGINE_FORMAT = (' {"%(input_method_id)s", "%(language_code)s", ' + '"%(xkb_layout_id)s"},\n') OUTPUT_FOOTER = """ }; } // namespace input_method } // namespace chromeos #endif // CHROME_BROWSER_CHROMEOS_INPUT_METHOD_INPUT_METHODS_H_ 
""" def CreateEngineHeader(engines): """Create the header file from a list of engines. Arguments: engines: list of engine objects Returns: The text of a C++ header file containing the engine data. """ output = [] output.append(OUTPUT_HEADER) for engine in engines: if engine.has_key('if'): output.append(CPP_FORMAT % engine['if']) output.append(ENGINE_FORMAT % engine) if engine.has_key('if'): output.append('#endif\n') output.append(OUTPUT_FOOTER) return "".join(output) def main(argv): if len(argv) != 3: print 'Usage: gen_input_methods.py [whitelist] [output]' sys.exit(1) engines = [] for line in fileinput.input(sys.argv[1]): line = line.strip() if not line or re.match(r'#', line): continue columns = line.split() assert len(columns) == 3 or len(columns) == 4, "Invalid format: " + line engine = {} engine['input_method_id'] = columns[0] engine['xkb_layout_id'] = columns[1] engine['language_code'] = columns[2] if len(columns) == 4: engine['if'] = columns[3] engines.append(engine) output = CreateEngineHeader(engines) output_file = open(sys.argv[2], 'w') output_file.write(output) if __name__ == '__main__': main(sys.argv)
bsd-3-clause
hickford/youtube-dl
youtube_dl/extractor/pornhd.py
106
2422
from __future__ import unicode_literals import re import json from .common import InfoExtractor from ..utils import ( int_or_none, js_to_json, qualities, ) class PornHdIE(InfoExtractor): _VALID_URL = r'http://(?:www\.)?pornhd\.com/(?:[a-z]{2,4}/)?videos/(?P<id>\d+)(?:/(?P<display_id>.+))?' _TEST = { 'url': 'http://www.pornhd.com/videos/1962/sierra-day-gets-his-cum-all-over-herself-hd-porn-video', 'md5': '956b8ca569f7f4d8ec563e2c41598441', 'info_dict': { 'id': '1962', 'display_id': 'sierra-day-gets-his-cum-all-over-herself-hd-porn-video', 'ext': 'mp4', 'title': 'Sierra loves doing laundry', 'description': 'md5:8ff0523848ac2b8f9b065ba781ccf294', 'thumbnail': 're:^https?://.*\.jpg', 'view_count': int, 'age_limit': 18, } } def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) video_id = mobj.group('id') display_id = mobj.group('display_id') webpage = self._download_webpage(url, display_id or video_id) title = self._html_search_regex( r'<title>(.+) porn HD.+?</title>', webpage, 'title') description = self._html_search_regex( r'<div class="description">([^<]+)</div>', webpage, 'description', fatal=False) view_count = int_or_none(self._html_search_regex( r'(\d+) views\s*</span>', webpage, 'view count', fatal=False)) thumbnail = self._search_regex( r"'poster'\s*:\s*'([^']+)'", webpage, 'thumbnail', fatal=False) quality = qualities(['sd', 'hd']) sources = json.loads(js_to_json(self._search_regex( r"(?s)'sources'\s*:\s*(\{.+?\})\s*\}[;,)]", webpage, 'sources'))) formats = [] for qname, video_url in sources.items(): if not video_url: continue formats.append({ 'url': video_url, 'format_id': qname, 'quality': quality(qname), }) self._sort_formats(formats) return { 'id': video_id, 'display_id': display_id, 'title': title, 'description': description, 'thumbnail': thumbnail, 'view_count': view_count, 'formats': formats, 'age_limit': 18, }
unlicense
yufengg/tensorflow
tensorflow/contrib/learn/python/learn/export_strategy.py
76
3566
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """ExportStrategy class represents different flavors of model export.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import collections from tensorflow.python.util import tf_inspect __all__ = ['ExportStrategy'] class ExportStrategy( collections.namedtuple('ExportStrategy', ['name', 'export_fn'])): """A class representing a type of model export. Typically constructed by a utility function specific to the exporter, such as `saved_model_export_utils.make_export_strategy()`. The fields are: name: The directory name under the export base directory where exports of this type will be written. export_fn: A function that writes an export, given an estimator, a destination path, and optionally a checkpoint path and an evaluation result for that checkpoint. This export_fn() may be run repeatedly during continuous training, or just once at the end of fixed-length training. Note the export_fn() may choose whether or not to export based on the eval result or based on an internal timer or any other criterion, if exports are not desired for every checkpoint. 
The signature of this function must be one of: * `(estimator, export_path) -> export_path` * `(estimator, export_path, checkpoint_path) -> export_path` * `(estimator, export_path, checkpoint_path, eval_result) -> export_path` """ def export(self, estimator, export_path, checkpoint_path=None, eval_result=None): """Exports the given Estimator to a specific format. Args: estimator: the Estimator to export. export_path: A string containing a directory where to write the export. checkpoint_path: The checkpoint path to export. If None (the default), the strategy may locate a checkpoint (e.g. the most recent) by itself. eval_result: The output of Estimator.evaluate on this checkpoint. This should be set only if checkpoint_path is provided (otherwise it is unclear which checkpoint this eval refers to). Returns: The string path to the exported directory. Raises: ValueError: if the export_fn does not have the required signature """ # don't break existing export_fns that don't accept checkpoint_path and # eval_result export_fn_args = tf_inspect.getargspec(self.export_fn).args kwargs = {} if 'checkpoint_path' in export_fn_args: kwargs['checkpoint_path'] = checkpoint_path if 'eval_result' in export_fn_args: if 'checkpoint_path' not in export_fn_args: raise ValueError('An export_fn accepting eval_result must also accept ' 'checkpoint_path.') kwargs['eval_result'] = eval_result return self.export_fn(estimator, export_path, **kwargs)
apache-2.0
mravikumar281/staging-server
schoolnew/django-simple-captcha-master/captcha/conf/settings.py
2
3061
import os from django.conf import settings CAPTCHA_FONT_PATH = getattr(settings, 'CAPTCHA_FONT_PATH', os.path.normpath(os.path.join(os.path.dirname(__file__), '..', 'fonts/Vera.ttf'))) CAPTCHA_FONT_SIZE = getattr(settings, 'CAPTCHA_FONT_SIZE', 22) CAPTCHA_LETTER_ROTATION = getattr(settings, 'CAPTCHA_LETTER_ROTATION', (-35, 35)) CAPTCHA_BACKGROUND_COLOR = getattr(settings, 'CAPTCHA_BACKGROUND_COLOR', '#ffffff') CAPTCHA_FOREGROUND_COLOR = getattr(settings, 'CAPTCHA_FOREGROUND_COLOR', '#001100') CAPTCHA_CHALLENGE_FUNCT = getattr(settings, 'CAPTCHA_CHALLENGE_FUNCT', 'captcha.helpers.random_char_challenge') CAPTCHA_NOISE_FUNCTIONS = getattr(settings, 'CAPTCHA_NOISE_FUNCTIONS', ('captcha.helpers.noise_arcs', 'captcha.helpers.noise_dots',)) CAPTCHA_FILTER_FUNCTIONS = getattr(settings, 'CAPTCHA_FILTER_FUNCTIONS', ('captcha.helpers.post_smooth',)) CAPTCHA_WORDS_DICTIONARY = getattr(settings, 'CAPTCHA_WORDS_DICTIONARY', '/usr/share/dict/words') CAPTCHA_PUNCTUATION = getattr(settings, 'CAPTCHA_PUNCTUATION', '''_"',.;:-''') CAPTCHA_FLITE_PATH = getattr(settings, 'CAPTCHA_FLITE_PATH', None) CAPTCHA_TIMEOUT = getattr(settings, 'CAPTCHA_TIMEOUT', 5) # Minutes CAPTCHA_LENGTH = int(getattr(settings, 'CAPTCHA_LENGTH', 4)) # Chars # CAPTCHA_IMAGE_BEFORE_FIELD = getattr(settings, 'CAPTCHA_IMAGE_BEFORE_FIELD', True) CAPTCHA_DICTIONARY_MIN_LENGTH = getattr(settings, 'CAPTCHA_DICTIONARY_MIN_LENGTH', 0) CAPTCHA_DICTIONARY_MAX_LENGTH = getattr(settings, 'CAPTCHA_DICTIONARY_MAX_LENGTH', 99) CAPTCHA_IMAGE_SIZE = getattr(settings, 'CAPTCHA_IMAGE_SIZE', None) CAPTCHA_IMAGE_TEMPLATE = getattr(settings, 'CAPTCHA_IMAGE_TEMPLATE', 'captcha/image.html') CAPTCHA_HIDDEN_FIELD_TEMPLATE = getattr(settings, 'CAPTCHA_HIDDEN_FIELD_TEMPLATE', 'captcha/hidden_field.html') CAPTCHA_TEXT_FIELD_TEMPLATE = getattr(settings, 'CAPTCHA_TEXT_FIELD_TEMPLATE', 'captcha/text_field.html') CAPTCHA_FIELD_TEMPLATE = getattr(settings, 'CAPTCHA_FIELD_TEMPLATE', 'captcha/field.html') CAPTCHA_OUTPUT_FORMAT = getattr(settings, 
'CAPTCHA_OUTPUT_FORMAT', None) CAPTCHA_MATH_CHALLENGE_OPERATOR = getattr(settings, 'CAPTCHA_MATH_CHALLENGE_OPERATOR', '*') CAPTCHA_TEST_MODE = getattr(settings, 'CAPTCHA_TEST_MODE', getattr(settings, 'CATPCHA_TEST_MODE', False)) # Failsafe if CAPTCHA_DICTIONARY_MIN_LENGTH > CAPTCHA_DICTIONARY_MAX_LENGTH: CAPTCHA_DICTIONARY_MIN_LENGTH, CAPTCHA_DICTIONARY_MAX_LENGTH = CAPTCHA_DICTIONARY_MAX_LENGTH, CAPTCHA_DICTIONARY_MIN_LENGTH def _callable_from_string(string_or_callable): if callable(string_or_callable): return string_or_callable else: return getattr(__import__('.'.join(string_or_callable.split('.')[:-1]), {}, {}, ['']), string_or_callable.split('.')[-1]) def get_challenge(): return _callable_from_string(CAPTCHA_CHALLENGE_FUNCT) def noise_functions(): if CAPTCHA_NOISE_FUNCTIONS: return map(_callable_from_string, CAPTCHA_NOISE_FUNCTIONS) return [] def filter_functions(): if CAPTCHA_FILTER_FUNCTIONS: return map(_callable_from_string, CAPTCHA_FILTER_FUNCTIONS) return []
mit
kdeloach/model-my-watershed
src/mmw/apps/bigcz/views.py
1
1538
# -*- coding: utf-8 -*- from __future__ import print_function from __future__ import unicode_literals from __future__ import division import dateutil.parser from rest_framework import decorators from rest_framework.exceptions import ValidationError, ParseError from rest_framework.permissions import AllowAny from rest_framework.response import Response from apps.bigcz.clients import SEARCH_FUNCTIONS from apps.bigcz.serializers import ResourceListSerializer def parse_date(value): if not value: return None return dateutil.parser.parse(value) def _do_search(request): catalog = request.query_params.get('catalog') if not catalog: raise ValidationError({ 'error': 'Required argument: catalog'}) search_kwargs = { 'query': request.query_params.get('query'), 'to_date': parse_date(request.query_params.get('to_date')), 'from_date': parse_date(request.query_params.get('from_date')), 'bbox': request.query_params.get('bbox'), } search = SEARCH_FUNCTIONS.get(catalog) if search: try: return search(**search_kwargs) except ValueError as ex: raise ParseError(ex.message) raise ValidationError({ 'error': 'Catalog must be one of: {}' .format(', '.join(SEARCH_FUNCTIONS.keys())) }) @decorators.api_view(['GET']) @decorators.permission_classes((AllowAny,)) def search(request): result = ResourceListSerializer(_do_search(request)) return Response(result.data)
apache-2.0
sogelink/ansible
lib/ansible/modules/packaging/os/apt_key.py
22
12347
#!/usr/bin/python # -*- coding: utf-8 -*- # Copyright: (c) 2012, Michael DeHaan <michael.dehaan@gmail.com> # Copyright: (c) 2012, Jayson Vantuyl <jayson@aggressive.ly> # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'core'} DOCUMENTATION = ''' --- module: apt_key author: - Jayson Vantuyl (@jvantuyl) version_added: "1.0" short_description: Add or remove an apt key description: - Add or remove an I(apt) key, optionally downloading it. notes: - doesn't download the key unless it really needs it - as a sanity check, downloaded key id must match the one specified - best practice is to specify the key id and the url options: id: description: - The identifier of the key. - Including this allows check mode to correctly report the changed state. - If specifying a subkey's id be aware that apt-key does not understand how to remove keys via a subkey id. Specify the primary key's id instead. - This parameter is required when C(state) is set to C(absent). data: description: - The keyfile contents to add to the keyring. file: description: - The path to a keyfile on the remote server to add to the keyring. keyring: description: -The path to specific keyring file in /etc/apt/trusted.gpg.d/ version_added: "1.3" url: description: - The URL to retrieve key from. keyserver: description: - The keyserver to retrieve key from. version_added: "1.6" state: description: - Ensures that the key is present (added) or absent (revoked). choices: [ absent, present ] default: present validate_certs: description: - If C(no), SSL certificates for the target url will not be validated. This should only be used on personally controlled sites using self-signed certificates. 
type: bool default: 'yes' ''' EXAMPLES = ''' - name: Add an apt key by id from a keyserver apt_key: keyserver: keyserver.ubuntu.com id: 36A1D7869245C8950F966E92D8576A8BA88D21E9 - name: Add an Apt signing key, uses whichever key is at the URL apt_key: url: https://ftp-master.debian.org/keys/archive-key-6.0.asc state: present - name: Add an Apt signing key, will not download if present apt_key: id: 473041FA url: https://ftp-master.debian.org/keys/archive-key-6.0.asc state: present - name: Remove a Apt specific signing key, leading 0x is valid apt_key: id: 0x473041FA state: absent # Use armored file since utf-8 string is expected. Must be of "PGP PUBLIC KEY BLOCK" type. - name: Add a key from a file on the Ansible server. apt_key: data: "{{ lookup('file', 'apt.asc') }}" state: present - name: Add an Apt signing key to a specific keyring file apt_key: id: 473041FA url: https://ftp-master.debian.org/keys/archive-key-6.0.asc keyring: /etc/apt/trusted.gpg.d/debian.gpg - name: Add Apt signing key on remote server to keyring apt_key: id: 473041FA file: /tmp/apt.gpg state: present ''' # FIXME: standardize into module_common from traceback import format_exc from ansible.module_utils.basic import AnsibleModule from ansible.module_utils._text import to_native from ansible.module_utils.urls import fetch_url apt_key_bin = None def find_needed_binaries(module): global apt_key_bin apt_key_bin = module.get_bin_path('apt-key', required=True) ### FIXME: Is there a reason that gpg and grep are checked? Is it just # cruft or does the apt .deb package not require them (and if they're not # installed, /usr/bin/apt-key fails?) module.get_bin_path('gpg', required=True) module.get_bin_path('grep', required=True) def parse_key_id(key_id): """validate the key_id and break it into segments :arg key_id: The key_id as supplied by the user. A valid key_id will be 8, 16, or more hexadecimal chars with an optional leading ``0x``. 
:returns: The portion of key_id suitable for apt-key del, the portion suitable for comparisons with --list-public-keys, and the portion that can be used with --recv-key. If key_id is long enough, these will be the last 8 characters of key_id, the last 16 characters, and all of key_id. If key_id is not long enough, some of the values will be the same. * apt-key del <= 1.10 has a bug with key_id != 8 chars * apt-key adv --list-public-keys prints 16 chars * apt-key adv --recv-key can take more chars """ # Make sure the key_id is valid hexadecimal int(key_id, 16) key_id = key_id.upper() if key_id.startswith('0X'): key_id = key_id[2:] key_id_len = len(key_id) if (key_id_len != 8 and key_id_len != 16) and key_id_len <= 16: raise ValueError('key_id must be 8, 16, or 16+ hexadecimal characters in length') short_key_id = key_id[-8:] fingerprint = key_id if key_id_len > 16: fingerprint = key_id[-16:] return short_key_id, fingerprint, key_id def all_keys(module, keyring, short_format): if keyring: cmd = "%s --keyring %s adv --list-public-keys --keyid-format=long" % (apt_key_bin, keyring) else: cmd = "%s adv --list-public-keys --keyid-format=long" % apt_key_bin (rc, out, err) = module.run_command(cmd) results = [] lines = to_native(out).split('\n') for line in lines: if (line.startswith("pub") or line.startswith("sub")) and not "expired" in line: tokens = line.split() code = tokens[1] (len_type, real_code) = code.split("/") results.append(real_code) if short_format: results = shorten_key_ids(results) return results def shorten_key_ids(key_id_list): """ Takes a list of key ids, and converts them to the 'short' format, by reducing them to their last 8 characters. 
""" short = [] for key in key_id_list: short.append(key[-8:]) return short def download_key(module, url): # FIXME: move get_url code to common, allow for in-memory D/L, support proxies # and reuse here if url is None: module.fail_json(msg="needed a URL but was not specified") try: rsp, info = fetch_url(module, url) if info['status'] != 200: module.fail_json(msg="Failed to download key at %s: %s" % (url, info['msg'])) return rsp.read() except Exception: module.fail_json(msg="error getting key id from url: %s" % url, traceback=format_exc()) def import_key(module, keyring, keyserver, key_id): if keyring: cmd = "%s --keyring %s adv --keyserver %s --recv %s" % (apt_key_bin, keyring, keyserver, key_id) else: cmd = "%s adv --keyserver %s --recv %s" % (apt_key_bin, keyserver, key_id) for retry in range(5): lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C') (rc, out, err) = module.run_command(cmd, environ_update=lang_env) if rc == 0: break else: # Out of retries if rc == 2 and 'not found on keyserver' in out: msg = 'Key %s not found on keyserver %s' % (key_id, keyserver) module.fail_json(cmd=cmd, msg=msg) else: msg = "Error fetching key %s from keyserver: %s" % (key_id, keyserver) module.fail_json(cmd=cmd, msg=msg, rc=rc, stdout=out, stderr=err) return True def add_key(module, keyfile, keyring, data=None): if data is not None: if keyring: cmd = "%s --keyring %s add -" % (apt_key_bin, keyring) else: cmd = "%s add -" % apt_key_bin (rc, out, err) = module.run_command(cmd, data=data, check_rc=True, binary_data=True) else: if keyring: cmd = "%s --keyring %s add %s" % (apt_key_bin, keyring, keyfile) else: cmd = "%s add %s" % (apt_key_bin, keyfile) (rc, out, err) = module.run_command(cmd, check_rc=True) return True def remove_key(module, key_id, keyring): # FIXME: use module.run_command, fail at point of error and don't discard useful stdin/stdout if keyring: cmd = '%s --keyring %s del %s' % (apt_key_bin, keyring, key_id) else: cmd = '%s del %s' % (apt_key_bin, key_id) (rc, 
out, err) = module.run_command(cmd, check_rc=True) return True def main(): module = AnsibleModule( argument_spec=dict( id=dict(required=False, default=None), url=dict(required=False), data=dict(required=False), file=dict(required=False, type='path'), key=dict(required=False), keyring=dict(required=False, type='path'), validate_certs=dict(default='yes', type='bool'), keyserver=dict(required=False), state=dict(required=False, choices=['present', 'absent'], default='present') ), supports_check_mode=True, mutually_exclusive=(('filename', 'keyserver', 'data', 'url'),), ) key_id = module.params['id'] url = module.params['url'] data = module.params['data'] filename = module.params['file'] keyring = module.params['keyring'] state = module.params['state'] keyserver = module.params['keyserver'] changed = False fingerprint = short_key_id = key_id short_format = False if key_id: try: short_key_id, fingerprint, key_id = parse_key_id(key_id) except ValueError: module.fail_json(msg='Invalid key_id', id=key_id) if len(fingerprint) == 8: short_format = True find_needed_binaries(module) keys = all_keys(module, keyring, short_format) return_values = {} if state == 'present': if fingerprint and fingerprint in keys: module.exit_json(changed=False) elif fingerprint and fingerprint not in keys and module.check_mode: ### TODO: Someday we could go further -- write keys out to # a temporary file and then extract the key id from there via gpg # to decide if the key is installed or not. 
module.exit_json(changed=True) else: if not filename and not data and not keyserver: data = download_key(module, url) if filename: add_key(module, filename, keyring) elif keyserver: import_key(module, keyring, keyserver, key_id) else: add_key(module, "-", keyring, data) changed = False keys2 = all_keys(module, keyring, short_format) if len(keys) != len(keys2): changed=True if fingerprint and fingerprint not in keys2: module.fail_json(msg="key does not seem to have been added", id=key_id) module.exit_json(changed=changed) elif state == 'absent': if not key_id: module.fail_json(msg="key is required") if fingerprint in keys: if module.check_mode: module.exit_json(changed=True) # we use the "short" id: key_id[-8:], short_format=True # it's a workaround for https://bugs.launchpad.net/ubuntu/+source/apt/+bug/1481871 if remove_key(module, short_key_id, keyring): keys = all_keys(module, keyring, short_format) if fingerprint in keys: module.fail_json(msg="apt-key del did not return an error but the key was not removed (check that the id is correct and *not* a subkey)", id=key_id) changed = True else: # FIXME: module.fail_json or exit-json immediately at point of failure module.fail_json(msg="error removing key_id", **return_values) module.exit_json(changed=changed, **return_values) if __name__ == '__main__': main()
gpl-3.0
mlperf/inference_results_v0.7
closed/Cisco/code/rnnt/tensorrt/preprocessing/parts/text/numbers.py
12
3598
# Copyright (c) 2017 Keith Ito # Copyright (c) 2020, NVIDIA CORPORATION. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ from https://github.com/keithito/tacotron Modifed to add support for time and slight tweaks to _expand_number """ import inflect import re _inflect = inflect.engine() _comma_number_re = re.compile(r'([0-9][0-9\,]+[0-9])') _decimal_number_re = re.compile(r'([0-9]+\.[0-9]+)') _pounds_re = re.compile(r'£([0-9\,]*[0-9]+)') _dollars_re = re.compile(r'\$([0-9\.\,]*[0-9]+)') _ordinal_re = re.compile(r'[0-9]+(st|nd|rd|th)') _number_re = re.compile(r'[0-9]+') _time_re = re.compile(r'([0-9]{1,2}):([0-9]{2})') def _remove_commas(m): return m.group(1).replace(',', '') def _expand_decimal_point(m): return m.group(1).replace('.', ' point ') def _expand_dollars(m): match = m.group(1) parts = match.split('.') if len(parts) > 2: return match + ' dollars' # Unexpected format dollars = int(parts[0]) if parts[0] else 0 cents = int(parts[1]) if len(parts) > 1 and parts[1] else 0 if dollars and cents: dollar_unit = 'dollar' if dollars == 1 else 'dollars' cent_unit = 'cent' if cents == 1 else 'cents' return '%s %s, %s %s' % (dollars, dollar_unit, cents, cent_unit) elif dollars: dollar_unit = 'dollar' if dollars == 1 else 'dollars' return '%s %s' % (dollars, dollar_unit) elif cents: cent_unit = 'cent' if cents == 1 else 'cents' return '%s %s' % (cents, cent_unit) else: return 'zero dollars' def _expand_ordinal(m): return _inflect.number_to_words(m.group(0)) def 
_expand_number(m): if int(m.group(0)[0]) == 0: return _inflect.number_to_words(m.group(0), andword='', group=1) num = int(m.group(0)) if num > 1000 and num < 3000: if num == 2000: return 'two thousand' elif num > 2000 and num < 2010: return 'two thousand ' + _inflect.number_to_words(num % 100) elif num % 100 == 0: return _inflect.number_to_words(num // 100) + ' hundred' else: return _inflect.number_to_words(num, andword='', zero='oh', group=2).replace(', ', ' ') # Add check for number phones and other large numbers elif num > 1000000000 and num % 10000 != 0: return _inflect.number_to_words(num, andword='', group=1) else: return _inflect.number_to_words(num, andword='') def _expand_time(m): mins = int(m.group(2)) if mins == 0: return _inflect.number_to_words(m.group(1)) return " ".join([_inflect.number_to_words(m.group(1)), _inflect.number_to_words(m.group(2))]) def normalize_numbers(text): text = re.sub(_comma_number_re, _remove_commas, text) text = re.sub(_pounds_re, r'\1 pounds', text) text = re.sub(_dollars_re, _expand_dollars, text) text = re.sub(_decimal_number_re, _expand_decimal_point, text) text = re.sub(_ordinal_re, _expand_ordinal, text) text = re.sub(_number_re, _expand_number, text) text = re.sub(_time_re, _expand_time, text) return text
apache-2.0
kevinkrenz/py-jeopardy
main.py
1
20444
# -*- coding: utf-8 -*- # Form implementation generated from reading ui file 'main.ui' # # Created: Mon Nov 29 22:17:37 2010 # by: PyQt4 UI code generator 4.7.4 # # WARNING! All changes made in this file will be lost! from PyQt4 import QtCore, QtGui class Ui_Jeopardy(object): def setupUi(self, Jeopardy): Jeopardy.setObjectName("Jeopardy") Jeopardy.resize(1095, 640) self.cat1level1 = QtGui.QPushButton(Jeopardy) self.cat1level1.setGeometry(QtCore.QRect(10, 40, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat1level1.setFont(font) self.cat1level1.setObjectName("cat1level1") self.cat2level1 = QtGui.QPushButton(Jeopardy) self.cat2level1.setGeometry(QtCore.QRect(170, 40, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat2level1.setFont(font) self.cat2level1.setObjectName("cat2level1") self.cat3level1 = QtGui.QPushButton(Jeopardy) self.cat3level1.setGeometry(QtCore.QRect(330, 40, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat3level1.setFont(font) self.cat3level1.setObjectName("cat3level1") self.cat4level1 = QtGui.QPushButton(Jeopardy) self.cat4level1.setGeometry(QtCore.QRect(490, 40, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat4level1.setFont(font) self.cat4level1.setObjectName("cat4level1") self.cat1level2 = QtGui.QPushButton(Jeopardy) self.cat1level2.setGeometry(QtCore.QRect(10, 160, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat1level2.setFont(font) self.cat1level2.setObjectName("cat1level2") self.cat3level2 = QtGui.QPushButton(Jeopardy) self.cat3level2.setGeometry(QtCore.QRect(330, 160, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat3level2.setFont(font) self.cat3level2.setObjectName("cat3level2") self.cat2level2 = QtGui.QPushButton(Jeopardy) self.cat2level2.setGeometry(QtCore.QRect(170, 160, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat2level2.setFont(font) self.cat2level2.setObjectName("cat2level2") self.cat4level2 = QtGui.QPushButton(Jeopardy) 
self.cat4level2.setGeometry(QtCore.QRect(490, 160, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat4level2.setFont(font) self.cat4level2.setObjectName("cat4level2") self.cat2level4 = QtGui.QPushButton(Jeopardy) self.cat2level4.setGeometry(QtCore.QRect(170, 400, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat2level4.setFont(font) self.cat2level4.setObjectName("cat2level4") self.cat4level4 = QtGui.QPushButton(Jeopardy) self.cat4level4.setGeometry(QtCore.QRect(490, 400, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat4level4.setFont(font) self.cat4level4.setObjectName("cat4level4") self.cat1level3 = QtGui.QPushButton(Jeopardy) self.cat1level3.setGeometry(QtCore.QRect(10, 280, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat1level3.setFont(font) self.cat1level3.setObjectName("cat1level3") self.cat3level3 = QtGui.QPushButton(Jeopardy) self.cat3level3.setGeometry(QtCore.QRect(330, 280, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat3level3.setFont(font) self.cat3level3.setObjectName("cat3level3") self.cat2level3 = QtGui.QPushButton(Jeopardy) self.cat2level3.setGeometry(QtCore.QRect(170, 280, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat2level3.setFont(font) self.cat2level3.setObjectName("cat2level3") self.cat1level4 = QtGui.QPushButton(Jeopardy) self.cat1level4.setGeometry(QtCore.QRect(10, 400, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat1level4.setFont(font) self.cat1level4.setObjectName("cat1level4") self.cat4level3 = QtGui.QPushButton(Jeopardy) self.cat4level3.setGeometry(QtCore.QRect(490, 280, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat4level3.setFont(font) self.cat4level3.setObjectName("cat4level3") self.cat3level4 = QtGui.QPushButton(Jeopardy) self.cat3level4.setGeometry(QtCore.QRect(330, 400, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat3level4.setFont(font) self.cat3level4.setObjectName("cat3level4") self.cat1 = 
QtGui.QLabel(Jeopardy) self.cat1.setGeometry(QtCore.QRect(20, 0, 121, 41)) font = QtGui.QFont() font.setPointSize(18) self.cat1.setFont(font) self.cat1.setObjectName("cat1") self.cat2 = QtGui.QLabel(Jeopardy) self.cat2.setGeometry(QtCore.QRect(180, 0, 121, 41)) font = QtGui.QFont() font.setPointSize(18) self.cat2.setFont(font) self.cat2.setObjectName("cat2") self.cat3 = QtGui.QLabel(Jeopardy) self.cat3.setGeometry(QtCore.QRect(330, 0, 121, 41)) font = QtGui.QFont() font.setPointSize(18) self.cat3.setFont(font) self.cat3.setObjectName("cat3") self.cat4 = QtGui.QLabel(Jeopardy) self.cat4.setGeometry(QtCore.QRect(490, 0, 121, 41)) font = QtGui.QFont() font.setPointSize(18) self.cat4.setFont(font) self.cat4.setObjectName("cat4") self.cat3level5 = QtGui.QPushButton(Jeopardy) self.cat3level5.setGeometry(QtCore.QRect(330, 520, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat3level5.setFont(font) self.cat3level5.setObjectName("cat3level5") self.cat1level5 = QtGui.QPushButton(Jeopardy) self.cat1level5.setGeometry(QtCore.QRect(10, 520, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat1level5.setFont(font) self.cat1level5.setObjectName("cat1level5") self.cat4level5 = QtGui.QPushButton(Jeopardy) self.cat4level5.setGeometry(QtCore.QRect(490, 520, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat4level5.setFont(font) self.cat4level5.setObjectName("cat4level5") self.cat2level5 = QtGui.QPushButton(Jeopardy) self.cat2level5.setGeometry(QtCore.QRect(170, 520, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat2level5.setFont(font) self.cat2level5.setObjectName("cat2level5") self.cat5level2 = QtGui.QPushButton(Jeopardy) self.cat5level2.setGeometry(QtCore.QRect(650, 160, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat5level2.setFont(font) self.cat5level2.setObjectName("cat5level2") self.cat5level3 = QtGui.QPushButton(Jeopardy) self.cat5level3.setGeometry(QtCore.QRect(650, 280, 141, 111)) font = QtGui.QFont() 
font.setPointSize(28) self.cat5level3.setFont(font) self.cat5level3.setObjectName("cat5level3") self.cat5level1 = QtGui.QPushButton(Jeopardy) self.cat5level1.setGeometry(QtCore.QRect(650, 40, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat5level1.setFont(font) self.cat5level1.setObjectName("cat5level1") self.cat5 = QtGui.QLabel(Jeopardy) self.cat5.setGeometry(QtCore.QRect(650, 0, 121, 41)) font = QtGui.QFont() font.setPointSize(18) self.cat5.setFont(font) self.cat5.setObjectName("cat5") self.cat5level4 = QtGui.QPushButton(Jeopardy) self.cat5level4.setGeometry(QtCore.QRect(650, 400, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat5level4.setFont(font) self.cat5level4.setObjectName("cat5level4") self.cat5level5 = QtGui.QPushButton(Jeopardy) self.cat5level5.setGeometry(QtCore.QRect(650, 520, 141, 111)) font = QtGui.QFont() font.setPointSize(28) self.cat5level5.setFont(font) self.cat5level5.setObjectName("cat5level5") self.answer = QtGui.QLabel(Jeopardy) self.answer.setEnabled(False) self.answer.setGeometry(QtCore.QRect(160, 110, 501, 341)) font = QtGui.QFont() font.setPointSize(36) self.answer.setFont(font) self.answer.setWordWrap(True) self.answer.setObjectName("answer") self.label = QtGui.QLabel(Jeopardy) self.label.setGeometry(QtCore.QRect(860, 70, 111, 51)) font = QtGui.QFont() font.setPointSize(20) self.label.setFont(font) self.label.setObjectName("label") self.line = QtGui.QFrame(Jeopardy) self.line.setGeometry(QtCore.QRect(810, 40, 20, 581)) self.line.setFrameShape(QtGui.QFrame.VLine) self.line.setFrameShadow(QtGui.QFrame.Sunken) self.line.setObjectName("line") self.right = QtGui.QPushButton(Jeopardy) self.right.setGeometry(QtCore.QRect(950, 460, 98, 27)) self.right.setObjectName("right") self.wrong = QtGui.QPushButton(Jeopardy) self.wrong.setGeometry(QtCore.QRect(950, 500, 98, 27)) self.wrong.setObjectName("wrong") self.selectTeam = QtGui.QListWidget(Jeopardy) self.selectTeam.setGeometry(QtCore.QRect(870, 450, 61, 121)) 
self.selectTeam.setObjectName("selectTeam") QtGui.QListWidgetItem(self.selectTeam) QtGui.QListWidgetItem(self.selectTeam) QtGui.QListWidgetItem(self.selectTeam) QtGui.QListWidgetItem(self.selectTeam) QtGui.QListWidgetItem(self.selectTeam) QtGui.QListWidgetItem(self.selectTeam) self.label_2 = QtGui.QLabel(Jeopardy) self.label_2.setGeometry(QtCore.QRect(860, 130, 121, 31)) font = QtGui.QFont() font.setPointSize(20) self.label_2.setFont(font) self.label_2.setObjectName("label_2") self.label_3 = QtGui.QLabel(Jeopardy) self.label_3.setGeometry(QtCore.QRect(860, 170, 121, 31)) font = QtGui.QFont() font.setPointSize(20) self.label_3.setFont(font) self.label_3.setObjectName("label_3") self.label_4 = QtGui.QLabel(Jeopardy) self.label_4.setGeometry(QtCore.QRect(860, 210, 121, 31)) font = QtGui.QFont() font.setPointSize(20) self.label_4.setFont(font) self.label_4.setObjectName("label_4") self.team1 = QtGui.QLabel(Jeopardy) self.team1.setGeometry(QtCore.QRect(970, 130, 101, 31)) font = QtGui.QFont() font.setPointSize(20) self.team1.setFont(font) self.team1.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.team1.setObjectName("team1") self.team2 = QtGui.QLabel(Jeopardy) self.team2.setGeometry(QtCore.QRect(970, 170, 101, 31)) font = QtGui.QFont() font.setPointSize(20) self.team2.setFont(font) self.team2.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.team2.setObjectName("team2") self.team3 = QtGui.QLabel(Jeopardy) self.team3.setGeometry(QtCore.QRect(970, 210, 101, 31)) font = QtGui.QFont() font.setPointSize(20) self.team3.setFont(font) self.team3.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.team3.setObjectName("team3") self.team4 = QtGui.QLabel(Jeopardy) self.team4.setGeometry(QtCore.QRect(970, 250, 101, 31)) font = QtGui.QFont() font.setPointSize(20) self.team4.setFont(font) 
self.team4.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.team4.setObjectName("team4") self.label_5 = QtGui.QLabel(Jeopardy) self.label_5.setGeometry(QtCore.QRect(860, 330, 121, 31)) font = QtGui.QFont() font.setPointSize(20) self.label_5.setFont(font) self.label_5.setObjectName("label_5") self.team5 = QtGui.QLabel(Jeopardy) self.team5.setGeometry(QtCore.QRect(970, 290, 101, 31)) font = QtGui.QFont() font.setPointSize(20) self.team5.setFont(font) self.team5.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.team5.setObjectName("team5") self.team6 = QtGui.QLabel(Jeopardy) self.team6.setGeometry(QtCore.QRect(970, 330, 101, 31)) font = QtGui.QFont() font.setPointSize(20) self.team6.setFont(font) self.team6.setAlignment(QtCore.Qt.AlignRight|QtCore.Qt.AlignTrailing|QtCore.Qt.AlignVCenter) self.team6.setObjectName("team6") self.label_6 = QtGui.QLabel(Jeopardy) self.label_6.setGeometry(QtCore.QRect(860, 250, 121, 31)) font = QtGui.QFont() font.setPointSize(20) self.label_6.setFont(font) self.label_6.setObjectName("label_6") self.label_7 = QtGui.QLabel(Jeopardy) self.label_7.setGeometry(QtCore.QRect(860, 290, 121, 31)) font = QtGui.QFont() font.setPointSize(20) self.label_7.setFont(font) self.label_7.setObjectName("label_7") self.skip = QtGui.QPushButton(Jeopardy) self.skip.setGeometry(QtCore.QRect(950, 540, 98, 27)) self.skip.setObjectName("skip") self.retranslateUi(Jeopardy) QtCore.QMetaObject.connectSlotsByName(Jeopardy) def retranslateUi(self, Jeopardy): Jeopardy.setWindowTitle(QtGui.QApplication.translate("Jeopardy", "Jeopardy", None, QtGui.QApplication.UnicodeUTF8)) self.cat1level1.setText(QtGui.QApplication.translate("Jeopardy", "100", None, QtGui.QApplication.UnicodeUTF8)) self.cat2level1.setText(QtGui.QApplication.translate("Jeopardy", "100", None, QtGui.QApplication.UnicodeUTF8)) self.cat3level1.setText(QtGui.QApplication.translate("Jeopardy", "100", None, QtGui.QApplication.UnicodeUTF8)) 
self.cat4level1.setText(QtGui.QApplication.translate("Jeopardy", "100", None, QtGui.QApplication.UnicodeUTF8)) self.cat1level2.setText(QtGui.QApplication.translate("Jeopardy", "200", None, QtGui.QApplication.UnicodeUTF8)) self.cat3level2.setText(QtGui.QApplication.translate("Jeopardy", "200", None, QtGui.QApplication.UnicodeUTF8)) self.cat2level2.setText(QtGui.QApplication.translate("Jeopardy", "200", None, QtGui.QApplication.UnicodeUTF8)) self.cat4level2.setText(QtGui.QApplication.translate("Jeopardy", "200", None, QtGui.QApplication.UnicodeUTF8)) self.cat2level4.setText(QtGui.QApplication.translate("Jeopardy", "400", None, QtGui.QApplication.UnicodeUTF8)) self.cat4level4.setText(QtGui.QApplication.translate("Jeopardy", "400", None, QtGui.QApplication.UnicodeUTF8)) self.cat1level3.setText(QtGui.QApplication.translate("Jeopardy", "300", None, QtGui.QApplication.UnicodeUTF8)) self.cat3level3.setText(QtGui.QApplication.translate("Jeopardy", "300", None, QtGui.QApplication.UnicodeUTF8)) self.cat2level3.setText(QtGui.QApplication.translate("Jeopardy", "300", None, QtGui.QApplication.UnicodeUTF8)) self.cat1level4.setText(QtGui.QApplication.translate("Jeopardy", "400", None, QtGui.QApplication.UnicodeUTF8)) self.cat4level3.setText(QtGui.QApplication.translate("Jeopardy", "300", None, QtGui.QApplication.UnicodeUTF8)) self.cat3level4.setText(QtGui.QApplication.translate("Jeopardy", "400", None, QtGui.QApplication.UnicodeUTF8)) self.cat1.setText(QtGui.QApplication.translate("Jeopardy", "Category 1", None, QtGui.QApplication.UnicodeUTF8)) self.cat2.setText(QtGui.QApplication.translate("Jeopardy", "Category 2", None, QtGui.QApplication.UnicodeUTF8)) self.cat3.setText(QtGui.QApplication.translate("Jeopardy", "Category 3", None, QtGui.QApplication.UnicodeUTF8)) self.cat4.setText(QtGui.QApplication.translate("Jeopardy", "Category 4", None, QtGui.QApplication.UnicodeUTF8)) self.cat3level5.setText(QtGui.QApplication.translate("Jeopardy", "500", None, 
QtGui.QApplication.UnicodeUTF8)) self.cat1level5.setText(QtGui.QApplication.translate("Jeopardy", "500", None, QtGui.QApplication.UnicodeUTF8)) self.cat4level5.setText(QtGui.QApplication.translate("Jeopardy", "500", None, QtGui.QApplication.UnicodeUTF8)) self.cat2level5.setText(QtGui.QApplication.translate("Jeopardy", "500", None, QtGui.QApplication.UnicodeUTF8)) self.cat5level2.setText(QtGui.QApplication.translate("Jeopardy", "200", None, QtGui.QApplication.UnicodeUTF8)) self.cat5level3.setText(QtGui.QApplication.translate("Jeopardy", "300", None, QtGui.QApplication.UnicodeUTF8)) self.cat5level1.setText(QtGui.QApplication.translate("Jeopardy", "100", None, QtGui.QApplication.UnicodeUTF8)) self.cat5.setText(QtGui.QApplication.translate("Jeopardy", "Category 5", None, QtGui.QApplication.UnicodeUTF8)) self.cat5level4.setText(QtGui.QApplication.translate("Jeopardy", "400", None, QtGui.QApplication.UnicodeUTF8)) self.cat5level5.setText(QtGui.QApplication.translate("Jeopardy", "500", None, QtGui.QApplication.UnicodeUTF8)) self.answer.setText(QtGui.QApplication.translate("Jeopardy", "Answer", None, QtGui.QApplication.UnicodeUTF8)) self.label.setText(QtGui.QApplication.translate("Jeopardy", "Scores", None, QtGui.QApplication.UnicodeUTF8)) self.right.setText(QtGui.QApplication.translate("Jeopardy", "Right", None, QtGui.QApplication.UnicodeUTF8)) self.wrong.setText(QtGui.QApplication.translate("Jeopardy", "Wrong", None, QtGui.QApplication.UnicodeUTF8)) __sortingEnabled = self.selectTeam.isSortingEnabled() self.selectTeam.setSortingEnabled(False) self.selectTeam.item(0).setText(QtGui.QApplication.translate("Jeopardy", "Team 1", None, QtGui.QApplication.UnicodeUTF8)) self.selectTeam.item(1).setText(QtGui.QApplication.translate("Jeopardy", "Team 2", None, QtGui.QApplication.UnicodeUTF8)) self.selectTeam.item(2).setText(QtGui.QApplication.translate("Jeopardy", "Team 3", None, QtGui.QApplication.UnicodeUTF8)) 
self.selectTeam.item(3).setText(QtGui.QApplication.translate("Jeopardy", "Team 4", None, QtGui.QApplication.UnicodeUTF8)) self.selectTeam.item(4).setText(QtGui.QApplication.translate("Jeopardy", "Team 5", None, QtGui.QApplication.UnicodeUTF8)) self.selectTeam.item(5).setText(QtGui.QApplication.translate("Jeopardy", "Team 6", None, QtGui.QApplication.UnicodeUTF8)) self.selectTeam.setSortingEnabled(__sortingEnabled) self.label_2.setText(QtGui.QApplication.translate("Jeopardy", "Team 1:", None, QtGui.QApplication.UnicodeUTF8)) self.label_3.setText(QtGui.QApplication.translate("Jeopardy", "Team 2:", None, QtGui.QApplication.UnicodeUTF8)) self.label_4.setText(QtGui.QApplication.translate("Jeopardy", "Team 3:", None, QtGui.QApplication.UnicodeUTF8)) self.team1.setText(QtGui.QApplication.translate("Jeopardy", "0", None, QtGui.QApplication.UnicodeUTF8)) self.team2.setText(QtGui.QApplication.translate("Jeopardy", "0", None, QtGui.QApplication.UnicodeUTF8)) self.team3.setText(QtGui.QApplication.translate("Jeopardy", "0", None, QtGui.QApplication.UnicodeUTF8)) self.team4.setText(QtGui.QApplication.translate("Jeopardy", "0", None, QtGui.QApplication.UnicodeUTF8)) self.label_5.setText(QtGui.QApplication.translate("Jeopardy", "Team 6:", None, QtGui.QApplication.UnicodeUTF8)) self.team5.setText(QtGui.QApplication.translate("Jeopardy", "0", None, QtGui.QApplication.UnicodeUTF8)) self.team6.setText(QtGui.QApplication.translate("Jeopardy", "0", None, QtGui.QApplication.UnicodeUTF8)) self.label_6.setText(QtGui.QApplication.translate("Jeopardy", "Team 4:", None, QtGui.QApplication.UnicodeUTF8)) self.label_7.setText(QtGui.QApplication.translate("Jeopardy", "Team 5:", None, QtGui.QApplication.UnicodeUTF8)) self.skip.setText(QtGui.QApplication.translate("Jeopardy", "Skip", None, QtGui.QApplication.UnicodeUTF8))
mit
PawarPawan/h2o-v3
h2o-py/tests/testdir_misc/pyunit_levels_nlevels_setlevel_setLevels.py
3
4202
import sys sys.path.insert(1, "../../") import h2o def levels_nlevels_setlevel_setLevels_test(ip,port): iris = h2o.import_file(path=h2o.locate("smalldata/iris/iris.csv")) # frame (default) levels = iris.levels() nlevels = iris.nlevels() # frame (w/ index) levels = iris.levels(col=4) nlevels = iris.nlevels(col=4) assert set(['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']) == set(levels), \ "Expected levels to be {0}, but got {1}".format(set(['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']),levels) assert nlevels == 3, "Expected nlevels to be 3, but got {0}".format(nlevels) # vec iris[4] = iris[4].setLevel(level='Iris-setosa') levels = iris.levels(col=4) nlevels = iris.nlevels(col=4) assert set(['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']) == set(levels), \ "Expected levels to be {0}, but got {1}".format(set(['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']),levels) assert nlevels == 3, "Expected nlevels to be 3, but got {0}".format(nlevels) assert iris[0,4] == 'Iris-setosa' levels = iris[4].levels() nlevels = iris[4].nlevels() assert set(['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']) == set(levels), \ "Expected levels to be {0}, but got {1}".format(set(['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']),levels) assert nlevels == 3, "Expected nlevels to be 3, but got {0}".format(nlevels) iris[4] = iris[4].setLevel(level='Iris-versicolor') levels = iris.levels(col=4) nlevels = iris.nlevels(col=4) assert set(['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']) == set(levels), \ "Expected levels to be {0}, but got {1}".format(set(['Iris-setosa', 'Iris-versicolor', 'Iris-virginica']),levels) assert nlevels == 3, "Expected nlevels to be 3, but got {0}".format(nlevels) assert iris[0,4] == 'Iris-versicolor' levels = iris[1].levels() nlevels = iris[1].nlevels() assert levels == None, "Expected levels to be None, but got {0}".format(levels) assert nlevels == 0, "Expected nlevels to be 0, but got {0}".format(nlevels) ################### reimport, 
set new domains, rerun tests ################################### iris = h2o.import_file(path=h2o.locate("smalldata/iris/iris.csv")) iris[4] = iris[4].setLevels(levels=["a", "b", "c"]) # frame (default) levels = iris.levels() nlevels = iris.nlevels() # frame (w/ index) levels = iris.levels(col=4) nlevels = iris.nlevels(col=4) assert set(['a', 'b', 'c']) == set(levels), \ "Expected levels to be {0}, but got {1}".format(set(['a', 'b', 'c']),levels) assert nlevels == 3, "Expected nlevels to be 3, but got {0}".format(nlevels) # vec iris[4] = iris[4].setLevel(level='a') levels = iris.levels(col=4) nlevels = iris.nlevels(col=4) assert set(['a', 'b', 'c']) == set(levels), \ "Expected levels to be {0}, but got {1}".format(set(['a', 'b', 'c']),levels) assert nlevels == 3, "Expected nlevels to be 3, but got {0}".format(nlevels) assert iris[0,4] == 'a' levels = iris[4].levels() nlevels = iris[4].nlevels() assert set(['a', 'b', 'c']) == set(levels), \ "Expected levels to be {0}, but got {1}".format(set(['a', 'b', 'c']),levels) assert nlevels == 3, "Expected nlevels to be 3, but got {0}".format(nlevels) iris[4] = iris[4].setLevel(level='b') levels = iris.levels(col=4) nlevels = iris.nlevels(col=4) assert set(['a', 'b', 'c']) == set(levels), \ "Expected levels to be {0}, but got {1}".format(set(['a', 'b', 'c']),levels) assert nlevels == 3, "Expected nlevels to be 3, but got {0}".format(nlevels) assert iris[0,4] == 'b' levels = iris[1].levels() nlevels = iris[1].nlevels() assert levels == None, "Expected levels to be None, but got {0}".format(levels) assert nlevels == 0, "Expected nlevels to be 0, but got {0}".format(nlevels) one_column_frame = iris[4] one_column_frame = one_column_frame.setLevel(level='c') assert one_column_frame[0,0] == 'c' if __name__ == "__main__": h2o.run_test(sys.argv, levels_nlevels_setlevel_setLevels_test)
apache-2.0
adazey/Muzez
libs/youtube_dl/extractor/xstream.py
102
3981
# coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import ( int_or_none, parse_iso8601, xpath_with_ns, xpath_text, find_xpath_attr, ) class XstreamIE(InfoExtractor): _VALID_URL = r'''(?x) (?: xstream:| https?://frontend\.xstream\.(?:dk|net)/ ) (?P<partner_id>[^/]+) (?: :| /feed/video/\?.*?\bid= ) (?P<id>\d+) ''' _TESTS = [{ 'url': 'http://frontend.xstream.dk/btno/feed/video/?platform=web&id=86588', 'md5': 'd7d17e3337dc80de6d3a540aefbe441b', 'info_dict': { 'id': '86588', 'ext': 'mov', 'title': 'Otto Wollertsen', 'description': 'Vestlendingen Otto Fredrik Wollertsen', 'timestamp': 1430473209, 'upload_date': '20150501', }, }, { 'url': 'http://frontend.xstream.dk/ap/feed/video/?platform=web&id=21039', 'only_matching': True, }] def _extract_video_info(self, partner_id, video_id): data = self._download_xml( 'http://frontend.xstream.dk/%s/feed/video/?platform=web&id=%s' % (partner_id, video_id), video_id) NS_MAP = { 'atom': 'http://www.w3.org/2005/Atom', 'xt': 'http://xstream.dk/', 'media': 'http://search.yahoo.com/mrss/', } entry = data.find(xpath_with_ns('./atom:entry', NS_MAP)) title = xpath_text( entry, xpath_with_ns('./atom:title', NS_MAP), 'title') description = xpath_text( entry, xpath_with_ns('./atom:summary', NS_MAP), 'description') timestamp = parse_iso8601(xpath_text( entry, xpath_with_ns('./atom:published', NS_MAP), 'upload date')) formats = [] media_group = entry.find(xpath_with_ns('./media:group', NS_MAP)) for media_content in media_group.findall(xpath_with_ns('./media:content', NS_MAP)): media_url = media_content.get('url') if not media_url: continue tbr = int_or_none(media_content.get('bitrate')) mobj = re.search(r'^(?P<url>rtmp://[^/]+/(?P<app>[^/]+))/(?P<playpath>.+)$', media_url) if mobj: formats.append({ 'url': mobj.group('url'), 'play_path': 'mp4:%s' % mobj.group('playpath'), 'app': mobj.group('app'), 'ext': 'flv', 'tbr': tbr, 'format_id': 'rtmp-%d' % tbr, }) else: formats.append({ 'url': 
media_url, 'tbr': tbr, }) self._sort_formats(formats) link = find_xpath_attr( entry, xpath_with_ns('./atom:link', NS_MAP), 'rel', 'original') if link is not None: formats.append({ 'url': link.get('href'), 'format_id': link.get('rel'), 'preference': 1, }) thumbnails = [{ 'url': splash.get('url'), 'width': int_or_none(splash.get('width')), 'height': int_or_none(splash.get('height')), } for splash in media_group.findall(xpath_with_ns('./xt:splash', NS_MAP))] return { 'id': video_id, 'title': title, 'description': description, 'timestamp': timestamp, 'formats': formats, 'thumbnails': thumbnails, } def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) partner_id = mobj.group('partner_id') video_id = mobj.group('id') return self._extract_video_info(partner_id, video_id)
gpl-3.0
guillaume-philippon/aquilon
lib/aquilon/aqdb/model/host_environment.py
2
2215
# -*- cpy-indent-level: 4; indent-tabs-mode: nil -*- # ex: set expandtab softtabstop=4 shiftwidth=4: # # Copyright (C) 2008,2009,2010,2011,2012,2013,2014 Contributor # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from datetime import datetime from sqlalchemy.orm import deferred from sqlalchemy import Column, Integer, DateTime, Sequence, String, event from aquilon.aqdb.model import Base, SingleInstanceMixin _TN = 'host_environment' class HostEnvironment(SingleInstanceMixin, Base): """ Describes the state a host is within the provisioning lifecycle """ __tablename__ = _TN _class_label = 'Host Environment' id = Column(Integer, Sequence('%s_id_seq' % _TN), primary_key=True) name = Column(String(16), nullable=False, unique=True) creation_date = deferred(Column(DateTime, default=datetime.now, nullable=False)) __table_args__ = ({'info': {'unique_fields': ['name']}},) __mapper_args__ = {'polymorphic_on': name} def __repr__(self): return str(self.name) host_env = HostEnvironment.__table__ # pylint: disable=C0103 event.listen(host_env, "after_create", HostEnvironment.populate_const_table) class Development(HostEnvironment): __mapper_args__ = {'polymorphic_identity': 'dev'} class UAT(HostEnvironment): __mapper_args__ = {'polymorphic_identity': 'uat'} class QA(HostEnvironment): __mapper_args__ = {'polymorphic_identity': 'qa'} class Legacy(HostEnvironment): __mapper_args__ = {'polymorphic_identity': 'legacy'} class Production(HostEnvironment): __mapper_args__ = {'polymorphic_identity': 
'prod'} class Infra(HostEnvironment): __mapper_args__ = {'polymorphic_identity': 'infra'}
apache-2.0
tersmitten/ansible
lib/ansible/modules/cloud/ovirt/ovirt_tag.py
66
8130
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2016 Red Hat, Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#

ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}

DOCUMENTATION = '''
---
module: ovirt_tag
short_description: Module to manage tags in oVirt/RHV
version_added: "2.3"
author: "Ondra Machacek (@machacekondra)"
description:
    - "This module manage tags in oVirt/RHV. It can also manage assignments of those tags to entities."
options:
    id:
        description:
            - "ID of the tag to manage."
        version_added: "2.8"
    name:
        description:
            - "Name of the tag to manage."
        required: true
    state:
        description:
            - "Should the tag be present/absent/attached/detached."
            - "C(Note): I(attached) and I(detached) states are supported since version 2.4."
        choices: ['present', 'absent', 'attached', 'detached']
        default: present
    description:
        description:
            - "Description of the tag to manage."
    parent:
        description:
            - "Name of the parent tag."
    vms:
        description:
            - "List of the VMs names, which should have assigned this tag."
    hosts:
        description:
            - "List of the hosts names, which should have assigned this tag."
extends_documentation_fragment: ovirt
'''

EXAMPLES = '''
# Examples don't contain auth parameter for simplicity,
# look at ovirt_auth module to see how to reuse authentication:

# Create(if not exists) and assign tag to vms vm1 and vm2:
- ovirt_tag:
    name: mytag
    vms:
      - vm1
      - vm2

# Attach a tag to VM 'vm3', keeping the rest already attached tags on VM:
- ovirt_tag:
    name: mytag
    state: attached
    vms:
      - vm3

# Detach a tag from VM 'vm3', keeping the rest already attached tags on VM:
- ovirt_tag:
    name: mytag
    state: detached
    vms:
      - vm3

# To detach all VMs from tag:
- ovirt_tag:
    name: mytag
    vms: []

# Remove tag
- ovirt_tag:
    state: absent
    name: mytag

# Change Tag Name
- ovirt_tag:
    id: 00000000-0000-0000-0000-000000000000
    name: "new_tag_name"
'''

RETURN = '''
id:
    description: ID of the tag which is managed
    returned: On success if tag is found.
    type: str
    sample: 7de90f31-222c-436c-a1ca-7e655bd5b60c
tag:
    description: "Dictionary of all the tag attributes. Tag attributes can be found on your oVirt/RHV instance
                  at following url: http://ovirt.github.io/ovirt-engine-api-model/master/#types/tag."
    returned: On success if tag is found.
    type: dict
'''

import traceback

try:
    import ovirtsdk4.types as otypes
except ImportError:
    # check_sdk() reports the missing SDK with a proper module failure.
    pass

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ovirt import (
    BaseModule,
    check_sdk,
    create_connection,
    equal,
    get_id_by_name,
    ovirt_full_argument_spec,
)


class TagsModule(BaseModule):
    """oVirt tag lifecycle helper built on top of BaseModule."""

    def build_entity(self):
        """Build the otypes.Tag payload from the module parameters."""
        return otypes.Tag(
            id=self._module.params['id'],
            name=self._module.params['name'],
            description=self._module.params['description'],
            parent=otypes.Tag(
                name=self._module.params['parent'],
            ) if self._module.params['parent'] else None,
        )

    def post_create(self, entity):
        # Assignments (vms/hosts) must be synced even for a freshly
        # created tag; update_check performs that sync as a side effect.
        self.update_check(entity)

    def _update_tag_assignments(self, entity, name):
        """
        Sync assignment of this tag to the entities listed under the
        module parameter *name* ('vms' or 'hosts').

        state=present: the listed entities become exactly the tagged set
        (others are untagged); attached/detached only add/remove the tag
        on the listed entities.  A parameter value of None means "don't
        touch assignments of this kind".
        """
        if self._module.params[name] is None:
            return
        state = self.param('state')
        entities_service = getattr(self._connection.system_service(), '%s_service' % name)()
        # Names of entities which currently carry this tag (vms OR hosts,
        # depending on *name*):
        current_assigned = [
            tagged.name for tagged in entities_service.list(search='tag=%s' % self._module.params['name'])
        ]
        # Assign tags:
        if state in ['present', 'attached', 'detached']:
            for entity_name in self._module.params[name]:
                entity_id = get_id_by_name(entities_service, entity_name)
                tags_service = entities_service.service(entity_id).tags_service()
                current_tags = [tag.name for tag in tags_service.list()]
                # Assign the tag:
                if state in ['attached', 'present']:
                    if self._module.params['name'] not in current_tags:
                        if not self._module.check_mode:
                            tags_service.add(
                                tag=otypes.Tag(
                                    name=self._module.params['name'],
                                ),
                            )
                        self.changed = True
                # Detach the tag:
                elif state == 'detached':
                    if self._module.params['name'] in current_tags:
                        tag_id = get_id_by_name(tags_service, self.param('name'))
                        if not self._module.check_mode:
                            tags_service.tag_service(tag_id).remove()
                        self.changed = True

        # Unassign tags from entities no longer listed (present is
        # authoritative; attached/detached are additive/subtractive only):
        if state == 'present':
            for entity_name in [e for e in current_assigned if e not in self._module.params[name]]:
                if not self._module.check_mode:
                    entity_id = get_id_by_name(entities_service, entity_name)
                    tags_service = entities_service.service(entity_id).tags_service()
                    tag_id = get_id_by_name(tags_service, self.param('name'))
                    tags_service.tag_service(tag_id).remove()
                self.changed = True

    def _get_parent(self, entity):
        """Return the name of the tag's parent, or None for a root tag."""
        parent = None
        if entity.parent:
            parent = self._connection.follow_link(entity.parent).name
        return parent

    def update_check(self, entity):
        # Side effect: reconcile vm/host assignments before comparing
        # the scalar attributes.
        self._update_tag_assignments(entity, 'vms')
        self._update_tag_assignments(entity, 'hosts')
        return (
            equal(self._module.params.get('description'), entity.description) and
            equal(self._module.params.get('name'), entity.name) and
            equal(self._module.params.get('parent'), self._get_parent(entity))
        )


def main():
    """Entry point: parse parameters and drive TagsModule."""
    argument_spec = ovirt_full_argument_spec(
        state=dict(
            choices=['present', 'absent', 'attached', 'detached'],
            default='present',
        ),
        id=dict(default=None),
        name=dict(required=True),
        description=dict(default=None),
        parent=dict(default=None),
        vms=dict(default=None, type='list'),
        hosts=dict(default=None, type='list'),
    )
    module = AnsibleModule(
        argument_spec=argument_spec,
        supports_check_mode=True,
    )

    check_sdk(module)

    # BUGFIX: 'auth' and 'connection' used to be bound inside the try
    # block but referenced unconditionally in 'finally'.  If popping
    # 'auth' or create_connection() raised, the finally clause itself
    # crashed with NameError, masking the real error.  Bind 'auth'
    # first and guard the close on 'connection' being established.
    auth = module.params.pop('auth')
    connection = None
    try:
        connection = create_connection(auth)
        tags_service = connection.system_service().tags_service()
        tags_module = TagsModule(
            connection=connection,
            module=module,
            service=tags_service,
        )

        state = module.params['state']
        if state in ['present', 'attached', 'detached']:
            ret = tags_module.create()
        elif state == 'absent':
            ret = tags_module.remove()

        module.exit_json(**ret)
    except Exception as e:
        module.fail_json(msg=str(e), exception=traceback.format_exc())
    finally:
        if connection is not None:
            # Log out only for username/password sessions; a caller-supplied
            # token must stay valid after this module finishes.
            connection.close(logout=auth.get('token') is None)


if __name__ == "__main__":
    main()
gpl-3.0
pombredanne/invenio
modules/bibformat/lib/bibreformat.py
1
18572
## -*- mode: python; coding: utf-8; -*- ## ## This file is part of Invenio. ## Copyright (C) 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. """Call BibFormat engine and create HTML brief (and other) formats for bibliographic records. Upload formats via BibUpload.""" __revision__ = "$Id$" import sys try: from invenio.dbquery import run_sql from invenio.config import \ CFG_SITE_URL,\ CFG_TMPDIR,\ CFG_BINDIR from invenio.intbitset import intbitset from invenio.search_engine import perform_request_search, search_pattern from invenio.search_engine import print_record from invenio.bibformat import format_record from invenio.bibformat_config import CFG_BIBFORMAT_USE_OLD_BIBFORMAT from invenio.bibtask import task_init, write_message, task_set_option, \ task_get_option, task_update_progress, task_has_option, \ task_low_level_submission, task_sleep_now_if_required, \ task_get_task_param import os import time import zlib except ImportError, e: print "Error: %s" % e sys.exit(1) ### run the bibreformat task bibsched scheduled ### def bibreformat_task(fmt, sql, sql_queries, cds_query, process_format, process, recids): """ BibReformat main task """ t1 = os.times()[4] ### Query the database ### task_update_progress('Fetching records to process') if process_format: # '-without' 
parameter write_message("Querying database for records without cache...") without_format = without_fmt(sql) recIDs = recids if cds_query['field'] != "" or \ cds_query['collection'] != "" or \ cds_query['pattern'] != "": write_message("Querying database (CDS query)...") if cds_query['collection'] == "": # use search_pattern() whenever possible, as it can search # even in private collections res = search_pattern(p=cds_query['pattern'], f=cds_query['field'], m=cds_query['matching']) else: # use perform_request_search when '-c' argument has been # defined, as it is not supported by search_pattern() res = intbitset(perform_request_search(req=None, of='id', c=cds_query['collection'], p=cds_query['pattern'], f=cds_query['field'])) recIDs |= res for sql_query in sql_queries: write_message("Querying database (%s) ..." % sql_query, verbose=2) recIDs |= intbitset(run_sql(sql_query)) ### list of corresponding record IDs was retrieved ### now format the selected records if process_format: write_message("Records to be processed: %d" % (len(recIDs) \ + len(without_format))) write_message("Out of it records without existing cache: %d" % len(without_format)) else: write_message("Records to be processed: %d" % (len(recIDs))) ### Initialize main loop total_rec = 0 # Total number of records tbibformat = 0 # time taken up by external call tbibupload = 0 # time taken up by external call ### Iterate over all records prepared in lists I (option) if process: if CFG_BIBFORMAT_USE_OLD_BIBFORMAT: # FIXME: remove this # when migration from php to # python bibformat is done (total_rec_1, tbibformat_1, tbibupload_1) = iterate_over_old(recIDs, fmt) else: (total_rec_1, tbibformat_1, tbibupload_1) = iterate_over_new(recIDs, fmt) total_rec += total_rec_1 tbibformat += tbibformat_1 tbibupload += tbibupload_1 ### Iterate over all records prepared in list II (no_format) if process_format and process: if CFG_BIBFORMAT_USE_OLD_BIBFORMAT: # FIXME: remove this # when migration from php to # python 
bibformat is done (total_rec_2, tbibformat_2, tbibupload_2) = iterate_over_old(without_format, fmt) else: (total_rec_2, tbibformat_2, tbibupload_2) = iterate_over_new(without_format, fmt) total_rec += total_rec_2 tbibformat += tbibformat_2 tbibupload += tbibupload_2 ### Final statistics t2 = os.times()[4] elapsed = t2 - t1 message = "total records processed: %d" % total_rec write_message(message) message = "total processing time: %2f sec" % elapsed write_message(message) message = "Time spent on external call (os.system):" write_message(message) message = " bibformat: %2f sec" % tbibformat write_message(message) message = " bibupload: %2f sec" % tbibupload write_message(message) ### Identify recIDs of records with missing format ### def without_fmt(sql): "List of record IDs to be reformated, not having the specified format yet" rec_ids_with_cache = [] all_rec_ids = [] q1 = sql['q1'] q2 = sql['q2'] ## get complete recID list all_rec_ids = intbitset(run_sql(q1)) ## get complete recID list of formatted records rec_ids_with_cache = intbitset(run_sql(q2)) return all_rec_ids - rec_ids_with_cache ### Bibreformat all selected records (using new python bibformat) ### (see iterate_over_old further down) def iterate_over_new(list, fmt): "Iterate over list of IDs" global total_rec formatted_records = '' # (string-)List of formatted record of an iteration tbibformat = 0 # time taken up by external call tbibupload = 0 # time taken up by external call start_date = task_get_task_param('task_starting_time') # Time at which the record was formatted tot = len(list) count = 0 for recID in list: t1 = os.times()[4] start_date = time.strftime('%Y-%m-%d %H:%M:%S') formatted_record = zlib.compress(format_record(recID, fmt, on_the_fly=True)) if run_sql('SELECT id FROM bibfmt WHERE id_bibrec=%s AND format=%s', (recID, fmt)): run_sql('UPDATE bibfmt SET last_updated=%s, value=%s WHERE id_bibrec=%s AND format=%s', (start_date, formatted_record, recID, fmt)) else: run_sql('INSERT INTO 
bibfmt(id_bibrec, format, last_updated, value) VALUES(%s, %s, %s, %s)', (recID, fmt, start_date, formatted_record)) t2 = os.times()[4] tbibformat += (t2 - t1) count += 1 if (count % 100) == 0: write_message(" ... formatted %s records out of %s" % (count, tot)) task_update_progress('Formatted %s out of %s' % (count, tot)) task_sleep_now_if_required(can_stop_too=True) if (tot % 100) != 0: write_message(" ... formatted %s records out of %s" % (count, tot)) return (tot, tbibformat, tbibupload) def iterate_over_old(list, fmt): "Iterate over list of IDs" n_rec = 0 n_max = 10000 xml_content = '' # hold the contents tbibformat = 0 # time taken up by external call tbibupload = 0 # time taken up by external call total_rec = 0 # Number of formatted records for record in list: n_rec = n_rec + 1 total_rec = total_rec + 1 message = "Processing record: %d" % (record) write_message(message, verbose=9) query = "id=%d&of=xm" % (record) count = 0 contents = print_record(record, 'xm') while (contents == "") and (count < 10): contents = print_record(record, 'xm') count = count + 1 time.sleep(10) if count == 10: sys.stderr.write("Failed to download %s from %s after 10 attempts... 
terminating" % (query, CFG_SITE_URL)) sys.exit(0) xml_content = xml_content + contents if xml_content: if n_rec >= n_max: finalfilename = "%s/rec_fmt_%s.xml" % (CFG_TMPDIR, time.strftime('%Y%m%d_%H%M%S')) filename = "%s/bibreformat.xml" % CFG_TMPDIR filehandle = open(filename ,"w") filehandle.write(xml_content) filehandle.close() ### bibformat external call ### task_sleep_now_if_required(can_stop_too=True) t11 = os.times()[4] message = "START bibformat external call" write_message(message, verbose=9) command = "%s/bibformat otype='%s' < %s/bibreformat.xml > %s 2> %s/bibreformat.err" % (CFG_BINDIR, fmt.upper(), CFG_TMPDIR, finalfilename, CFG_TMPDIR) os.system(command) t22 = os.times()[4] message = "END bibformat external call (time elapsed:%2f)" % (t22-t11) write_message(message, verbose=9) task_sleep_now_if_required(can_stop_too=True) tbibformat = tbibformat + (t22 - t11) ### bibupload external call ### t11 = os.times()[4] message = "START bibupload external call" write_message(message, verbose=9) task_id = task_low_level_submission('bibupload', 'bibreformat', '-f', finalfilename) write_message("Task #%s submitted" % task_id) t22 = os.times()[4] message = "END bibupload external call (time elapsed:%2f)" % (t22-t11) write_message(message, verbose=9) tbibupload = tbibupload + (t22- t11) n_rec = 0 xml_content = '' ### Process the last re-formated chunk ### if n_rec > 0: write_message("Processing last record set (%d)" % n_rec, verbose=9) finalfilename = "%s/rec_fmt_%s.xml" % (CFG_TMPDIR, time.strftime('%Y%m%d_%H%M%S')) filename = "%s/bibreformat.xml" % CFG_TMPDIR filehandle = open(filename ,"w") filehandle.write(xml_content) filehandle.close() ### bibformat external call ### t11 = os.times()[4] message = "START bibformat external call" write_message(message, verbose=9) command = "%s/bibformat otype='%s' < %s/bibreformat.xml > %s 2> %s/bibreformat.err" % (CFG_BINDIR, fmt.upper(), CFG_TMPDIR, finalfilename, CFG_TMPDIR) os.system(command) t22 = os.times()[4] message = 
"END bibformat external call (time elapsed:%2f)" % (t22 - t11) write_message(message, verbose=9) tbibformat = tbibformat + (t22 - t11) ### bibupload external call ### t11 = os.times()[4] message = "START bibupload external call" write_message(message, verbose=9) task_id = task_low_level_submission('bibupload', 'bibreformat', '-f', finalfilename) write_message("Task #%s submitted" % task_id) t22 = os.times()[4] message = "END bibupload external call (time elapsed:%2f)" % (t22 - t11) write_message(message, verbose=9) tbibupload = tbibupload + (t22 - t11) return (total_rec, tbibformat, tbibupload) def task_run_core(): """Runs the task by fetching arguments from the BibSched task queue. This is what BibSched will be invoking via daemon call.""" ## initialize parameters fmt = task_get_option('format') sql = { "all" : "select br.id from bibrec as br, bibfmt as bf where bf.id_bibrec=br.id and bf.format ='%s'" % fmt, "last": "select br.id from bibrec as br, bibfmt as bf where bf.id_bibrec=br.id and bf.format='%s' and bf.last_updated < br.modification_date" % fmt, "q1" : "select br.id from bibrec as br", "q2" : "select br.id from bibrec as br, bibfmt as bf where bf.id_bibrec=br.id and bf.format ='%s'" % fmt } sql_queries = [] cds_query = {} if task_has_option("all"): sql_queries.append(sql['all']) if task_has_option("last"): sql_queries.append(sql['last']) if task_has_option("collection"): cds_query['collection'] = task_get_option('collection') else: cds_query['collection'] = "" if task_has_option("field"): cds_query['field'] = task_get_option('field') else: cds_query['field'] = "" if task_has_option("pattern"): cds_query['pattern'] = task_get_option('pattern') else: cds_query['pattern'] = "" if task_has_option("matching"): cds_query['matching'] = task_get_option('matching') else: cds_query['matching'] = "" recids = intbitset() if task_has_option("recids"): for recid in task_get_option('recids').split(','): if ":" in recid: start = int(recid.split(':')[0]) end = 
int(recid.split(':')[1]) recids += range(start, end) else: recids.add(int(recid)) ### sql commands to be executed during the script run ### bibreformat_task(fmt, sql, sql_queries, cds_query, task_has_option('without'), not task_has_option('noprocess'), recids) return True def main(): """Main that construct all the bibtask.""" task_init(authorization_action='runbibformat', authorization_msg="BibReformat Task Submission", description=""" BibReformat formats the records and saves the produced outputs for later retrieval. BibReformat is usually run periodically via BibSched in order to (1) format new records in the database and to (2) reformat records for which the meta data has been modified. BibReformat has to be run manually when (3) format config files have been modified, in order to see the changes in the web interface. Although it is not necessary to run BibReformat to display formatted records in the web interface, BibReformat allows to improve serving speed by precreating the outputs. It is suggested to run BibReformat for 'HB' output. Option -m cannot be used at the same time as option -c. Option -c prevents from finding records in private collections. Examples: bibreformat Format all new or modified records (in HB). bibreformat -o HD Format all new or modified records in HD. bibreformat -a Force reformatting all records (in HB). bibreformat -c 'Photos' Force reformatting all records in 'Photos' collection (in HB). bibreformat -c 'Photos' -o HD Force reformatting all records in 'Photos' collection in HD. bibreformat -i 15 Force reformatting record 15 (in HB). bibreformat -i 15:20 Force reformatting records 15 to 20 (in HB). bibreformat -i 15,16,17 Force reformatting records 15, 16 and 17 (in HB). bibreformat -n Show how many records are to be (re)formatted. bibreformat -n -c 'Articles' Show how many records are to be (re)formatted in 'Articles' collection. bibreformat -oHB -s1h Format all new and modified records every hour, in HB. 
""", help_specific_usage=""" -o, --format \t Specify output format (default HB) -n, --noprocess \t Count records to be formatted (no processing done) Reformatting options: -a, --all \t Force reformatting all records -c, --collection \t Force reformatting records by collection -f, --field \t Force reformatting records by field -p, --pattern \t Force reformatting records by pattern -i, --id \t Force reformatting records by record id(s) Pattern options: -m, --matching \t Specify if pattern is exact (e), regular expression (r), \t partial (p), any of the words (o) or all of the words (a) """, version=__revision__, specific_params=("ac:f:p:lo:nm:i:", ["all", "collection=", "matching=", "field=", "pattern=", "format=", "noprocess", "id="]), task_submit_check_options_fnc=task_submit_check_options, task_submit_elaborate_specific_parameter_fnc=task_submit_elaborate_specific_parameter, task_run_fnc=task_run_core) def task_submit_check_options(): """Last checks and updating on the options...""" if not (task_has_option('all') or task_has_option('collection') \ or task_has_option('field') or task_has_option('pattern') \ or task_has_option('matching') or task_has_option('recids')): task_set_option('without', 1) task_set_option('last', 1) return True def task_submit_elaborate_specific_parameter(key, value, opts, args): """Elaborate specific CLI parameters of BibReformat.""" if key in ("-a", "--all"): task_set_option("all", 1) task_set_option("without", 1) elif key in ("-c", "--collection"): task_set_option("collection", value) elif key in ("-n", "--noprocess"): task_set_option("noprocess", 1) elif key in ("-f", "--field"): task_set_option("field", value) elif key in ("-p","--pattern"): task_set_option("pattern", value) elif key in ("-m", "--matching"): task_set_option("matching", value) elif key in ("-o","--format"): task_set_option("format", value) elif key in ("-i","--id"): task_set_option("recids", value) else: return False return True ### okay, here we go: if __name__ == 
'__main__': main()
gpl-2.0
wong2/sentry
src/sentry/migrations/0107_expand_user.py
36
25693
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): db.alter_column('auth_user', 'username', self.gf('django.db.models.fields.CharField')(max_length=128, unique=True)) def backwards(self, orm): db.alter_column('auth_user', 'username', self.gf('django.db.models.fields.CharField')(max_length=30, unique=True)) models = { u'auth.group': { 'Meta': {'object_name': 'Group'}, u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}), 'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}) }, u'auth.permission': { 'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'}, 'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'content_type': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}), u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'}) }, 'sentry.user': { 'Meta': {'object_name': 'User', 'db_table': "'auth_user'"}, 'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}), 'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}), 'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}), 'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'}) }, u'contenttypes.contenttype': { 'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"}, 'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}), u'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}) }, u'sentry.accessgroup': { 'Meta': {'unique_together': "(('team', 'name'),)", 'object_name': 'AccessGroup'}, 'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'managed': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'members': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sentry.User']", 'symmetrical': 'False'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'projects': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['sentry.Project']", 'symmetrical': 'False'}), 'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Team']"}), 'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}) }, u'sentry.activity': { 'Meta': {'object_name': 'Activity'}, 'data': 
('django.db.models.fields.TextField', [], {'null': 'True'}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'event': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Event']", 'null': 'True'}), 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'ident': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}), 'type': ('django.db.models.fields.PositiveIntegerField', [], {}), 'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'null': 'True'}) }, u'sentry.alert': { 'Meta': {'object_name': 'Alert'}, 'data': ('django.db.models.fields.TextField', [], {'null': 'True'}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']", 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'message': ('django.db.models.fields.TextField', [], {}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}), 'related_groups': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'related_alerts'", 'symmetrical': 'False', 'through': u"orm['sentry.AlertRelatedGroup']", 'to': u"orm['sentry.Group']"}), 'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}) }, u'sentry.alertrelatedgroup': { 'Meta': {'unique_together': "(('group', 'alert'),)", 'object_name': 'AlertRelatedGroup'}, 'alert': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Alert']"}), 'data': ('django.db.models.fields.TextField', [], {'null': 
'True'}), 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}) }, u'sentry.event': { 'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'Event', 'db_table': "'sentry_message'"}, 'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}), 'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}), 'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'datetime': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True', 'db_column': "'message_id'"}), 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'blank': 'True', 'related_name': "'event_set'", 'null': 'True', 'to': u"orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}), 'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}), 'message': ('django.db.models.fields.TextField', [], {}), 'num_comments': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}), 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}), 'server_name': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}), 'site': ('django.db.models.fields.CharField', [], {'max_length': '128', 'null': 'True', 'db_index': 'True'}), 'time_spent': 
('django.db.models.fields.IntegerField', [], {'null': 'True'}) }, u'sentry.eventmapping': { 'Meta': {'unique_together': "(('project', 'event_id'),)", 'object_name': 'EventMapping'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'event_id': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}) }, u'sentry.group': { 'Meta': {'unique_together': "(('project', 'checksum'),)", 'object_name': 'Group', 'db_table': "'sentry_groupedmessage'"}, 'active_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'checksum': ('django.db.models.fields.CharField', [], {'max_length': '32', 'db_index': 'True'}), 'culprit': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'db_column': "'view'", 'blank': 'True'}), 'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'is_public': ('django.db.models.fields.NullBooleanField', [], {'default': 'False', 'null': 'True', 'blank': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'db_index': 'True'}), 'level': ('django.db.models.fields.PositiveIntegerField', [], {'default': '40', 'db_index': 'True', 'blank': 'True'}), 'logger': ('django.db.models.fields.CharField', [], {'default': "'root'", 'max_length': '64', 'db_index': 'True', 'blank': 'True'}), 'message': ('django.db.models.fields.TextField', [], {}), 'num_comments': 
('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True'}), 'platform': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}), 'resolved_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}), 'score': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}), 'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'time_spent_total': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}) }, u'sentry.groupbookmark': { 'Meta': {'unique_together': "(('project', 'user', 'group'),)", 'object_name': 'GroupBookmark'}, 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': u"orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'bookmark_set'", 'to': u"orm['sentry.Project']"}), 'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_bookmark_set'", 'to': u"orm['sentry.User']"}) }, u'sentry.groupcountbyminute': { 'Meta': {'unique_together': "(('project', 'group', 'date'),)", 'object_name': 'GroupCountByMinute', 'db_table': "'sentry_messagecountbyminute'"}, 'date': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True'}), 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 
'True'}), 'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'time_spent_total': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) }, u'sentry.groupmeta': { 'Meta': {'unique_together': "(('group', 'key'),)", 'object_name': 'GroupMeta'}, 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'value': ('django.db.models.fields.TextField', [], {}) }, u'sentry.groupseen': { 'Meta': {'unique_together': "(('user', 'group'),)", 'object_name': 'GroupSeen'}, 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}), 'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'db_index': 'False'}) }, u'sentry.grouptag': { 'Meta': {'unique_together': "(('project', 'key', 'value', 'group'),)", 'object_name': 'GroupTag', 'db_table': "'sentry_messagefiltervalue'"}, 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}), 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}), 'project': 
('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}), 'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '200'}) }, u'sentry.grouptagkey': { 'Meta': {'unique_together': "(('project', 'group', 'key'),)", 'object_name': 'GroupTagKey'}, 'group': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Group']"}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}), 'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) }, u'sentry.lostpasswordhash': { 'Meta': {'object_name': 'LostPasswordHash'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'hash': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'unique': 'True'}) }, u'sentry.option': { 'Meta': {'object_name': 'Option'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '64'}), 'value': ('picklefield.fields.PickledObjectField', [], {}) }, u'sentry.pendingteammember': { 'Meta': {'unique_together': "(('team', 'email'),)", 'object_name': 'PendingTeamMember'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'email': ('django.db.models.fields.EmailField', [], {'max_length': '75'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'team': 
('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'pending_member_set'", 'to': u"orm['sentry.Team']"}), 'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}) }, u'sentry.project': { 'Meta': {'unique_together': "(('team', 'slug'),)", 'object_name': 'Project'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '200'}), 'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'sentry_owned_project_set'", 'null': 'True', 'to': u"orm['sentry.User']"}), 'platform': ('django.db.models.fields.CharField', [], {'max_length': '32', 'null': 'True'}), 'public': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50', 'null': 'True'}), 'status': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}), 'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Team']", 'null': 'True'}) }, u'sentry.projectcountbyminute': { 'Meta': {'unique_together': "(('project', 'date'),)", 'object_name': 'ProjectCountByMinute'}, 'date': ('django.db.models.fields.DateTimeField', [], {}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}), 'time_spent_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'time_spent_total': ('django.db.models.fields.IntegerField', [], {'default': '0'}), 'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) }, u'sentry.projectkey': { 'Meta': {'object_name': 'ProjectKey'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 
'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'key_set'", 'to': u"orm['sentry.Project']"}), 'public_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}), 'secret_key': ('django.db.models.fields.CharField', [], {'max_length': '32', 'unique': 'True', 'null': 'True'}), 'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']", 'null': 'True'}), 'user_added': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'keys_added_set'", 'null': 'True', 'to': u"orm['sentry.User']"}) }, u'sentry.projectoption': { 'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'ProjectOption', 'db_table': "'sentry_projectoptions'"}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}), 'value': ('picklefield.fields.PickledObjectField', [], {}) }, u'sentry.tagkey': { 'Meta': {'unique_together': "(('project', 'key'),)", 'object_name': 'TagKey', 'db_table': "'sentry_filterkey'"}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'label': ('django.db.models.fields.CharField', [], {'max_length': '64', 'null': 'True'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']"}), 'values_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}) }, u'sentry.tagvalue': { 'Meta': {'unique_together': "(('project', 'key', 'value'),)", 'object_name': 'TagValue', 'db_table': "'sentry_filtervalue'"}, 'data': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 
'True'}), 'first_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '32'}), 'last_seen': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True', 'db_index': 'True'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}), 'times_seen': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}), 'value': ('django.db.models.fields.CharField', [], {'max_length': '200'}) }, u'sentry.team': { 'Meta': {'object_name': 'Team'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'null': 'True'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'members': ('django.db.models.fields.related.ManyToManyField', [], {'related_name': "'team_memberships'", 'symmetrical': 'False', 'through': u"orm['sentry.TeamMember']", 'to': u"orm['sentry.User']"}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'owner': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']"}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '50'}) }, u'sentry.teammember': { 'Meta': {'unique_together': "(('team', 'user'),)", 'object_name': 'TeamMember'}, 'date_added': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}), 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'team': ('sentry.db.models.fields.FlexibleForeignKey', [], {'related_name': "'member_set'", 'to': u"orm['sentry.Team']"}), 'type': ('django.db.models.fields.IntegerField', [], {'default': '50'}), 'user': ('sentry.db.models.fields.FlexibleForeignKey', [], 
{'related_name': "'sentry_teammember_set'", 'to': u"orm['sentry.User']"}) }, u'sentry.useroption': { 'Meta': {'unique_together': "(('user', 'project', 'key'),)", 'object_name': 'UserOption'}, 'id': ('sentry.db.models.fields.bounded.BoundedBigAutoField', [], {'primary_key': 'True'}), 'key': ('django.db.models.fields.CharField', [], {'max_length': '64'}), 'project': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.Project']", 'null': 'True'}), 'user': ('sentry.db.models.fields.FlexibleForeignKey', [], {'to': u"orm['sentry.User']"}), 'value': ('picklefield.fields.PickledObjectField', [], {}) } } complete_apps = ['sentry']
bsd-3-clause
Neuroglycerin/neukrill-net-work
generate_hlf_testcache_6aug.py
1
1993
#!/usr/bin/env python
"""
Generate a cache of all the ComputerVision highlevelfeatures to send to Pylearn2
"""
from __future__ import division

import neukrill_net.augment
import neukrill_net.highlevelfeatures
import neukrill_net.utils

import copy
import numpy as np
from sklearn.externals import joblib

# Output locations for the cached feature matrices.  Only the "raw" data
# cache is written by this script; the remaining paths are defined for the
# scaled variants of the same features.
pkl_path1 = '/disk/data1/s1145806/cached_hlf_test6_data_raw.pkl'
pkl_path2 = '/disk/data1/s1145806/cached_hlf_test6_raw.pkl'
pkl_path3 = '/disk/data1/s1145806/cached_hlf_test6_data_ranged.pkl'
pkl_path4 = '/disk/data1/s1145806/cached_hlf_test6_ranged.pkl'
pkl_path5 = '/disk/data1/s1145806/cached_hlf_test6_data_posranged.pkl'
pkl_path6 = '/disk/data1/s1145806/cached_hlf_test6_posranged.pkl'

# Basic per-image attributes to extract.
attrlst = ['height', 'width', 'numpixels', 'sideratio', 'mean', 'std', 'stderr',
           'propwhite', 'propnonwhite', 'propbool']

# Load the raw image data as configured in settings.json.
settings = neukrill_net.utils.Settings('settings.json')
X, y = neukrill_net.utils.load_rawdata(settings.image_fnames)

# All the high-level feature extractors to combine.
hlf_list = [
    neukrill_net.highlevelfeatures.BasicAttributes(attrlst),
    neukrill_net.highlevelfeatures.ContourMoments(),
    neukrill_net.highlevelfeatures.ContourHistogram(),
    neukrill_net.highlevelfeatures.ThresholdAdjacency(),
    neukrill_net.highlevelfeatures.ZernikeMoments(),
    neukrill_net.highlevelfeatures.Haralick(),
]
# neukrill_net.highlevelfeatures.CoocurProps() deliberately left out.

# Augmentation: 3 rotations x horizontal flip, kept as uint8.
aug_fun = neukrill_net.augment.augmentation_wrapper(
    units='uint8', rotate=3, rotate_is_resizable=1, flip=1)

hlf = neukrill_net.highlevelfeatures.MultiHighLevelFeature(
    hlf_list, augment_func=aug_fun)

# Compute the raw feature values for every (augmented) image and persist them.
X_raw = hlf.generate_cache(X)
joblib.dump(X_raw, pkl_path1)
mit
75651/kbengine_cloud
kbe/src/lib/python/Lib/distutils/log.py
163
1908
"""A simple log mechanism styled after PEP 282.""" # The class here is styled after PEP 282 so that it could later be # replaced with a standard Python logging implementation. DEBUG = 1 INFO = 2 WARN = 3 ERROR = 4 FATAL = 5 import sys class Log: def __init__(self, threshold=WARN): self.threshold = threshold def _log(self, level, msg, args): if level not in (DEBUG, INFO, WARN, ERROR, FATAL): raise ValueError('%s wrong log level' % str(level)) if level >= self.threshold: if args: msg = msg % args if level in (WARN, ERROR, FATAL): stream = sys.stderr else: stream = sys.stdout if stream.errors == 'strict': # emulate backslashreplace error handler encoding = stream.encoding msg = msg.encode(encoding, "backslashreplace").decode(encoding) stream.write('%s\n' % msg) stream.flush() def log(self, level, msg, *args): self._log(level, msg, args) def debug(self, msg, *args): self._log(DEBUG, msg, args) def info(self, msg, *args): self._log(INFO, msg, args) def warn(self, msg, *args): self._log(WARN, msg, args) def error(self, msg, *args): self._log(ERROR, msg, args) def fatal(self, msg, *args): self._log(FATAL, msg, args) _global_log = Log() log = _global_log.log debug = _global_log.debug info = _global_log.info warn = _global_log.warn error = _global_log.error fatal = _global_log.fatal def set_threshold(level): # return the old threshold for use from tests old = _global_log.threshold _global_log.threshold = level return old def set_verbosity(v): if v <= 0: set_threshold(WARN) elif v == 1: set_threshold(INFO) elif v >= 2: set_threshold(DEBUG)
lgpl-3.0
sestrella/ansible
lib/ansible/modules/network/fortios/fortios_wireless_controller_utm_profile.py
13
12705
#!/usr/bin/python from __future__ import (absolute_import, division, print_function) # Copyright 2019 Fortinet, Inc. # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see <https://www.gnu.org/licenses/>. __metaclass__ = type ANSIBLE_METADATA = {'status': ['preview'], 'supported_by': 'community', 'metadata_version': '1.1'} DOCUMENTATION = ''' --- module: fortios_wireless_controller_utm_profile short_description: Configure UTM (Unified Threat Management) profile in Fortinet's FortiOS and FortiGate. description: - This module is able to configure a FortiGate or FortiOS (FOS) device by allowing the user to set and modify wireless_controller feature and utm_profile category. Examples include all parameters and values need to be adjusted to datasources before usage. Tested with FOS v6.0.5 version_added: "2.8" author: - Miguel Angel Munoz (@mamunozgonzalez) - Nicolas Thomas (@thomnico) notes: - Requires fortiosapi library developed by Fortinet - Run as a local_action in your playbook requirements: - fortiosapi>=0.9.8 options: host: description: - FortiOS or FortiGate IP address. type: str required: false username: description: - FortiOS or FortiGate username. type: str required: false password: description: - FortiOS or FortiGate password. type: str default: "" vdom: description: - Virtual domain, among those defined previously. A vdom is a virtual instance of the FortiGate that can be configured and used as a different unit. 
type: str default: root https: description: - Indicates if the requests towards FortiGate must use HTTPS protocol. type: bool default: true ssl_verify: description: - Ensures FortiGate certificate must be verified by a proper CA. type: bool default: true version_added: 2.9 state: description: - Indicates whether to create or remove the object. This attribute was present already in previous version in a deeper level. It has been moved out to this outer level. type: str required: false choices: - present - absent version_added: 2.9 wireless_controller_utm_profile: description: - Configure UTM (Unified Threat Management) profile. default: null type: dict suboptions: state: description: - B(Deprecated) - Starting with Ansible 2.9 we recommend using the top-level 'state' parameter. - HORIZONTALLINE - Indicates whether to create or remove the object. type: str required: false choices: - present - absent antivirus_profile: description: - AntiVirus profile name. Source antivirus.profile.name. type: str application_list: description: - Application control list name. Source application.list.name. type: str comment: description: - Comment. type: str ips_sensor: description: - IPS sensor name. Source ips.sensor.name. type: str name: description: - UTM profile name. required: true type: str scan_botnet_connections: description: - Block or monitor connections to Botnet servers or disable Botnet scanning. type: str choices: - disable - monitor - block utm_log: description: - Enable/disable UTM logging. type: str choices: - enable - disable webfilter_profile: description: - WebFilter profile name. Source webfilter.profile.name. type: str ''' EXAMPLES = ''' - hosts: localhost vars: host: "192.168.122.40" username: "admin" password: "" vdom: "root" ssl_verify: "False" tasks: - name: Configure UTM (Unified Threat Management) profile. 
fortios_wireless_controller_utm_profile: host: "{{ host }}" username: "{{ username }}" password: "{{ password }}" vdom: "{{ vdom }}" https: "False" state: "present" wireless_controller_utm_profile: antivirus_profile: "<your_own_value> (source antivirus.profile.name)" application_list: "<your_own_value> (source application.list.name)" comment: "Comment." ips_sensor: "<your_own_value> (source ips.sensor.name)" name: "default_name_7" scan_botnet_connections: "disable" utm_log: "enable" webfilter_profile: "<your_own_value> (source webfilter.profile.name)" ''' RETURN = ''' build: description: Build number of the fortigate image returned: always type: str sample: '1547' http_method: description: Last method used to provision the content into FortiGate returned: always type: str sample: 'PUT' http_status: description: Last result given by FortiGate on last operation applied returned: always type: str sample: "200" mkey: description: Master key (id) used in the last call to FortiGate returned: success type: str sample: "id" name: description: Name of the table used to fulfill the request returned: always type: str sample: "urlfilter" path: description: Path of the table used to fulfill the request returned: always type: str sample: "webfilter" revision: description: Internal revision number returned: always type: str sample: "17.0.2.10658" serial: description: Serial number of the unit returned: always type: str sample: "FGVMEVYYQT3AB5352" status: description: Indication of the operation's result returned: always type: str sample: "success" vdom: description: Virtual domain used returned: always type: str sample: "root" version: description: Version of the FortiGate returned: always type: str sample: "v5.6.3" ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.connection import Connection from ansible.module_utils.network.fortios.fortios import FortiOSHandler from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG def 
def login(data, fos):
    """Authenticate against the FortiGate via the legacy fortiosapi client."""
    host = data['host']
    username = data['username']
    password = data['password']
    ssl_verify = data['ssl_verify']

    fos.debug('on')
    if 'https' in data and not data['https']:
        fos.https('off')
    else:
        fos.https('on')

    fos.login(host, username, password, verify=ssl_verify)


def filter_wireless_controller_utm_profile_data(json):
    """Return only the recognised utm-profile options, dropping None values."""
    option_list = ['antivirus_profile', 'application_list', 'comment',
                   'ips_sensor', 'name', 'scan_botnet_connections',
                   'utm_log', 'webfilter_profile']
    dictionary = {}

    for attribute in option_list:
        if attribute in json and json[attribute] is not None:
            dictionary[attribute] = json[attribute]

    return dictionary


def underscore_to_hyphen(data):
    """Recursively rewrite dict keys from foo_bar to foo-bar (FortiOS naming).

    Lists are converted element-by-element; scalars pass through unchanged.
    """
    if isinstance(data, list):
        # BUGFIX: the previous code rebound the loop variable
        # (``elem = underscore_to_hyphen(elem)``), discarding the converted
        # value, so dicts nested inside lists were never renamed.
        for i, elem in enumerate(data):
            data[i] = underscore_to_hyphen(elem)
    elif isinstance(data, dict):
        new_data = {}
        for k, v in data.items():
            new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
        data = new_data
    return data


def wireless_controller_utm_profile(data, fos):
    """Create/update (state=present) or delete (state=absent) the profile."""
    vdom = data['vdom']
    if 'state' in data and data['state']:
        state = data['state']
    # BUGFIX: check the suboption dict is truthy BEFORE the membership test;
    # the old order raised TypeError ('in' on None) when the dict was None.
    elif data['wireless_controller_utm_profile'] and 'state' in data['wireless_controller_utm_profile']:
        state = data['wireless_controller_utm_profile']['state']
    else:
        state = True
    wireless_controller_utm_profile_data = data['wireless_controller_utm_profile']
    filtered_data = underscore_to_hyphen(
        filter_wireless_controller_utm_profile_data(wireless_controller_utm_profile_data))

    if state == "present":
        return fos.set('wireless-controller',
                       'utm-profile',
                       data=filtered_data,
                       vdom=vdom)

    elif state == "absent":
        return fos.delete('wireless-controller',
                          'utm-profile',
                          mkey=filtered_data['name'],
                          vdom=vdom)


def is_successful_status(status):
    """A request succeeded, or was a DELETE on an already-absent object (404)."""
    return status['status'] == "success" or \
        status['http_method'] == "DELETE" and status['http_status'] == 404


def fortios_wireless_controller(data, fos):
    """Dispatch to the utm-profile handler and normalise the result triple."""
    if data['wireless_controller_utm_profile']:
        resp = wireless_controller_utm_profile(data, fos)
    # NOTE(review): if the suboption dict is falsy, ``resp`` is unbound here
    # and this raises UnboundLocalError — confirm callers always supply it.

    return not is_successful_status(resp), \
        resp['status'] == "success", \
        resp
def main():
    """Module entry point: declare the argument spec, pick the transport
    (httpapi connection vs legacy fortiosapi), run the request, and exit."""
    fields = {
        "host": {"required": False, "type": "str"},
        "username": {"required": False, "type": "str"},
        "password": {"required": False, "type": "str", "default": "", "no_log": True},
        "vdom": {"required": False, "type": "str", "default": "root"},
        "https": {"required": False, "type": "bool", "default": True},
        "ssl_verify": {"required": False, "type": "bool", "default": True},
        "state": {"required": False, "type": "str",
                  "choices": ["present", "absent"]},
        "wireless_controller_utm_profile": {
            "required": False, "type": "dict", "default": None,
            "options": {
                # Deprecated inner 'state' kept for backward compatibility
                # with playbooks written before the top-level 'state'.
                "state": {"required": False, "type": "str",
                          "choices": ["present", "absent"]},
                "antivirus_profile": {"required": False, "type": "str"},
                "application_list": {"required": False, "type": "str"},
                "comment": {"required": False, "type": "str"},
                "ips_sensor": {"required": False, "type": "str"},
                "name": {"required": True, "type": "str"},
                "scan_botnet_connections": {"required": False, "type": "str",
                                            "choices": ["disable", "monitor", "block"]},
                "utm_log": {"required": False, "type": "str",
                            "choices": ["enable", "disable"]},
                "webfilter_profile": {"required": False, "type": "str"}
            }
        }
    }

    module = AnsibleModule(argument_spec=fields,
                           supports_check_mode=False)

    # legacy_mode refers to using fortiosapi instead of HTTPAPI:
    # explicit host/username/password means the user wants a direct login.
    legacy_mode = 'host' in module.params and module.params['host'] is not None and \
                  'username' in module.params and module.params['username'] is not None and \
                  'password' in module.params and module.params['password'] is not None

    if not legacy_mode:
        # httpapi transport: reuse the persistent connection Ansible opened.
        # NOTE(review): _socket_path is a private AnsibleModule attribute.
        if module._socket_path:
            connection = Connection(module._socket_path)
            fos = FortiOSHandler(connection)

            is_error, has_changed, result = fortios_wireless_controller(module.params, fos)
        else:
            module.fail_json(**FAIL_SOCKET_MSG)
    else:
        # Legacy path: import fortiosapi lazily so the module loads without it.
        try:
            from fortiosapi import FortiOSAPI
        except ImportError:
            module.fail_json(msg="fortiosapi module is required")

        fos = FortiOSAPI()

        login(module.params, fos)
        is_error, has_changed, result = fortios_wireless_controller(module.params, fos)
        fos.logout()

    if not is_error:
        module.exit_json(changed=has_changed, meta=result)
    else:
        module.fail_json(msg="Error in repo", meta=result)


if __name__ == '__main__':
    main()
gpl-3.0
aaltinisik/OCBAltinkaya
openerp/tools/yaml_tag.py
337
6256
import yaml
import logging

class YamlTag(object):
    """
    Superclass for constructors of custom tags defined in yaml file.
    __str__ is overriden in subclass and used for serialization in module recorder.
    """
    def __init__(self, **kwargs):
        # Every keyword argument becomes an instance attribute.
        self.__dict__.update(kwargs)
    def __getitem__(self, key):
        # Dict-style access: tag['model'] is tag.model.
        return getattr(self, key)
    def __getattr__(self, attr):
        # Missing attributes resolve to None instead of raising AttributeError.
        return None
    def __repr__(self):
        return "<%s %s>" % (self.__class__.__name__, sorted(self.__dict__.items()))

class Assert(YamlTag):
    """!assert tag: check a condition on a record, logged at *severity*."""
    def __init__(self, model, id=None, severity=logging.WARNING, string="NONAME", **kwargs):
        self.model = model
        self.id = id
        self.severity = severity
        self.string = string
        super(Assert, self).__init__(**kwargs)

class Record(YamlTag):
    """!record tag: create/update a database record of *model* with xml id *id*."""
    def __init__(self, model, id, use='id', view=True, **kwargs):
        # NOTE(review): the `use` parameter is accepted but never stored —
        # confirm whether any caller relies on it.
        self.model = model
        self.id = id
        self.view = view
        super(Record, self).__init__(**kwargs)
    def __str__(self):
        return '!record {model: %s, id: %s}:' % (str(self.model,), str(self.id,))

class Python(YamlTag):
    """!python tag: a block of python code evaluated against *model*."""
    def __init__(self, model, severity=logging.ERROR, name="", **kwargs):
        self.model = model
        self.severity = severity
        self.name = name
        super(Python, self).__init__(**kwargs)
    def __str__(self):
        return '!python {model: %s}: |' % (str(self.model), )

class Menuitem(YamlTag):
    """!menuitem tag: declare a menu entry with xml id *id* and label *name*."""
    def __init__(self, id, name, **kwargs):
        self.id = id
        self.name = name
        super(Menuitem, self).__init__(**kwargs)

class Workflow(YamlTag):
    """!workflow tag: send workflow signal *action* to the record *ref* of *model*."""
    def __init__(self, model, action, ref=None, **kwargs):
        self.model = model
        self.action = action
        self.ref = ref
        super(Workflow, self).__init__(**kwargs)
    def __str__(self):
        return '!workflow {model: %s, action: %s, ref: %s}' % (str(self.model,), str(self.action,), str(self.ref,))

class ActWindow(YamlTag):
    """!act_window tag: window action declaration (all data via kwargs)."""
    def __init__(self, **kwargs):
        super(ActWindow, self).__init__(**kwargs)

class Function(YamlTag):
    """!function tag: call method *name* on *model*."""
    def __init__(self, model, name, **kwargs):
        self.model = model
        self.name = name
        super(Function, self).__init__(**kwargs)

class Report(YamlTag):
    """!report tag: render report *name* of *model*, labelled *string*."""
    def __init__(self, model, name, string, **kwargs):
        self.model = model
        self.name = name
        self.string = string
        super(Report, self).__init__(**kwargs)

class Delete(YamlTag):
    """!delete tag: remove records (all data via kwargs)."""
    def __init__(self, **kwargs):
        super(Delete, self).__init__(**kwargs)

class Context(YamlTag):
    """!context tag: set the evaluation context for subsequent tags."""
    def __init__(self, **kwargs):
        super(Context, self).__init__(**kwargs)

class Url(YamlTag):
    """!url tag: test an URL (all data via kwargs)."""
    def __init__(self, **kwargs):
        super(Url, self).__init__(**kwargs)

class Eval(YamlTag):
    """!eval tag: a python expression evaluated at load time."""
    def __init__(self, expression):
        self.expression = expression
        super(Eval, self).__init__()
    def __str__(self):
        return '!eval %s' % str(self.expression)

class Ref(YamlTag):
    """!ref tag: reference another record by xml id expression *expr*."""
    def __init__(self, expr="False", *args, **kwargs):
        self.expr = expr
        super(Ref, self).__init__(*args, **kwargs)
    def __str__(self):
        return 'ref(%s)' % repr(self.expr)

class IrSet(YamlTag):
    """!ir_set tag: marker only; takes no data."""
    def __init__(self):
        super(IrSet, self).__init__()

def assert_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Assert(**kwargs)

def record_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    # Fail early with a clear message if the mandatory keys are missing.
    assert "model" in kwargs, "'model' argument is required for !record"
    assert "id" in kwargs, "'id' argument is required for !record"
    return Record(**kwargs)

def python_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    # Record where the code block starts so errors can point at the yaml line.
    kwargs['first_line'] = node.start_mark.line + 1
    return Python(**kwargs)

def menuitem_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Menuitem(**kwargs)

def workflow_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Workflow(**kwargs)

def act_window_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return ActWindow(**kwargs)

def function_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Function(**kwargs)

def report_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Report(**kwargs)

def delete_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Delete(**kwargs)

def context_constructor(loader, node):
    kwargs = loader.construct_mapping(node)
    return Context(**kwargs)
def url_constructor(loader, node):
    """Build a Url tag from a YAML mapping node."""
    return Url(**loader.construct_mapping(node))

def eval_constructor(loader, node):
    """Build an Eval tag wrapping the node's scalar expression."""
    return Eval(loader.construct_scalar(node))

def ref_constructor(loader, tag_suffix, node):
    """Handle both forms of !ref: ``!ref:id <scalar>`` and ``!ref {mapping}``."""
    if tag_suffix == "id":
        return Ref(id=loader.construct_scalar(node))
    return Ref(**loader.construct_mapping(node))

def ir_set_constructor(loader, node):
    """Build an IrSet tag from a YAML mapping node."""
    return IrSet(**loader.construct_mapping(node))

# Registers constructors for custom tags.
# Constructors are actually defined globally: do not redefine them in another
# class/file/package. This means that the module recorder needs to import
# this file.
def add_constructors():
    """Register every custom tag constructor with the global yaml loader."""
    single_tags = (
        (u"!assert", assert_constructor),
        (u"!record", record_constructor),
        (u"!python", python_constructor),
        (u"!menuitem", menuitem_constructor),
        (u"!workflow", workflow_constructor),
        (u"!act_window", act_window_constructor),
        (u"!function", function_constructor),
        (u"!report", report_constructor),
        (u"!context", context_constructor),
        (u"!delete", delete_constructor),
        (u"!url", url_constructor),
        (u"!eval", eval_constructor),
    )
    for tag, constructor in single_tags:
        yaml.add_constructor(tag, constructor)
    # !ref is a multi-constructor: it matches !ref and suffixed forms (!ref:id).
    yaml.add_multi_constructor(u"!ref", ref_constructor)
    yaml.add_constructor(u"!ir_set", ir_set_constructor)
add_constructors()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
kernel-bz/ml
Lectures/NN/input_data.py
28
5709
"""Functions for downloading and reading MNIST data.""" from __future__ import print_function import gzip import os import urllib import numpy SOURCE_URL = 'http://yann.lecun.com/exdb/mnist/' def maybe_download(filename, work_directory): """Download the data from Yann's website, unless it's already here.""" if not os.path.exists(work_directory): os.mkdir(work_directory) filepath = os.path.join(work_directory, filename) if not os.path.exists(filepath): filepath, _ = urllib.urlretrieve(SOURCE_URL + filename, filepath) statinfo = os.stat(filepath) print('Succesfully downloaded', filename, statinfo.st_size, 'bytes.') return filepath def _read32(bytestream): dt = numpy.dtype(numpy.uint32).newbyteorder('>') return numpy.frombuffer(bytestream.read(4), dtype=dt) def extract_images(filename): """Extract the images into a 4D uint8 numpy array [index, y, x, depth].""" print('Extracting', filename) with gzip.open(filename) as bytestream: magic = _read32(bytestream) if magic != 2051: raise ValueError( 'Invalid magic number %d in MNIST image file: %s' % (magic, filename)) num_images = _read32(bytestream) rows = _read32(bytestream) cols = _read32(bytestream) buf = bytestream.read(rows * cols * num_images) data = numpy.frombuffer(buf, dtype=numpy.uint8) data = data.reshape(num_images, rows, cols, 1) return data def dense_to_one_hot(labels_dense, num_classes=10): """Convert class labels from scalars to one-hot vectors.""" num_labels = labels_dense.shape[0] index_offset = numpy.arange(num_labels) * num_classes labels_one_hot = numpy.zeros((num_labels, num_classes)) labels_one_hot.flat[index_offset + labels_dense.ravel()] = 1 return labels_one_hot def extract_labels(filename, one_hot=False): """Extract the labels into a 1D uint8 numpy array [index].""" print('Extracting', filename) with gzip.open(filename) as bytestream: magic = _read32(bytestream) if magic != 2049: raise ValueError( 'Invalid magic number %d in MNIST label file: %s' % (magic, filename)) num_items = 
_read32(bytestream) buf = bytestream.read(num_items) labels = numpy.frombuffer(buf, dtype=numpy.uint8) if one_hot: return dense_to_one_hot(labels) return labels class DataSet(object): def __init__(self, images, labels, fake_data=False): if fake_data: self._num_examples = 10000 else: assert images.shape[0] == labels.shape[0], ( "images.shape: %s labels.shape: %s" % (images.shape, labels.shape)) self._num_examples = images.shape[0] # Convert shape from [num examples, rows, columns, depth] # to [num examples, rows*columns] (assuming depth == 1) assert images.shape[3] == 1 images = images.reshape(images.shape[0], images.shape[1] * images.shape[2]) # Convert from [0, 255] -> [0.0, 1.0]. images = images.astype(numpy.float32) images = numpy.multiply(images, 1.0 / 255.0) self._images = images self._labels = labels self._epochs_completed = 0 self._index_in_epoch = 0 @property def images(self): return self._images @property def labels(self): return self._labels @property def num_examples(self): return self._num_examples @property def epochs_completed(self): return self._epochs_completed def next_batch(self, batch_size, fake_data=False): """Return the next `batch_size` examples from this data set.""" if fake_data: fake_image = [1.0 for _ in xrange(784)] fake_label = 0 return [fake_image for _ in xrange(batch_size)], [ fake_label for _ in xrange(batch_size)] start = self._index_in_epoch self._index_in_epoch += batch_size if self._index_in_epoch > self._num_examples: # Finished epoch self._epochs_completed += 1 # Shuffle the data perm = numpy.arange(self._num_examples) numpy.random.shuffle(perm) self._images = self._images[perm] self._labels = self._labels[perm] # Start next epoch start = 0 self._index_in_epoch = batch_size assert batch_size <= self._num_examples end = self._index_in_epoch return self._images[start:end], self._labels[start:end] def read_data_sets(train_dir, fake_data=False, one_hot=False): class DataSets(object): pass data_sets = DataSets() if fake_data: 
data_sets.train = DataSet([], [], fake_data=True) data_sets.validation = DataSet([], [], fake_data=True) data_sets.test = DataSet([], [], fake_data=True) return data_sets TRAIN_IMAGES = 'train-images-idx3-ubyte.gz' TRAIN_LABELS = 'train-labels-idx1-ubyte.gz' TEST_IMAGES = 't10k-images-idx3-ubyte.gz' TEST_LABELS = 't10k-labels-idx1-ubyte.gz' VALIDATION_SIZE = 5000 local_file = maybe_download(TRAIN_IMAGES, train_dir) train_images = extract_images(local_file) local_file = maybe_download(TRAIN_LABELS, train_dir) train_labels = extract_labels(local_file, one_hot=one_hot) local_file = maybe_download(TEST_IMAGES, train_dir) test_images = extract_images(local_file) local_file = maybe_download(TEST_LABELS, train_dir) test_labels = extract_labels(local_file, one_hot=one_hot) validation_images = train_images[:VALIDATION_SIZE] validation_labels = train_labels[:VALIDATION_SIZE] train_images = train_images[VALIDATION_SIZE:] train_labels = train_labels[VALIDATION_SIZE:] data_sets.train = DataSet(train_images, train_labels) data_sets.validation = DataSet(validation_images, validation_labels) data_sets.test = DataSet(test_images, test_labels) return data_sets
gpl-3.0
TangXT/edx-platform
common/djangoapps/track/views.py
13
6157
import datetime

import pytz
from django.contrib.auth.decorators import login_required
from django.http import HttpResponse
from django.shortcuts import redirect
from django_future.csrf import ensure_csrf_cookie
from edxmako.shortcuts import render_to_response

from track import tracker
from track import contexts
from track import shim
from track.models import TrackingLog

from eventtracking import tracker as eventtracker


def log_event(event):
    """Capture an event by sending it to the registered trackers."""
    tracker.send(event)


def _get_request_header(request, header_name, default=''):
    """
    Return ``request.META[header_name]`` if present, else ``default``.

    Tolerates a ``None`` request (e.g. from management commands) and
    request objects without a META dict.
    """
    if request is not None and hasattr(request, 'META') and header_name in request.META:
        return request.META[header_name]
    else:
        return default


def user_track(request):
    """
    Log when POST call to "event" URL is made by a user. Uses request.REQUEST
    to allow for GET calls.

    GET or POST call should provide "event_type", "event", and "page" arguments.
    """
    try:
        # TODO: Do the same for many of the optional META parameters
        username = request.user.username
    except AttributeError:
        # request.user missing or None (e.g. no auth middleware ran).
        username = "anonymous"

    page = request.REQUEST['page']

    # Resolve the tracking context while the course context (derived from the
    # page URL) is active, so course information is folded in.
    with eventtracker.get_tracker().context('edx.course.browser', contexts.course_context_from_url(page)):
        context = eventtracker.get_tracker().resolve_context()

    event = {
        "username": username,
        "session": context.get('session', ''),
        "ip": _get_request_header(request, 'REMOTE_ADDR'),
        "event_source": "browser",
        "event_type": request.REQUEST['event_type'],
        "event": request.REQUEST['event'],
        "agent": _get_request_header(request, 'HTTP_USER_AGENT'),
        "page": page,
        "time": datetime.datetime.utcnow(),
        "host": _get_request_header(request, 'SERVER_NAME'),
        "context": context,
    }

    # Some duplicated fields are passed into event-tracking via the context by
    # track.middleware.  Remove them from the event here since they are
    # captured elsewhere.
    shim.remove_shim_context(event)

    log_event(event)
    return HttpResponse('success')


def server_track(request, event_type, event, page=None):
    """
    Log events related to server requests.

    Handle the situation where the request may be NULL, as may happen with
    management commands.
    """
    # NOTE(review): this line dereferences request.user before the None-request
    # guard below; a None request whose event_type starts with "/event_logs"
    # would raise AttributeError here.  Preserved as-is — confirm intended.
    if event_type.startswith("/event_logs") and request.user.is_staff:
        return  # don't log

    try:
        username = request.user.username
    except AttributeError:
        # request is None, or has no authenticated user attached.
        username = "anonymous"

    # define output:
    event = {
        "username": username,
        "ip": _get_request_header(request, 'REMOTE_ADDR'),
        "event_source": "server",
        "event_type": event_type,
        "event": event,
        "agent": _get_request_header(request, 'HTTP_USER_AGENT'),
        "page": page,
        "time": datetime.datetime.utcnow(),
        "host": _get_request_header(request, 'SERVER_NAME'),
        "context": eventtracker.get_tracker().resolve_context(),
    }

    # Some duplicated fields are passed into event-tracking via the context by
    # track.middleware.  Remove them from the event here since they are
    # captured elsewhere.
    shim.remove_shim_context(event)

    log_event(event)


def task_track(request_info, task_info, event_type, event, page=None):
    """
    Logs tracking information for events occuring within celery tasks.

    The `event_type` is a string naming the particular event being logged,
    while `event` is a dict containing whatever additional contextual
    information is desired.

    The `request_info` is a dict containing information about the original
    task request.  Relevant keys are `username`, `ip`, `agent`, and `host`.
    While the dict is required, the values in it are not, so that {} can be
    passed in.

    In addition, a `task_info` dict provides more information about the
    current task, to be stored with the `event` dict.  This may also be an
    empty dict.

    The `page` parameter is optional, and allows the name of the page to
    be provided.
    """
    # supplement event information with additional information
    # about the task in which it is running.
    full_event = dict(event, **task_info)

    # All fields must be specified, in case the tracking information is
    # also saved to the TrackingLog model.  Get values from the task-level
    # information, or just add placeholder values.
    with eventtracker.get_tracker().context('edx.course.task', contexts.course_context_from_url(page)):
        event = {
            "username": request_info.get('username', 'unknown'),
            "ip": request_info.get('ip', 'unknown'),
            "event_source": "task",
            "event_type": event_type,
            "event": full_event,
            "agent": request_info.get('agent', 'unknown'),
            "page": page,
            "time": datetime.datetime.utcnow(),
            "host": request_info.get('host', 'unknown'),
            "context": eventtracker.get_tracker().resolve_context(),
        }

    log_event(event)


@login_required
@ensure_csrf_cookie
def view_tracking_log(request, args=''):
    """
    View to output contents of TrackingLog model.  For staff use only.

    `args` is a '/'-separated list of optional filters: a bare integer
    limits the number of records shown (default 100), and a
    'username=<name>' segment restricts the records to that user.
    """
    if not request.user.is_staff:
        return redirect('/')
    nlen = 100
    username = ''
    if args:
        for arg in args.split('/'):
            if arg.isdigit():
                nlen = int(arg)
            if arg.startswith('username='):
                username = arg[9:]  # strip the 'username=' prefix (9 chars)

    record_instances = TrackingLog.objects.all().order_by('-time')
    if username:
        record_instances = record_instances.filter(username=username)
    record_instances = record_instances[0:nlen]

    # fix dtstamp
    fmt = '%a %d-%b-%y %H:%M:%S'  # "%Y-%m-%d %H:%M:%S %Z%z"
    for rinst in record_instances:
        rinst.dtstr = rinst.time.replace(tzinfo=pytz.utc).astimezone(pytz.timezone('US/Eastern')).strftime(fmt)

    return render_to_response('tracking_log.html', {'records': record_instances})
agpl-3.0
nitzmahone/ansible
lib/ansible/modules/network/f5/bigip_dns_nameserver.py
8
14795
#!/usr/bin/python # -*- coding: utf-8 -*- # # Copyright: (c) 2018, F5 Networks Inc. # GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'certified'} DOCUMENTATION = r''' --- module: bigip_dns_nameserver short_description: Manage LTM DNS nameservers on a BIG-IP description: - Manages LTM DNS nameservers on a BIG-IP. These nameservers form part of what is known as DNS Express on a BIG-IP. This module does not configure GTM related functionality, nor does it configure system-level name servers that affect the base system's ability to resolve DNS names. version_added: 2.8 options: name: description: - Specifies the name of the nameserver. required: True address: description: - Specifies the IP address on which the DNS nameserver (client) or back-end DNS authoritative server (DNS Express server) listens for DNS messages. - When creating a new nameserver, if this value is not specified, the default is C(127.0.0.1). service_port: description: - Specifies the service port on which the DNS nameserver (client) or back-end DNS authoritative server (DNS Express server) listens for DNS messages. - When creating a new nameserver, if this value is not specified, the default is C(53). route_domain: description: - Specifies the local route domain that the DNS nameserver (client) or back-end DNS authoritative server (DNS Express server) uses for outbound traffic. - When creating a new nameserver, if this value is not specified, the default is C(0). tsig_key: description: - Specifies the TSIG key the system uses to communicate with this DNS nameserver (client) or back-end DNS authoritative server (DNS Express server) for AXFR zone transfers. - If the nameserver is a client, then the system uses this TSIG key to verify the request and sign the response. 
- If this nameserver is a DNS Express server, then this TSIG key must match the TSIG key for the zone on the back-end DNS authoritative server. state: description: - When C(present), ensures that the resource exists. - When C(absent), ensures the resource is removed. default: present choices: - present - absent partition: description: - Device partition to manage resources on. default: Common extends_documentation_fragment: f5 author: - Tim Rupp (@caphrim007) ''' EXAMPLES = r''' - name: Create a nameserver bigip_dns_nameserver: name: foo address: 10.10.10.10 service_port: 53 state: present provider: password: secret server: lb.mydomain.com user: admin delegate_to: localhost ''' RETURN = r''' address: description: Address which the nameserver listens for DNS messages. returned: changed type: string sample: 127.0.0.1 service_port: description: Service port on which the nameserver listens for DNS messages. returned: changed type: int sample: 53 ''' from ansible.module_utils.basic import AnsibleModule from ansible.module_utils.basic import env_fallback try: from library.module_utils.network.f5.bigip import F5RestClient from library.module_utils.network.f5.common import F5ModuleError from library.module_utils.network.f5.common import AnsibleF5Parameters from library.module_utils.network.f5.common import cleanup_tokens from library.module_utils.network.f5.common import fq_name from library.module_utils.network.f5.common import f5_argument_spec from library.module_utils.network.f5.common import exit_json from library.module_utils.network.f5.common import fail_json from library.module_utils.network.f5.common import transform_name except ImportError: from ansible.module_utils.network.f5.bigip import F5RestClient from ansible.module_utils.network.f5.common import F5ModuleError from ansible.module_utils.network.f5.common import AnsibleF5Parameters from ansible.module_utils.network.f5.common import cleanup_tokens from ansible.module_utils.network.f5.common import fq_name from 
ansible.module_utils.network.f5.common import f5_argument_spec from ansible.module_utils.network.f5.common import exit_json from ansible.module_utils.network.f5.common import fail_json from ansible.module_utils.network.f5.common import transform_name class Parameters(AnsibleF5Parameters): api_map = { 'routeDomain': 'route_domain', 'port': 'service_port', 'tsigKey': 'tsig_key' } api_attributes = [ 'address', 'routeDomain', 'port', 'tsigKey' ] returnables = [ 'address', 'service_port', 'route_domain', 'tsig_key', ] updatables = [ 'address', 'service_port', 'route_domain', 'tsig_key', ] class ApiParameters(Parameters): pass class ModuleParameters(Parameters): @property def tsig_key(self): if self._values['tsig_key'] in [None, '']: return self._values['tsig_key'] return fq_name(self.partition, self._values['tsig_key']) @property def route_domain(self): if self._values['route_domain'] is None: return None return fq_name(self.partition, self._values['route_domain']) @property def service_port(self): if self._values['service_port'] is None: return None try: return int(self._values['service_port']) except ValueError: # Reserving the right to add well-known ports raise F5ModuleError( "The 'service_port' must be in numeric form." 
) class Changes(Parameters): def to_return(self): result = {} try: for returnable in self.returnables: result[returnable] = getattr(self, returnable) result = self._filter_params(result) except Exception: pass return result class UsableChanges(Changes): pass class ReportableChanges(Changes): pass class Difference(object): def __init__(self, want, have=None): self.want = want self.have = have def compare(self, param): try: result = getattr(self, param) return result except AttributeError: return self.__default(param) def __default(self, param): attr1 = getattr(self.want, param) try: attr2 = getattr(self.have, param) if attr1 != attr2: return attr1 except AttributeError: return attr1 @property def tsig_key(self): if self.want.tsig_key is None: return None if self.have.tsig_key is None and self.want.tsig_key == '': return None if self.want.tsig_key != self.have.tsig_key: return self.want.tsig_key class ModuleManager(object): def __init__(self, *args, **kwargs): self.module = kwargs.get('module', None) self.client = kwargs.get('client', None) self.want = ModuleParameters(params=self.module.params) self.have = ApiParameters() self.changes = UsableChanges() def _set_changed_options(self): changed = {} for key in Parameters.returnables: if getattr(self.want, key) is not None: changed[key] = getattr(self.want, key) if changed: self.changes = UsableChanges(params=changed) def _update_changed_options(self): diff = Difference(self.want, self.have) updatables = Parameters.updatables changed = dict() for k in updatables: change = diff.compare(k) if change is None: continue else: if isinstance(change, dict): changed.update(change) else: changed[k] = change if changed: self.changes = UsableChanges(params=changed) return True return False def should_update(self): result = self._update_changed_options() if result: return True return False def exec_module(self): changed = False result = dict() state = self.want.state if state == "present": changed = self.present() elif state == 
"absent": changed = self.absent() reportable = ReportableChanges(params=self.changes.to_return()) changes = reportable.to_return() result.update(**changes) result.update(dict(changed=changed)) self._announce_deprecations(result) return result def _announce_deprecations(self, result): warnings = result.pop('__warnings', []) for warning in warnings: self.client.module.deprecate( msg=warning['msg'], version=warning['version'] ) def present(self): if self.exists(): return self.update() else: return self.create() def exists(self): uri = "https://{0}:{1}/mgmt/tm/ltm/dns/nameserver/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) resp = self.client.api.get(uri) try: response = resp.json() except ValueError: return False if resp.status == 404 or 'code' in response and response['code'] == 404: return False return True def update(self): self.have = self.read_current_from_device() if not self.should_update(): return False if self.module.check_mode: return True self.update_on_device() return True def remove(self): if self.module.check_mode: return True self.remove_from_device() if self.exists(): raise F5ModuleError("Failed to delete the resource.") return True def create(self): if self.want.address is None: self.want.update({'address': '127.0.0.1'}) if self.want.service_port is None: self.want.update({'service_port': '53'}) if self.want.route_domain is None: self.want.update({'route_domain': '/Common/0'}) self._set_changed_options() if self.module.check_mode: return True self.create_on_device() return True def create_on_device(self): params = self.changes.api_params() params['name'] = self.want.name params['partition'] = self.want.partition uri = "https://{0}:{1}/mgmt/tm/ltm/dns/nameserver/".format( self.client.provider['server'], self.client.provider['server_port'] ) resp = self.client.api.post(uri, json=params) try: response = resp.json() except ValueError as ex: raise 
F5ModuleError(str(ex)) if 'code' in response and response['code'] in [400, 403]: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) def update_on_device(self): params = self.changes.api_params() uri = "https://{0}:{1}/mgmt/tm/ltm/dns/nameserver/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) resp = self.client.api.patch(uri, json=params) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) def absent(self): if self.exists(): return self.remove() return False def remove_from_device(self): uri = "https://{0}:{1}/mgmt/tm/ltm/dns/nameserver/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) response = self.client.api.delete(uri) if response.status == 200: return True raise F5ModuleError(response.content) def read_current_from_device(self): uri = "https://{0}:{1}/mgmt/tm/ltm/dns/nameserver/{2}".format( self.client.provider['server'], self.client.provider['server_port'], transform_name(self.want.partition, self.want.name) ) resp = self.client.api.get(uri) try: response = resp.json() except ValueError as ex: raise F5ModuleError(str(ex)) if 'code' in response and response['code'] == 400: if 'message' in response: raise F5ModuleError(response['message']) else: raise F5ModuleError(resp.content) return ApiParameters(params=response) class ArgumentSpec(object): def __init__(self): self.supports_check_mode = True argument_spec = dict( name=dict(required=True), address=dict(), service_port=dict(), route_domain=dict(), tsig_key=dict(), state=dict( default='present', choices=['present', 'absent'] ), partition=dict( default='Common', fallback=(env_fallback, 
['F5_PARTITION']) ) ) self.argument_spec = {} self.argument_spec.update(f5_argument_spec) self.argument_spec.update(argument_spec) def main(): spec = ArgumentSpec() module = AnsibleModule( argument_spec=spec.argument_spec, supports_check_mode=spec.supports_check_mode, ) client = F5RestClient(**module.params) try: mm = ModuleManager(module=module, client=client) results = mm.exec_module() cleanup_tokens(client) exit_json(module, results, client) except F5ModuleError as ex: cleanup_tokens(client) fail_json(module, ex, client) if __name__ == '__main__': main()
gpl-3.0
dang03/son-cli
src/son/package/md5.py
5
2123
# Copyright (c) 2015 SONATA-NFV, UBIWHERE
# ALL RIGHTS RESERVED.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Neither the name of the SONATA-NFV, UBIWHERE
# nor the names of its contributors may be used to endorse or promote
# products derived from this software without specific prior written
# permission.
#
# This work has been performed in the framework of the SONATA project,
# funded by the European Commission under Grant number 671517 through
# the Horizon 2020 and 5G-PPP programmes. The authors would like to
# acknowledge the contributions of their colleagues of the SONATA
# partner consortium (www.sonata-nfv.eu).

import hashlib
import os


def generate_hash(f, cs=128):
    """
    Return the MD5 hex digest of a file or of a directory tree.

    :param f: path to a file or a directory
    :param cs: chunk size, in bytes, used when reading files
    :return: hex digest string
    """
    return __generate_hash__(f, cs) \
        if os.path.isfile(f) \
        else __generate_hash_path__(f, cs)


def __generate_hash__(f, cs=128):
    """Return the MD5 hex digest of the file at path `f`, read in `cs`-byte chunks."""
    digest = hashlib.md5()
    with open(f, "rb") as stream:
        # iter() with a sentinel yields chunks until read() returns b''
        for chunk in iter(lambda: stream.read(cs), b''):
            digest.update(chunk)
    return digest.hexdigest()


def __generate_hash_path__(p, cs=128):
    """
    Return a deterministic MD5 hex digest for the directory at path `p`.

    Hashes each file, recurses into each subdirectory, then reduces the
    collected digests into a single one.  Files and directories are visited
    in sorted order so the same tree always yields the same hash.
    """
    hashes = []
    for root, dirs, files in os.walk(p):
        for f in sorted(files):  # guarantee same order to obtain same hash
            hashes.append(__generate_hash__(os.path.join(root, f), cs))
        for d in sorted(dirs):  # guarantee same order to obtain same hash
            hashes.append(__generate_hash_path__(os.path.join(root, d), cs))
        break  # only the top level; recursion handles subdirectories
    return _reduce_hash(hashes)


def _reduce_hash(hashlist):
    """Fold a list of hex digests into a single MD5 hex digest (order-independent)."""
    digest = hashlib.md5()
    for hashvalue in sorted(hashlist):
        digest.update(hashvalue.encode('utf-8'))
    return digest.hexdigest()
apache-2.0
tpazderka/pysaml2
example/sp-repoze/attributemaps/shibboleth_uri.py
114
8070
EDUPERSON_OID = "urn:oid:1.3.6.1.4.1.5923.1.1.1." X500ATTR = "urn:oid:2.5.4." NOREDUPERSON_OID = "urn:oid:1.3.6.1.4.1.2428.90.1." NETSCAPE_LDAP = "urn:oid:2.16.840.1.113730.3.1." UCL_DIR_PILOT = "urn:oid:0.9.2342.19200300.100.1." PKCS_9 = "urn:oid:1.2.840.113549.1.9." UMICH = "urn:oid:1.3.6.1.4.1.250.1.57." MAP = { "identifier": "urn:mace:shibboleth:1.0:attributeNamespace:uri", "fro": { EDUPERSON_OID+'2': 'eduPersonNickname', EDUPERSON_OID+'9': 'eduPersonScopedAffiliation', EDUPERSON_OID+'11': 'eduPersonAssurance', EDUPERSON_OID+'10': 'eduPersonTargetedID', EDUPERSON_OID+'4': 'eduPersonOrgUnitDN', NOREDUPERSON_OID+'6': 'norEduOrgAcronym', NOREDUPERSON_OID+'7': 'norEduOrgUniqueIdentifier', NOREDUPERSON_OID+'4': 'norEduPersonLIN', EDUPERSON_OID+'1': 'eduPersonAffiliation', NOREDUPERSON_OID+'2': 'norEduOrgUnitUniqueNumber', NETSCAPE_LDAP+'40': 'userSMIMECertificate', NOREDUPERSON_OID+'1': 'norEduOrgUniqueNumber', NETSCAPE_LDAP+'241': 'displayName', UCL_DIR_PILOT+'37': 'associatedDomain', EDUPERSON_OID+'6': 'eduPersonPrincipalName', NOREDUPERSON_OID+'8': 'norEduOrgUnitUniqueIdentifier', NOREDUPERSON_OID+'9': 'federationFeideSchemaVersion', X500ATTR+'53': 'deltaRevocationList', X500ATTR+'52': 'supportedAlgorithms', X500ATTR+'51': 'houseIdentifier', X500ATTR+'50': 'uniqueMember', X500ATTR+'19': 'physicalDeliveryOfficeName', X500ATTR+'18': 'postOfficeBox', X500ATTR+'17': 'postalCode', X500ATTR+'16': 'postalAddress', X500ATTR+'15': 'businessCategory', X500ATTR+'14': 'searchGuide', EDUPERSON_OID+'5': 'eduPersonPrimaryAffiliation', X500ATTR+'12': 'title', X500ATTR+'11': 'ou', X500ATTR+'10': 'o', X500ATTR+'37': 'cACertificate', X500ATTR+'36': 'userCertificate', X500ATTR+'31': 'member', X500ATTR+'30': 'supportedApplicationContext', X500ATTR+'33': 'roleOccupant', X500ATTR+'32': 'owner', NETSCAPE_LDAP+'1': 'carLicense', PKCS_9+'1': 'email', NETSCAPE_LDAP+'3': 'employeeNumber', NETSCAPE_LDAP+'2': 'departmentNumber', X500ATTR+'39': 'certificateRevocationList', X500ATTR+'38': 
'authorityRevocationList', NETSCAPE_LDAP+'216': 'userPKCS12', EDUPERSON_OID+'8': 'eduPersonPrimaryOrgUnitDN', X500ATTR+'9': 'street', X500ATTR+'8': 'st', NETSCAPE_LDAP+'39': 'preferredLanguage', EDUPERSON_OID+'7': 'eduPersonEntitlement', X500ATTR+'2': 'knowledgeInformation', X500ATTR+'7': 'l', X500ATTR+'6': 'c', X500ATTR+'5': 'serialNumber', X500ATTR+'4': 'sn', UCL_DIR_PILOT+'60': 'jpegPhoto', X500ATTR+'65': 'pseudonym', NOREDUPERSON_OID+'5': 'norEduPersonNIN', UCL_DIR_PILOT+'3': 'mail', UCL_DIR_PILOT+'25': 'dc', X500ATTR+'40': 'crossCertificatePair', X500ATTR+'42': 'givenName', X500ATTR+'43': 'initials', X500ATTR+'44': 'generationQualifier', X500ATTR+'45': 'x500UniqueIdentifier', X500ATTR+'46': 'dnQualifier', X500ATTR+'47': 'enhancedSearchGuide', X500ATTR+'48': 'protocolInformation', X500ATTR+'54': 'dmdName', NETSCAPE_LDAP+'4': 'employeeType', X500ATTR+'22': 'teletexTerminalIdentifier', X500ATTR+'23': 'facsimileTelephoneNumber', X500ATTR+'20': 'telephoneNumber', X500ATTR+'21': 'telexNumber', X500ATTR+'26': 'registeredAddress', X500ATTR+'27': 'destinationIndicator', X500ATTR+'24': 'x121Address', X500ATTR+'25': 'internationaliSDNNumber', X500ATTR+'28': 'preferredDeliveryMethod', X500ATTR+'29': 'presentationAddress', EDUPERSON_OID+'3': 'eduPersonOrgDN', NOREDUPERSON_OID+'3': 'norEduPersonBirthDate', }, "to":{ 'roleOccupant': X500ATTR+'33', 'gn': X500ATTR+'42', 'norEduPersonNIN': NOREDUPERSON_OID+'5', 'title': X500ATTR+'12', 'facsimileTelephoneNumber': X500ATTR+'23', 'mail': UCL_DIR_PILOT+'3', 'postOfficeBox': X500ATTR+'18', 'fax': X500ATTR+'23', 'telephoneNumber': X500ATTR+'20', 'norEduPersonBirthDate': NOREDUPERSON_OID+'3', 'rfc822Mailbox': UCL_DIR_PILOT+'3', 'dc': UCL_DIR_PILOT+'25', 'countryName': X500ATTR+'6', 'emailAddress': PKCS_9+'1', 'employeeNumber': NETSCAPE_LDAP+'3', 'organizationName': X500ATTR+'10', 'eduPersonAssurance': EDUPERSON_OID+'11', 'norEduOrgAcronym': NOREDUPERSON_OID+'6', 'registeredAddress': X500ATTR+'26', 'physicalDeliveryOfficeName': 
X500ATTR+'19', 'associatedDomain': UCL_DIR_PILOT+'37', 'l': X500ATTR+'7', 'stateOrProvinceName': X500ATTR+'8', 'federationFeideSchemaVersion': NOREDUPERSON_OID+'9', 'pkcs9email': PKCS_9+'1', 'givenName': X500ATTR+'42', 'x500UniqueIdentifier': X500ATTR+'45', 'eduPersonNickname': EDUPERSON_OID+'2', 'houseIdentifier': X500ATTR+'51', 'street': X500ATTR+'9', 'supportedAlgorithms': X500ATTR+'52', 'preferredLanguage': NETSCAPE_LDAP+'39', 'postalAddress': X500ATTR+'16', 'email': PKCS_9+'1', 'norEduOrgUnitUniqueIdentifier': NOREDUPERSON_OID+'8', 'eduPersonPrimaryOrgUnitDN': EDUPERSON_OID+'8', 'c': X500ATTR+'6', 'teletexTerminalIdentifier': X500ATTR+'22', 'o': X500ATTR+'10', 'cACertificate': X500ATTR+'37', 'telexNumber': X500ATTR+'21', 'ou': X500ATTR+'11', 'initials': X500ATTR+'43', 'eduPersonOrgUnitDN': EDUPERSON_OID+'4', 'deltaRevocationList': X500ATTR+'53', 'norEduPersonLIN': NOREDUPERSON_OID+'4', 'supportedApplicationContext': X500ATTR+'30', 'eduPersonEntitlement': EDUPERSON_OID+'7', 'generationQualifier': X500ATTR+'44', 'eduPersonAffiliation': EDUPERSON_OID+'1', 'eduPersonPrincipalName': EDUPERSON_OID+'6', 'localityName': X500ATTR+'7', 'owner': X500ATTR+'32', 'norEduOrgUnitUniqueNumber': NOREDUPERSON_OID+'2', 'searchGuide': X500ATTR+'14', 'certificateRevocationList': X500ATTR+'39', 'organizationalUnitName': X500ATTR+'11', 'userCertificate': X500ATTR+'36', 'preferredDeliveryMethod': X500ATTR+'28', 'internationaliSDNNumber': X500ATTR+'25', 'uniqueMember': X500ATTR+'50', 'departmentNumber': NETSCAPE_LDAP+'2', 'enhancedSearchGuide': X500ATTR+'47', 'userPKCS12': NETSCAPE_LDAP+'216', 'eduPersonTargetedID': EDUPERSON_OID+'10', 'norEduOrgUniqueNumber': NOREDUPERSON_OID+'1', 'x121Address': X500ATTR+'24', 'destinationIndicator': X500ATTR+'27', 'eduPersonPrimaryAffiliation': EDUPERSON_OID+'5', 'surname': X500ATTR+'4', 'jpegPhoto': UCL_DIR_PILOT+'60', 'eduPersonScopedAffiliation': EDUPERSON_OID+'9', 'protocolInformation': X500ATTR+'48', 'knowledgeInformation': X500ATTR+'2', 
'employeeType': NETSCAPE_LDAP+'4', 'userSMIMECertificate': NETSCAPE_LDAP+'40', 'member': X500ATTR+'31', 'streetAddress': X500ATTR+'9', 'dmdName': X500ATTR+'54', 'postalCode': X500ATTR+'17', 'pseudonym': X500ATTR+'65', 'dnQualifier': X500ATTR+'46', 'crossCertificatePair': X500ATTR+'40', 'eduPersonOrgDN': EDUPERSON_OID+'3', 'authorityRevocationList': X500ATTR+'38', 'displayName': NETSCAPE_LDAP+'241', 'businessCategory': X500ATTR+'15', 'serialNumber': X500ATTR+'5', 'norEduOrgUniqueIdentifier': NOREDUPERSON_OID+'7', 'st': X500ATTR+'8', 'carLicense': NETSCAPE_LDAP+'1', 'presentationAddress': X500ATTR+'29', 'sn': X500ATTR+'4', 'domainComponent': UCL_DIR_PILOT+'25', } }
bsd-2-clause
nuncjo/odoo
addons/account_analytic_analysis/__openerp__.py
262
2243
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## { 'name': 'Contracts Management', 'version': '1.1', 'category': 'Sales Management', 'description': """ This module is for modifying account analytic view to show important data to project manager of services companies. =================================================================================================================== Adds menu to show relevant information to each manager.You can also view the report of account analytic summary user-wise as well as month-wise. 
""", 'author': 'Camptocamp / Odoo', 'website': 'https://www.odoo.com/page/billing', 'depends': ['hr_timesheet_invoice', 'sale'], #although sale is technically not required to install this module, all menuitems are located under 'Sales' application 'data': [ 'security/ir.model.access.csv', 'security/account_analytic_analysis_security.xml', 'account_analytic_analysis_view.xml', 'account_analytic_analysis_cron.xml', 'res_config_view.xml', 'views/account_analytic_analysis.xml', ], 'demo': ['analytic_account_demo.xml'], 'test': ['test/account_analytic_analysis.yml'], 'installable': True, 'auto_install': False, } # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
ShakoHo/fxos-certsuite
mcts/web-platform-tests/tests/tools/pywebsocket/src/mod_pywebsocket/_stream_base.py
652
5978
# Copyright 2011, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following disclaimer # in the documentation and/or other materials provided with the # distribution. # * Neither the name of Google Inc. nor the names of its # contributors may be used to endorse or promote products derived from # this software without specific prior written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR # A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT # OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, # SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT # LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, # DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. """Base stream class. """ # Note: request.connection.write/read are used in this module, even though # mod_python document says that they should be used only in connection # handlers. Unfortunately, we have no other options. For example, # request.write/read are not suitable because they don't allow direct raw bytes # writing/reading. 
import socket

from mod_pywebsocket import util


# Exceptions
#
# NOTE: this module is Python 2 code (comma-style `except E, e:` syntax);
# it targets mod_python / pywebsocket and is not Python 3 compatible.


class ConnectionTerminatedException(Exception):
    """This exception will be raised when a connection is terminated
    unexpectedly.
    """

    pass


class InvalidFrameException(ConnectionTerminatedException):
    """This exception will be raised when we received an invalid frame we
    cannot parse.
    """

    pass


class BadOperationException(Exception):
    """This exception will be raised when send_message() is called on
    server-terminated connection or receive_message() is called on
    client-terminated connection.
    """

    pass


class UnsupportedFrameException(Exception):
    """This exception will be raised when we receive a frame with flag, opcode
    we cannot handle. Handlers can just catch and ignore this exception and
    call receive_message() again to continue processing the next frame.
    """

    pass


class InvalidUTF8Exception(Exception):
    """This exception will be raised when we receive a text frame which
    contains invalid UTF-8 strings.
    """

    pass


class StreamBase(object):
    """Base stream class.

    Wraps a mod_python request's connection and provides raw byte
    reading/writing helpers that translate low-level socket/IO failures
    into ConnectionTerminatedException.
    """

    def __init__(self, request):
        """Construct an instance.

        Args:
            request: mod_python request.
        """

        self._logger = util.get_class_logger(self)

        self._request = request

    def _read(self, length):
        """Reads length bytes from connection. In case we catch any exception,
        prepends remote address to the exception message and raise again.

        Note: may return fewer than `length` bytes (whatever the single
        connection.read() call produced).

        Raises:
            ConnectionTerminatedException: when read returns empty string.
        """

        try:
            read_bytes = self._request.connection.read(length)
            if not read_bytes:
                # An empty read means the peer closed the connection.
                raise ConnectionTerminatedException(
                    'Receiving %d byte failed. Peer (%r) closed connection' %
                    (length, (self._request.connection.remote_addr,)))
            return read_bytes
        except socket.error, e:
            # Catch a socket.error. Because it's not a child class of the
            # IOError prior to Python 2.6, we cannot omit this except clause.
            # Use %s rather than %r for the exception to use human friendly
            # format.
            raise ConnectionTerminatedException(
                'Receiving %d byte failed. '
                'socket.error (%s) occurred' % (length, e))
        except IOError, e:
            # Also catch an IOError because mod_python throws it.
            raise ConnectionTerminatedException(
                'Receiving %d byte failed. IOError (%s) occurred' %
                (length, e))

    def _write(self, bytes_to_write):
        """Writes given bytes to connection. In case we catch any exception,
        prepends remote address to the exception message and raise again.
        """

        try:
            self._request.connection.write(bytes_to_write)
        except Exception, e:
            # Annotate the exception with the peer address before
            # propagating it, to aid debugging of write failures.
            util.prepend_message_to_exception(
                'Failed to send message to %r: ' %
                (self._request.connection.remote_addr,),
                e)
            raise

    def receive_bytes(self, length):
        """Receives multiple bytes. Retries read when we couldn't receive the
        specified amount.

        Raises:
            ConnectionTerminatedException: when read returns empty string.
        """

        # _read() may return a short read, so keep reading until the full
        # requested amount has been accumulated.
        read_bytes = []
        while length > 0:
            new_read_bytes = self._read(length)
            read_bytes.append(new_read_bytes)
            length -= len(new_read_bytes)
        return ''.join(read_bytes)

    def _read_until(self, delim_char):
        """Reads bytes until we encounter delim_char. The result will not
        contain delim_char.

        Raises:
            ConnectionTerminatedException: when read returns empty string.
        """

        # Byte-at-a-time read: the delimiter position is unknown in advance.
        # NOTE(review): assumes delim_char is a single byte -- confirm at
        # call sites.
        read_bytes = []
        while True:
            ch = self._read(1)
            if ch == delim_char:
                break
            read_bytes.append(ch)
        return ''.join(read_bytes)


# vi:sts=4 sw=4 et
mpl-2.0
vinodkc/spark
examples/src/main/python/ml/bisecting_k_means_example.py
27
1953
# # Licensed to the Apache Software Foundation (ASF) under one or more # contributor license agreements. See the NOTICE file distributed with # this work for additional information regarding copyright ownership. # The ASF licenses this file to You under the Apache License, Version 2.0 # (the "License"); you may not use this file except in compliance with # the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """ An example demonstrating bisecting k-means clustering. Run with: bin/spark-submit examples/src/main/python/ml/bisecting_k_means_example.py """ # $example on$ from pyspark.ml.clustering import BisectingKMeans from pyspark.ml.evaluation import ClusteringEvaluator # $example off$ from pyspark.sql import SparkSession if __name__ == "__main__": spark = SparkSession\ .builder\ .appName("BisectingKMeansExample")\ .getOrCreate() # $example on$ # Loads data. dataset = spark.read.format("libsvm").load("data/mllib/sample_kmeans_data.txt") # Trains a bisecting k-means model. bkm = BisectingKMeans().setK(2).setSeed(1) model = bkm.fit(dataset) # Make predictions predictions = model.transform(dataset) # Evaluate clustering by computing Silhouette score evaluator = ClusteringEvaluator() silhouette = evaluator.evaluate(predictions) print("Silhouette with squared euclidean distance = " + str(silhouette)) # Shows the result. print("Cluster Centers: ") centers = model.clusterCenters() for center in centers: print(center) # $example off$ spark.stop()
apache-2.0
f440/alfred-stash-workflow
workflow/src/lib/requests/sessions.py
439
24250
# -*- coding: utf-8 -*- """ requests.session ~~~~~~~~~~~~~~~~ This module provides a Session object to manage and persist settings across requests (cookies, auth, proxies). """ import os from collections import Mapping from datetime import datetime from .auth import _basic_auth_str from .compat import cookielib, OrderedDict, urljoin, urlparse from .cookies import ( cookiejar_from_dict, extract_cookies_to_jar, RequestsCookieJar, merge_cookies) from .models import Request, PreparedRequest, DEFAULT_REDIRECT_LIMIT from .hooks import default_hooks, dispatch_hook from .utils import to_key_val_list, default_headers, to_native_string from .exceptions import ( TooManyRedirects, InvalidSchema, ChunkedEncodingError, ContentDecodingError) from .packages.urllib3._collections import RecentlyUsedContainer from .structures import CaseInsensitiveDict from .adapters import HTTPAdapter from .utils import ( requote_uri, get_environ_proxies, get_netrc_auth, should_bypass_proxies, get_auth_from_url ) from .status_codes import codes # formerly defined here, reexposed here for backward compatibility from .models import REDIRECT_STATI REDIRECT_CACHE_SIZE = 1000 def merge_setting(request_setting, session_setting, dict_class=OrderedDict): """ Determines appropriate setting for a given request, taking into account the explicit setting on that request, and the setting in the session. If a setting is a dictionary, they will be merged together using `dict_class` """ if session_setting is None: return request_setting if request_setting is None: return session_setting # Bypass if not a dictionary (e.g. verify) if not ( isinstance(session_setting, Mapping) and isinstance(request_setting, Mapping) ): return request_setting merged_setting = dict_class(to_key_val_list(session_setting)) merged_setting.update(to_key_val_list(request_setting)) # Remove keys that are set to None. 
for (k, v) in request_setting.items(): if v is None: del merged_setting[k] merged_setting = dict((k, v) for (k, v) in merged_setting.items() if v is not None) return merged_setting def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict): """ Properly merges both requests and session hooks. This is necessary because when request_hooks == {'response': []}, the merge breaks Session hooks entirely. """ if session_hooks is None or session_hooks.get('response') == []: return request_hooks if request_hooks is None or request_hooks.get('response') == []: return session_hooks return merge_setting(request_hooks, session_hooks, dict_class) class SessionRedirectMixin(object): def resolve_redirects(self, resp, req, stream=False, timeout=None, verify=True, cert=None, proxies=None, **adapter_kwargs): """Receives a Response. Returns a generator of Responses.""" i = 0 hist = [] # keep track of history while resp.is_redirect: prepared_request = req.copy() if i > 0: # Update history and keep track of redirects. hist.append(resp) new_hist = list(hist) resp.history = new_hist try: resp.content # Consume socket so it can be released except (ChunkedEncodingError, ContentDecodingError, RuntimeError): resp.raw.read(decode_content=False) if i >= self.max_redirects: raise TooManyRedirects('Exceeded %s redirects.' % self.max_redirects) # Release the connection back into the pool. resp.close() url = resp.headers['location'] method = req.method # Handle redirection without scheme (see: RFC 1808 Section 4) if url.startswith('//'): parsed_rurl = urlparse(resp.url) url = '%s:%s' % (parsed_rurl.scheme, url) # The scheme should be lower case... parsed = urlparse(url) url = parsed.geturl() # Facilitate relative 'location' headers, as allowed by RFC 7231. # (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource') # Compliant with RFC3986, we percent encode the url. 
if not parsed.netloc: url = urljoin(resp.url, requote_uri(url)) else: url = requote_uri(url) prepared_request.url = to_native_string(url) # Cache the url, unless it redirects to itself. if resp.is_permanent_redirect and req.url != prepared_request.url: self.redirect_cache[req.url] = prepared_request.url # http://tools.ietf.org/html/rfc7231#section-6.4.4 if (resp.status_code == codes.see_other and method != 'HEAD'): method = 'GET' # Do what the browsers do, despite standards... # First, turn 302s into GETs. if resp.status_code == codes.found and method != 'HEAD': method = 'GET' # Second, if a POST is responded to with a 301, turn it into a GET. # This bizarre behaviour is explained in Issue 1704. if resp.status_code == codes.moved and method == 'POST': method = 'GET' prepared_request.method = method # https://github.com/kennethreitz/requests/issues/1084 if resp.status_code not in (codes.temporary_redirect, codes.permanent_redirect): if 'Content-Length' in prepared_request.headers: del prepared_request.headers['Content-Length'] prepared_request.body = None headers = prepared_request.headers try: del headers['Cookie'] except KeyError: pass # Extract any cookies sent on the response to the cookiejar # in the new request. Because we've mutated our copied prepared # request, use the old one that we haven't yet touched. extract_cookies_to_jar(prepared_request._cookies, req, resp.raw) prepared_request._cookies.update(self.cookies) prepared_request.prepare_cookies(prepared_request._cookies) # Rebuild auth and proxy information. proxies = self.rebuild_proxies(prepared_request, proxies) self.rebuild_auth(prepared_request, resp) # Override the original request. 
req = prepared_request resp = self.send( req, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies, allow_redirects=False, **adapter_kwargs ) extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) i += 1 yield resp def rebuild_auth(self, prepared_request, response): """ When being redirected we may want to strip authentication from the request to avoid leaking credentials. This method intelligently removes and reapplies authentication where possible to avoid credential loss. """ headers = prepared_request.headers url = prepared_request.url if 'Authorization' in headers: # If we get redirected to a new host, we should strip out any # authentication headers. original_parsed = urlparse(response.request.url) redirect_parsed = urlparse(url) if (original_parsed.hostname != redirect_parsed.hostname): del headers['Authorization'] # .netrc might have more auth for us on our new host. new_auth = get_netrc_auth(url) if self.trust_env else None if new_auth is not None: prepared_request.prepare_auth(new_auth) return def rebuild_proxies(self, prepared_request, proxies): """ This method re-evaluates the proxy configuration by considering the environment variables. If we are redirected to a URL covered by NO_PROXY, we strip the proxy configuration. Otherwise, we set missing proxy keys for this URL (in case they were stripped by a previous redirect). This method also replaces the Proxy-Authorization header where necessary. 
""" headers = prepared_request.headers url = prepared_request.url scheme = urlparse(url).scheme new_proxies = proxies.copy() if proxies is not None else {} if self.trust_env and not should_bypass_proxies(url): environ_proxies = get_environ_proxies(url) proxy = environ_proxies.get(scheme) if proxy: new_proxies.setdefault(scheme, environ_proxies[scheme]) if 'Proxy-Authorization' in headers: del headers['Proxy-Authorization'] try: username, password = get_auth_from_url(new_proxies[scheme]) except KeyError: username, password = None, None if username and password: headers['Proxy-Authorization'] = _basic_auth_str(username, password) return new_proxies class Session(SessionRedirectMixin): """A Requests session. Provides cookie persistence, connection-pooling, and configuration. Basic Usage:: >>> import requests >>> s = requests.Session() >>> s.get('http://httpbin.org/get') 200 """ __attrs__ = [ 'headers', 'cookies', 'auth', 'proxies', 'hooks', 'params', 'verify', 'cert', 'prefetch', 'adapters', 'stream', 'trust_env', 'max_redirects', ] def __init__(self): #: A case-insensitive dictionary of headers to be sent on each #: :class:`Request <Request>` sent from this #: :class:`Session <Session>`. self.headers = default_headers() #: Default Authentication tuple or object to attach to #: :class:`Request <Request>`. self.auth = None #: Dictionary mapping protocol to the URL of the proxy (e.g. #: {'http': 'foo.bar:3128'}) to be used on each #: :class:`Request <Request>`. self.proxies = {} #: Event-handling hooks. self.hooks = default_hooks() #: Dictionary of querystring data to attach to each #: :class:`Request <Request>`. The dictionary values may be lists for #: representing multivalued query parameters. self.params = {} #: Stream response content default. self.stream = False #: SSL Verification default. self.verify = True #: SSL certificate default. self.cert = None #: Maximum number of redirects allowed. 
If the request exceeds this #: limit, a :class:`TooManyRedirects` exception is raised. self.max_redirects = DEFAULT_REDIRECT_LIMIT #: Should we trust the environment? self.trust_env = True #: A CookieJar containing all currently outstanding cookies set on this #: session. By default it is a #: :class:`RequestsCookieJar <requests.cookies.RequestsCookieJar>`, but #: may be any other ``cookielib.CookieJar`` compatible object. self.cookies = cookiejar_from_dict({}) # Default connection adapters. self.adapters = OrderedDict() self.mount('https://', HTTPAdapter()) self.mount('http://', HTTPAdapter()) # Only store 1000 redirects to prevent using infinite memory self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE) def __enter__(self): return self def __exit__(self, *args): self.close() def prepare_request(self, request): """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it. The :class:`PreparedRequest` has settings merged from the :class:`Request <Request>` instance and those of the :class:`Session`. :param request: :class:`Request` instance to prepare with this session's settings. """ cookies = request.cookies or {} # Bootstrap CookieJar. if not isinstance(cookies, cookielib.CookieJar): cookies = cookiejar_from_dict(cookies) # Merge with session cookies merged_cookies = merge_cookies( merge_cookies(RequestsCookieJar(), self.cookies), cookies) # Set environment's basic authentication if not explicitly set. 
auth = request.auth if self.trust_env and not auth and not self.auth: auth = get_netrc_auth(request.url) p = PreparedRequest() p.prepare( method=request.method.upper(), url=request.url, files=request.files, data=request.data, json=request.json, headers=merge_setting(request.headers, self.headers, dict_class=CaseInsensitiveDict), params=merge_setting(request.params, self.params), auth=merge_setting(auth, self.auth), cookies=merged_cookies, hooks=merge_hooks(request.hooks, self.hooks), ) return p def request(self, method, url, params=None, data=None, headers=None, cookies=None, files=None, auth=None, timeout=None, allow_redirects=True, proxies=None, hooks=None, stream=None, verify=None, cert=None, json=None): """Constructs a :class:`Request <Request>`, prepares it and sends it. Returns :class:`Response <Response>` object. :param method: method for the new :class:`Request` object. :param url: URL for the new :class:`Request` object. :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`. :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`. :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`. :param files: (optional) Dictionary of ``'filename': file-like-objects`` for multipart encoding upload. :param auth: (optional) Auth tuple or callable to enable Basic/Digest/Custom HTTP Auth. :param timeout: (optional) How long to wait for the server to send data before giving up, as a float, or a (`connect timeout, read timeout <user/advanced.html#timeouts>`_) tuple. :type timeout: float or tuple :param allow_redirects: (optional) Set to True by default. :type allow_redirects: bool :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy. 
:param stream: (optional) whether to immediately download the response content. Defaults to ``False``. :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. """ method = to_native_string(method) # Create the Request. req = Request( method = method.upper(), url = url, headers = headers, files = files, data = data or {}, json = json, params = params or {}, auth = auth, cookies = cookies, hooks = hooks, ) prep = self.prepare_request(req) proxies = proxies or {} settings = self.merge_environment_settings( prep.url, proxies, stream, verify, cert ) # Send the request. send_kwargs = { 'timeout': timeout, 'allow_redirects': allow_redirects, } send_kwargs.update(settings) resp = self.send(prep, **send_kwargs) return resp def get(self, url, **kwargs): """Sends a GET request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. """ kwargs.setdefault('allow_redirects', True) return self.request('GET', url, **kwargs) def options(self, url, **kwargs): """Sends a OPTIONS request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. """ kwargs.setdefault('allow_redirects', True) return self.request('OPTIONS', url, **kwargs) def head(self, url, **kwargs): """Sends a HEAD request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. """ kwargs.setdefault('allow_redirects', False) return self.request('HEAD', url, **kwargs) def post(self, url, data=None, json=None, **kwargs): """Sends a POST request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. 
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param json: (optional) json to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. """ return self.request('POST', url, data=data, json=json, **kwargs) def put(self, url, data=None, **kwargs): """Sends a PUT request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. """ return self.request('PUT', url, data=data, **kwargs) def patch(self, url, data=None, **kwargs): """Sends a PATCH request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param \*\*kwargs: Optional arguments that ``request`` takes. """ return self.request('PATCH', url, data=data, **kwargs) def delete(self, url, **kwargs): """Sends a DELETE request. Returns :class:`Response` object. :param url: URL for the new :class:`Request` object. :param \*\*kwargs: Optional arguments that ``request`` takes. """ return self.request('DELETE', url, **kwargs) def send(self, request, **kwargs): """Send a given PreparedRequest.""" # Set defaults that the hooks can utilize to ensure they always have # the correct parameters to reproduce the previous request. kwargs.setdefault('stream', self.stream) kwargs.setdefault('verify', self.verify) kwargs.setdefault('cert', self.cert) kwargs.setdefault('proxies', self.proxies) # It's possible that users might accidentally send a Request object. # Guard against that specific failure case. 
if not isinstance(request, PreparedRequest): raise ValueError('You can only send PreparedRequests.') checked_urls = set() while request.url in self.redirect_cache: checked_urls.add(request.url) new_url = self.redirect_cache.get(request.url) if new_url in checked_urls: break request.url = new_url # Set up variables needed for resolve_redirects and dispatching of hooks allow_redirects = kwargs.pop('allow_redirects', True) stream = kwargs.get('stream') hooks = request.hooks # Get the appropriate adapter to use adapter = self.get_adapter(url=request.url) # Start time (approximately) of the request start = datetime.utcnow() # Send the request r = adapter.send(request, **kwargs) # Total elapsed time of the request (approximately) r.elapsed = datetime.utcnow() - start # Response manipulation hooks r = dispatch_hook('response', hooks, r, **kwargs) # Persist cookies if r.history: # If the hooks create history then we want those cookies too for resp in r.history: extract_cookies_to_jar(self.cookies, resp.request, resp.raw) extract_cookies_to_jar(self.cookies, request, r.raw) # Redirect resolving generator. gen = self.resolve_redirects(r, request, **kwargs) # Resolve redirects if allowed. history = [resp for resp in gen] if allow_redirects else [] # Shuffle things around if there's history. if history: # Insert the first (original) request at the start history.insert(0, r) # Get the last request made r = history.pop() r.history = history if not stream: r.content return r def merge_environment_settings(self, url, proxies, stream, verify, cert): """Check the environment and merge it with some settings.""" # Gather clues from the surrounding environment. if self.trust_env: # Set environment's proxies. env_proxies = get_environ_proxies(url) or {} for (k, v) in env_proxies.items(): proxies.setdefault(k, v) # Look for requests environment configuration and be compatible # with cURL. 
if verify is True or verify is None: verify = (os.environ.get('REQUESTS_CA_BUNDLE') or os.environ.get('CURL_CA_BUNDLE')) # Merge all the kwargs. proxies = merge_setting(proxies, self.proxies) stream = merge_setting(stream, self.stream) verify = merge_setting(verify, self.verify) cert = merge_setting(cert, self.cert) return {'verify': verify, 'proxies': proxies, 'stream': stream, 'cert': cert} def get_adapter(self, url): """Returns the appropriate connnection adapter for the given URL.""" for (prefix, adapter) in self.adapters.items(): if url.lower().startswith(prefix): return adapter # Nothing matches :-/ raise InvalidSchema("No connection adapters were found for '%s'" % url) def close(self): """Closes all adapters and as such the session""" for v in self.adapters.values(): v.close() def mount(self, prefix, adapter): """Registers a connection adapter to a prefix. Adapters are sorted in descending order by key length.""" self.adapters[prefix] = adapter keys_to_move = [k for k in self.adapters if len(k) < len(prefix)] for key in keys_to_move: self.adapters[key] = self.adapters.pop(key) def __getstate__(self): state = dict((attr, getattr(self, attr, None)) for attr in self.__attrs__) state['redirect_cache'] = dict(self.redirect_cache) return state def __setstate__(self, state): redirect_cache = state.pop('redirect_cache', {}) for attr, value in state.items(): setattr(self, attr, value) self.redirect_cache = RecentlyUsedContainer(REDIRECT_CACHE_SIZE) for redirect, to in redirect_cache.items(): self.redirect_cache[redirect] = to def session(): """Returns a :class:`Session` for context-management.""" return Session()
mit
csitarichie/boost_msm_bare_metal
boost/tools/build/v2/test/project_test1.py
15
1958
#!/usr/bin/python # Copyright 2002 Dave Abrahams # Copyright 2002, 2003, 2004 Vladimir Prus # Distributed under the Boost Software License, Version 1.0. # (See accompanying file LICENSE_1_0.txt or http://www.boost.org/LICENSE_1_0.txt) import BoostBuild import os t = BoostBuild.Tester("--build-system=project-test1", boost_build_path='', pass_toolset=0) # This test does no modifications, so run in in the invocation dir. os.chdir(t.original_workdir) expected_output1="""Project Roots: """ expected_output2="""'%(root-dir-prefix)sdir2': Module for project-root is 'project-root<%(root-dir-prefix)sdir2>' Projects: '/cool-library': * Parent project: (none) * Requirements: <include>/home/ghost/build/boost-cvs * Default build: * Source location: %(root-dir-prefix)sdir2 * Projects to build: """ expected_output3="""'%(root-dir)s': Module for project-root is 'project-root<%(root-dir)s>' Projects: '/boost-build-test-project-1': * Parent project: (none) * Requirements: <include>/home/ghost/local/include <threading>multi * Default build: * Source location: %(root-dir)s * Projects to build: dir dir2 '/boost-build-test-project-1/dir': * Parent project: %(root-dir)s * Requirements: <include>/home/ghost/local/include <threading>multi * Default build: <variant>release * Source location: %(root-dir-prefix)sdir/src * Projects to build: """ # Test that correct project structure is created when jam is invoked outside of # the source tree. expected = (expected_output1 + expected_output2 + expected_output3) % \ {"root-dir": "project-test1", "root-dir-prefix": "project-test1/" } t.run_build_system(stdout=expected) # Test that correct project structure is created when jam is invoked at the top # of the source tree. expected = (expected_output1 + expected_output3 + expected_output2) % \ {"root-dir": ".", "root-dir-prefix": "" } os.chdir("project-test1") t.run_build_system(stdout=expected) t.cleanup()
gpl-2.0
PRIMEDesigner15/PRIMEDesigner15
Test_files/dependencies/Lib/optparse.py
728
60616
"""A powerful, extensible, and easy-to-use option parser. By Greg Ward <gward@python.net> Originally distributed as Optik. For support, use the optik-users@lists.sourceforge.net mailing list (http://lists.sourceforge.net/lists/listinfo/optik-users). Simple usage example: from optparse import OptionParser parser = OptionParser() parser.add_option("-f", "--file", dest="filename", help="write report to FILE", metavar="FILE") parser.add_option("-q", "--quiet", action="store_false", dest="verbose", default=True, help="don't print status messages to stdout") (options, args) = parser.parse_args() """ __version__ = "1.5.3" __all__ = ['Option', 'make_option', 'SUPPRESS_HELP', 'SUPPRESS_USAGE', 'Values', 'OptionContainer', 'OptionGroup', 'OptionParser', 'HelpFormatter', 'IndentedHelpFormatter', 'TitledHelpFormatter', 'OptParseError', 'OptionError', 'OptionConflictError', 'OptionValueError', 'BadOptionError'] __copyright__ = """ Copyright (c) 2001-2006 Gregory P. Ward. All rights reserved. Copyright (c) 2002-2006 Python Software Foundation. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the author nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT,
STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
"""

import sys, os
import textwrap


def _repr(self):
    # Shared generic __repr__; installed on Option and Values via
    # ``__repr__ = _repr`` so both classes get the same debug format.
    return "<%s at 0x%x: %s>" % (self.__class__.__name__, id(self), self)


# This file was generated from:
#   Id: option_parser.py 527 2006-07-23 15:21:30Z greg
#   Id: option.py 522 2006-06-11 16:22:03Z gward
#   Id: help.py 527 2006-07-23 15:21:30Z greg
#   Id: errors.py 509 2006-04-20 00:58:24Z gward

try:
    from gettext import gettext, ngettext
except ImportError:
    # gettext unavailable: fall back to identity translation functions so
    # the ``_(...)`` calls below still work.
    def gettext(message):
        return message

    def ngettext(singular, plural, n):
        if n == 1:
            return singular
        return plural

_ = gettext


class OptParseError (Exception):
    """Base class for all exceptions raised by this module."""
    def __init__(self, msg):
        self.msg = msg

    def __str__(self):
        return self.msg


class OptionError (OptParseError):
    """
    Raised if an Option instance is created with invalid or
    inconsistent arguments.
    """

    def __init__(self, msg, option):
        self.msg = msg
        self.option_id = str(option)

    def __str__(self):
        if self.option_id:
            return "option %s: %s" % (self.option_id, self.msg)
        else:
            return self.msg


class OptionConflictError (OptionError):
    """
    Raised if conflicting options are added to an OptionParser.
    """


class OptionValueError (OptParseError):
    """
    Raised if an invalid option value is encountered on the command
    line.
    """


class BadOptionError (OptParseError):
    """
    Raised if an invalid option is seen on the command line.
    """
    def __init__(self, opt_str):
        self.opt_str = opt_str

    def __str__(self):
        return _("no such option: %s") % self.opt_str


class AmbiguousOptionError (BadOptionError):
    """
    Raised if an ambiguous option is seen on the command line.
    """
    def __init__(self, opt_str, possibilities):
        BadOptionError.__init__(self, opt_str)
        self.possibilities = possibilities

    def __str__(self):
        return (_("ambiguous option: %s (%s?)")
                % (self.opt_str, ", ".join(self.possibilities)))


class HelpFormatter:

    """
    Abstract base class for formatting option help.  OptionParser
    instances should use one of the HelpFormatter subclasses for
    formatting help; by default IndentedHelpFormatter is used.

    Instance attributes:
      parser : OptionParser
        the controlling OptionParser instance
      indent_increment : int
        the number of columns to indent per nesting level
      max_help_position : int
        the maximum starting column for option help text
      help_position : int
        the calculated starting column for option help text;
        initially the same as the maximum
      width : int
        total number of columns for output (pass None to constructor for
        this value to be taken from the $COLUMNS environment variable)
      level : int
        current indentation level
      current_indent : int
        current indentation level (in columns)
      help_width : int
        number of columns available for option help text (calculated)
      default_tag : str
        text to replace with each option's default value, "%default"
        by default.  Set to false value to disable default value expansion.
      option_strings : { Option : str }
        maps Option instances to the snippet of help text explaining
        the syntax of that option, e.g. "-h, --help" or
        "-fFILE, --file=FILE"
      _short_opt_fmt : str
        format string controlling how short options with values are
        printed in help text.  Must be either "%s%s" ("-fFILE") or
        "%s %s" ("-f FILE"), because those are the two syntaxes that
        Optik supports.
      _long_opt_fmt : str
        similar but for long options; must be either "%s %s" ("--file FILE")
        or "%s=%s" ("--file=FILE").
    """

    NO_DEFAULT_VALUE = "none"

    def __init__(self,
                 indent_increment,
                 max_help_position,
                 width,
                 short_first):
        self.parser = None
        self.indent_increment = indent_increment
        self.help_position = self.max_help_position = max_help_position
        if width is None:
            # No explicit width: honour the $COLUMNS environment variable,
            # defaulting to an 80-column terminal, with a 2-column margin.
            try:
                width = int(os.environ['COLUMNS'])
            except (KeyError, ValueError):
                width = 80
            width -= 2
        self.width = width
        self.current_indent = 0
        self.level = 0
        self.help_width = None          # computed later
        self.short_first = short_first
        self.default_tag = "%default"
        self.option_strings = {}
        self._short_opt_fmt = "%s %s"
        self._long_opt_fmt = "%s=%s"

    def set_parser(self, parser):
        self.parser = parser

    def set_short_opt_delimiter(self, delim):
        if delim not in ("", " "):
            raise ValueError(
                "invalid metavar delimiter for short options: %r" % delim)
        self._short_opt_fmt = "%s" + delim + "%s"

    def set_long_opt_delimiter(self, delim):
        if delim not in ("=", " "):
            raise ValueError(
                "invalid metavar delimiter for long options: %r" % delim)
        self._long_opt_fmt = "%s" + delim + "%s"

    def indent(self):
        self.current_indent += self.indent_increment
        self.level += 1

    def dedent(self):
        self.current_indent -= self.indent_increment
        assert self.current_indent >= 0, "Indent decreased below 0."
        self.level -= 1

    def format_usage(self, usage):
        raise NotImplementedError("subclasses must implement")

    def format_heading(self, heading):
        raise NotImplementedError("subclasses must implement")

    def _format_text(self, text):
        """
        Format a paragraph of free-form text for inclusion in the
        help output at the current indentation level.
        """
        text_width = self.width - self.current_indent
        indent = " "*self.current_indent
        return textwrap.fill(text,
                             text_width,
                             initial_indent=indent,
                             subsequent_indent=indent)

    def format_description(self, description):
        if description:
            return self._format_text(description) + "\n"
        else:
            return ""

    def format_epilog(self, epilog):
        if epilog:
            return "\n" + self._format_text(epilog) + "\n"
        else:
            return ""

    def expand_default(self, option):
        """Replace the default_tag ("%default") in an option's help string
        with the option's actual default value (or NO_DEFAULT_VALUE)."""
        if self.parser is None or not self.default_tag:
            return option.help

        default_value = self.parser.defaults.get(option.dest)
        if default_value is NO_DEFAULT or default_value is None:
            default_value = self.NO_DEFAULT_VALUE

        return option.help.replace(self.default_tag, str(default_value))

    def format_option(self, option):
        # The help for each option consists of two parts:
        #   * the opt strings and metavars
        #     eg. ("-x", or "-fFILENAME, --file=FILENAME")
        #   * the user-supplied help string
        #     eg. ("turn on expert mode", "read data from FILENAME")
        #
        # If possible, we write both of these on the same line:
        #   -x      turn on expert mode
        #
        # But if the opt string list is too long, we put the help
        # string on a second line, indented to the same column it would
        # start in if it fit on the first line.
        #   -fFILENAME, --file=FILENAME
        #           read data from FILENAME
        result = []
        opts = self.option_strings[option]
        opt_width = self.help_position - self.current_indent - 2
        if len(opts) > opt_width:
            opts = "%*s%s\n" % (self.current_indent, "", opts)
            indent_first = self.help_position
        else:                       # start help on same line as opts
            opts = "%*s%-*s  " % (self.current_indent, "", opt_width, opts)
            indent_first = 0
        result.append(opts)
        if option.help:
            help_text = self.expand_default(option)
            help_lines = textwrap.wrap(help_text, self.help_width)
            result.append("%*s%s\n" % (indent_first, "", help_lines[0]))
            result.extend(["%*s%s\n" % (self.help_position, "", line)
                           for line in help_lines[1:]])
        elif opts[-1] != "\n":
            result.append("\n")
        return "".join(result)

    def store_option_strings(self, parser):
        # Pre-format the "-f FILE, --file=FILE" snippet for every option of
        # the parser (and its groups), and derive help_position/help_width
        # from the longest one.
        self.indent()
        max_len = 0
        for opt in parser.option_list:
            strings = self.format_option_strings(opt)
            self.option_strings[opt] = strings
            max_len = max(max_len, len(strings) + self.current_indent)
        self.indent()
        for group in parser.option_groups:
            for opt in group.option_list:
                strings = self.format_option_strings(opt)
                self.option_strings[opt] = strings
                max_len = max(max_len, len(strings) + self.current_indent)
        self.dedent()
        self.dedent()
        self.help_position = min(max_len + 2, self.max_help_position)
        self.help_width = self.width - self.help_position

    def format_option_strings(self, option):
        """Return a comma-separated list of option strings & metavariables."""
        if option.takes_value():
            metavar = option.metavar or option.dest.upper()
            short_opts = [self._short_opt_fmt % (sopt, metavar)
                          for sopt in option._short_opts]
            long_opts = [self._long_opt_fmt % (lopt, metavar)
                         for lopt in option._long_opts]
        else:
            short_opts = option._short_opts
            long_opts = option._long_opts

        if self.short_first:
            opts = short_opts + long_opts
        else:
            opts = long_opts + short_opts

        return ", ".join(opts)


class IndentedHelpFormatter (HelpFormatter):
    """Format help with indented section bodies.
    """

    def __init__(self,
                 indent_increment=2,
                 max_help_position=24,
                 width=None,
                 short_first=1):
        HelpFormatter.__init__(
            self, indent_increment, max_help_position, width, short_first)

    def format_usage(self, usage):
        return _("Usage: %s\n") % usage

    def format_heading(self, heading):
        return "%*s%s:\n" % (self.current_indent, "", heading)


class TitledHelpFormatter (HelpFormatter):
    """Format help with underlined section headers.
    """

    def __init__(self,
                 indent_increment=0,
                 max_help_position=24,
                 width=None,
                 short_first=0):
        HelpFormatter.__init__ (
            self, indent_increment, max_help_position, width, short_first)

    def format_usage(self, usage):
        return "%s  %s\n" % (self.format_heading(_("Usage")), usage)

    def format_heading(self, heading):
        # Underline with "=" at level 0, "-" at level 1.
        return "%s\n%s\n" % (heading, "=-"[self.level] * len(heading))


def _parse_num(val, type):
    """Convert string 'val' to 'type' honouring 0x/0b/0 radix prefixes."""
    if val[:2].lower() == "0x":         # hexadecimal
        radix = 16
    elif val[:2].lower() == "0b":       # binary
        radix = 2
        val = val[2:] or "0"            # have to remove "0b" prefix
    elif val[:1] == "0":                # octal
        radix = 8
    else:                               # decimal
        radix = 10

    return type(val, radix)

def _parse_int(val):
    return _parse_num(val, int)

# Maps type name -> (converter, localized noun used in error messages).
_builtin_cvt = { "int" : (_parse_int, _("integer")),
                 "long" : (_parse_int, _("integer")),
                 "float" : (float, _("floating-point")),
                 "complex" : (complex, _("complex")) }

def check_builtin(option, opt, value):
    """Type-checker for the built-in numeric option types."""
    (cvt, what) = _builtin_cvt[option.type]
    try:
        return cvt(value)
    except ValueError:
        raise OptionValueError(
            _("option %s: invalid %s value: %r") % (opt, what, value))

def check_choice(option, opt, value):
    """Type-checker for the "choice" option type."""
    if value in option.choices:
        return value
    else:
        choices = ", ".join(map(repr, option.choices))
        raise OptionValueError(
            _("option %s: invalid choice: %r (choose from %s)")
            % (opt, value, choices))

# Not supplying a default is different from a default of None,
# so we need an explicit "not supplied" value.
NO_DEFAULT = ("NO", "DEFAULT")


class Option:
    """
    Instance attributes:
      _short_opts : [string]
      _long_opts : [string]

      action : string
      type : string
      dest : string
      default : any
      nargs : int
      const : any
      choices : [string]
      callback : function
      callback_args : (any*)
      callback_kwargs : { string : any }
      help : string
      metavar : string
    """

    # The list of instance attributes that may be set through
    # keyword args to the constructor.
    ATTRS = ['action',
             'type',
             'dest',
             'default',
             'nargs',
             'const',
             'choices',
             'callback',
             'callback_args',
             'callback_kwargs',
             'help',
             'metavar']

    # The set of actions allowed by option parsers.  Explicitly listed
    # here so the constructor can validate its arguments.
    ACTIONS = ("store",
               "store_const",
               "store_true",
               "store_false",
               "append",
               "append_const",
               "count",
               "callback",
               "help",
               "version")

    # The set of actions that involve storing a value somewhere;
    # also listed just for constructor argument validation.  (If
    # the action is one of these, there must be a destination.)
    STORE_ACTIONS = ("store",
                     "store_const",
                     "store_true",
                     "store_false",
                     "append",
                     "append_const",
                     "count")

    # The set of actions for which it makes sense to supply a value
    # type, ie. which may consume an argument from the command line.
    TYPED_ACTIONS = ("store",
                     "append",
                     "callback")

    # The set of actions which *require* a value type, ie. that
    # always consume an argument from the command line.
    ALWAYS_TYPED_ACTIONS = ("store",
                            "append")

    # The set of actions which take a 'const' attribute.
    CONST_ACTIONS = ("store_const",
                     "append_const")

    # The set of known types for option parsers.  Again, listed here for
    # constructor argument validation.
    TYPES = ("string", "int", "long", "float", "complex", "choice")

    # Dictionary of argument checking functions, which convert and
    # validate option arguments according to the option type.
    #
    # Signature of checking functions is:
    #    check(option : Option, opt : string, value : string) -> any
    # where
    #    option is the Option instance calling the checker
    #    opt is the actual option seen on the command-line
    #      (eg. "-a", "--file")
    #    value is the option argument seen on the command-line
    #
    # The return value should be in the appropriate Python type
    # for option.type -- eg. an integer if option.type == "int".
    #
    # If no checker is defined for a type, arguments will be
    # unchecked and remain strings.
    TYPE_CHECKER = { "int"    : check_builtin,
                     "long"   : check_builtin,
                     "float"  : check_builtin,
                     "complex": check_builtin,
                     "choice" : check_choice,
                   }


    # CHECK_METHODS is a list of unbound method objects; they are called
    # by the constructor, in order, after all attributes are
    # initialized.  The list is created and filled in later, after all
    # the methods are actually defined.  (I just put it here because I
    # like to define and document all class attributes in the same
    # place.)  Subclasses that add another _check_*() method should
    # define their own CHECK_METHODS list that adds their check method
    # to those from this class.
    CHECK_METHODS = None


    # -- Constructor/initialization methods ----------------------------

    def __init__(self, *opts, **attrs):
        # Set _short_opts, _long_opts attrs from 'opts' tuple.
        # Have to be set now, in case no option strings are supplied.
        self._short_opts = []
        self._long_opts = []
        opts = self._check_opt_strings(opts)
        self._set_opt_strings(opts)

        # Set all other attrs (action, type, etc.) from 'attrs' dict
        self._set_attrs(attrs)

        # Check all the attributes we just set.  There are lots of
        # complicated interdependencies, but luckily they can be farmed
        # out to the _check_*() methods listed in CHECK_METHODS -- which
        # could be handy for subclasses!  The one thing these all share
        # is that they raise OptionError if they discover a problem.
        for checker in self.CHECK_METHODS:
            checker(self)

    def _check_opt_strings(self, opts):
        # Filter out None because early versions of Optik had exactly
        # one short option and one long option, either of which
        # could be None.
        opts = [opt for opt in opts if opt]
        if not opts:
            raise TypeError("at least one option string must be supplied")
        return opts

    def _set_opt_strings(self, opts):
        # Sort each option string into _short_opts ("-x") or
        # _long_opts ("--example"), validating its form along the way.
        for opt in opts:
            if len(opt) < 2:
                raise OptionError(
                    "invalid option string %r: "
                    "must be at least two characters long" % opt, self)
            elif len(opt) == 2:
                if not (opt[0] == "-" and opt[1] != "-"):
                    raise OptionError(
                        "invalid short option string %r: "
                        "must be of the form -x, (x any non-dash char)" % opt,
                        self)
                self._short_opts.append(opt)
            else:
                if not (opt[0:2] == "--" and opt[2] != "-"):
                    raise OptionError(
                        "invalid long option string %r: "
                        "must start with --, followed by non-dash" % opt,
                        self)
                self._long_opts.append(opt)

    def _set_attrs(self, attrs):
        # Consume recognised keyword args; anything left over is an error.
        for attr in self.ATTRS:
            if attr in attrs:
                setattr(self, attr, attrs[attr])
                del attrs[attr]
            else:
                if attr == 'default':
                    setattr(self, attr, NO_DEFAULT)
                else:
                    setattr(self, attr, None)
        if attrs:
            attrs = sorted(attrs.keys())
            raise OptionError(
                "invalid keyword arguments: %s" % ", ".join(attrs),
                self)


    # -- Constructor validation methods ---------------------------------

    def _check_action(self):
        if self.action is None:
            self.action = "store"
        elif self.action not in self.ACTIONS:
            raise OptionError("invalid action: %r" % self.action, self)

    def _check_type(self):
        if self.type is None:
            if self.action in self.ALWAYS_TYPED_ACTIONS:
                if self.choices is not None:
                    # The "choices" attribute implies "choice" type.
                    self.type = "choice"
                else:
                    # No type given?  "string" is the most sensible default.
                    self.type = "string"
        else:
            # Allow type objects or builtin type conversion functions
            # (int, str, etc.) as an alternative to their names.  (The
            # complicated check of builtins is only necessary for
            # Python 2.1 and earlier, and is short-circuited by the
            # first check on modern Pythons.)
            import builtins
            if ( isinstance(self.type, type) or
                 (hasattr(self.type, "__name__") and
                  getattr(builtins, self.type.__name__, None) is self.type) ):
                self.type = self.type.__name__

            if self.type == "str":
                self.type = "string"

            if self.type not in self.TYPES:
                raise OptionError("invalid option type: %r" % self.type, self)
            if self.action not in self.TYPED_ACTIONS:
                raise OptionError(
                    "must not supply a type for action %r" % self.action, self)

    def _check_choice(self):
        if self.type == "choice":
            if self.choices is None:
                raise OptionError(
                    "must supply a list of choices for type 'choice'", self)
            elif not isinstance(self.choices, (tuple, list)):
                raise OptionError(
                    "choices must be a list of strings ('%s' supplied)"
                    % str(type(self.choices)).split("'")[1], self)
        elif self.choices is not None:
            raise OptionError(
                "must not supply choices for type %r" % self.type, self)

    def _check_dest(self):
        # No destination given, and we need one for this action.  The
        # self.type check is for callbacks that take a value.
        takes_value = (self.action in self.STORE_ACTIONS or
                       self.type is not None)
        if self.dest is None and takes_value:

            # Glean a destination from the first long option string,
            # or from the first short option string if no long options.
            if self._long_opts:
                # eg. "--foo-bar" -> "foo_bar"
                self.dest = self._long_opts[0][2:].replace('-', '_')
            else:
                self.dest = self._short_opts[0][1]

    def _check_const(self):
        if self.action not in self.CONST_ACTIONS and self.const is not None:
            raise OptionError(
                "'const' must not be supplied for action %r" % self.action,
                self)

    def _check_nargs(self):
        if self.action in self.TYPED_ACTIONS:
            if self.nargs is None:
                self.nargs = 1
        elif self.nargs is not None:
            raise OptionError(
                "'nargs' must not be supplied for action %r" % self.action,
                self)

    def _check_callback(self):
        if self.action == "callback":
            if not callable(self.callback):
                raise OptionError(
                    "callback not callable: %r" % self.callback, self)
            if (self.callback_args is not None and
                not isinstance(self.callback_args, tuple)):
                raise OptionError(
                    "callback_args, if supplied, must be a tuple: not %r"
                    % self.callback_args, self)
            if (self.callback_kwargs is not None and
                not isinstance(self.callback_kwargs, dict)):
                raise OptionError(
                    "callback_kwargs, if supplied, must be a dict: not %r"
                    % self.callback_kwargs, self)
        else:
            if self.callback is not None:
                raise OptionError(
                    "callback supplied (%r) for non-callback option"
                    % self.callback, self)
            if self.callback_args is not None:
                raise OptionError(
                    "callback_args supplied for non-callback option", self)
            if self.callback_kwargs is not None:
                raise OptionError(
                    "callback_kwargs supplied for non-callback option", self)

    CHECK_METHODS = [_check_action,
                     _check_type,
                     _check_choice,
                     _check_dest,
                     _check_const,
                     _check_nargs,
                     _check_callback]


    # -- Miscellaneous methods ------------------------------------------

    def __str__(self):
        return "/".join(self._short_opts + self._long_opts)

    __repr__ = _repr

    def takes_value(self):
        return self.type is not None

    def get_opt_string(self):
        # Canonical option string: first long option, else first short one.
        if self._long_opts:
            return self._long_opts[0]
        else:
            return self._short_opts[0]


    # -- Processing methods ---------------------------------------------

    def check_value(self, opt, value):
        checker = self.TYPE_CHECKER.get(self.type)
        if checker is None:
            return value
        else:
            return checker(self, opt, value)

    def convert_value(self, opt, value):
        if value is not None:
            if self.nargs == 1:
                return self.check_value(opt, value)
            else:
                return tuple([self.check_value(opt, v) for v in value])

    def process(self, opt, value, values, parser):

        # First, convert the value(s) to the right type.  Howl if any
        # value(s) are bogus.
        value = self.convert_value(opt, value)

        # And then take whatever action is expected of us.
        # This is a separate method to make life easier for
        # subclasses to add new actions.
        return self.take_action(
            self.action, self.dest, opt, value, values, parser)

    def take_action(self, action, dest, opt, value, values, parser):
        """Carry out 'action' for this option: store/append/count values
        on 'values', invoke a callback, or print help/version and exit."""
        if action == "store":
            setattr(values, dest, value)
        elif action == "store_const":
            setattr(values, dest, self.const)
        elif action == "store_true":
            setattr(values, dest, True)
        elif action == "store_false":
            setattr(values, dest, False)
        elif action == "append":
            values.ensure_value(dest, []).append(value)
        elif action == "append_const":
            values.ensure_value(dest, []).append(self.const)
        elif action == "count":
            setattr(values, dest, values.ensure_value(dest, 0) + 1)
        elif action == "callback":
            args = self.callback_args or ()
            kwargs = self.callback_kwargs or {}
            self.callback(self, opt, value, parser, *args, **kwargs)
        elif action == "help":
            parser.print_help()
            parser.exit()
        elif action == "version":
            parser.print_version()
            parser.exit()
        else:
            raise ValueError("unknown action %r" % self.action)

        return 1

# class Option


SUPPRESS_HELP = "SUPPRESS"+"HELP"
SUPPRESS_USAGE = "SUPPRESS"+"USAGE"


class Values:
    """Bag of parsed option values, one attribute per option dest."""

    def __init__(self, defaults=None):
        if defaults:
            for (attr, val) in defaults.items():
                setattr(self, attr, val)

    def __str__(self):
        return str(self.__dict__)

    __repr__ = _repr

    def __eq__(self, other):
        if isinstance(other, Values):
            return self.__dict__ == other.__dict__
        elif isinstance(other, dict):
            return self.__dict__ == other
        else:
            return NotImplemented

    def _update_careful(self, dict):
        """
        Update the option values from an arbitrary dictionary, but only
        use keys from dict that already have a corresponding attribute
        in self.  Any keys in dict without a corresponding attribute
        are silently ignored.
        """
        for attr in dir(self):
            if attr in dict:
                dval = dict[attr]
                if dval is not None:
                    setattr(self, attr, dval)

    def _update_loose(self, dict):
        """
        Update the option values from an arbitrary dictionary,
        using all keys from the dictionary regardless of whether
        they have a corresponding attribute in self or not.
        """
        self.__dict__.update(dict)

    def _update(self, dict, mode):
        if mode == "careful":
            self._update_careful(dict)
        elif mode == "loose":
            self._update_loose(dict)
        else:
            raise ValueError("invalid update mode: %r" % mode)

    def read_module(self, modname, mode="careful"):
        __import__(modname)
        mod = sys.modules[modname]
        self._update(vars(mod), mode)

    def read_file(self, filename, mode="careful"):
        # NOTE(review): exec of arbitrary file contents -- only safe for
        # trusted configuration files.
        vars = {}
        exec(open(filename).read(), vars)
        self._update(vars, mode)

    def ensure_value(self, attr, value):
        # Return the existing attribute, or set it to 'value' if it is
        # missing or None (used by "append"/"count" actions).
        if not hasattr(self, attr) or getattr(self, attr) is None:
            setattr(self, attr, value)
        return getattr(self, attr)


class OptionContainer:

    """
    Abstract base class.

    Class attributes:
      standard_option_list : [Option]
        list of standard options that will be accepted by all instances
        of this parser class (intended to be overridden by subclasses).

    Instance attributes:
      option_list : [Option]
        the list of Option objects contained by this OptionContainer
      _short_opt : { string : Option }
        dictionary mapping short option strings, eg. "-f" or "-X",
        to the Option instances that implement them.  If an Option
        has multiple short option strings, it will appears in this
        dictionary multiple times. [1]
      _long_opt : { string : Option }
        dictionary mapping long option strings, eg. "--file" or
        "--exclude", to the Option instances that implement them.
        Again, a given Option can occur multiple times in this
        dictionary. [1]
      defaults : { string : any }
        dictionary mapping option destination names to default
        values for each destination [1]

      [1] These mappings are common to (shared by) all components
          of the controlling OptionParser, where they are initially
          created.

    """

    def __init__(self, option_class, conflict_handler, description):
        # Initialize the option list and related data structures.
        # This method must be provided by subclasses, and it must
        # initialize at least the following instance attributes:
        # option_list, _short_opt, _long_opt, defaults.
        self._create_option_list()

        self.option_class = option_class
        self.set_conflict_handler(conflict_handler)
        self.set_description(description)

    def _create_option_mappings(self):
        # For use by OptionParser constructor -- create the master
        # option mappings used by this OptionParser and all
        # OptionGroups that it owns.
        self._short_opt = {}            # single letter -> Option instance
        self._long_opt = {}             # long option -> Option instance
        self.defaults = {}              # maps option dest -> default value


    def _share_option_mappings(self, parser):
        # For use by OptionGroup constructor -- use shared option
        # mappings from the OptionParser that owns this OptionGroup.
        self._short_opt = parser._short_opt
        self._long_opt = parser._long_opt
        self.defaults = parser.defaults

    def set_conflict_handler(self, handler):
        # "error" raises on conflicting option strings; "resolve" removes
        # the older option's conflicting strings.
        if handler not in ("error", "resolve"):
            raise ValueError("invalid conflict_resolution value %r" % handler)
        self.conflict_handler = handler

    def set_description(self, description):
        self.description = description

    def get_description(self):
        return self.description


    def destroy(self):
        """see OptionParser.destroy()."""
        del self._short_opt
        del self._long_opt
        del self.defaults


    # -- Option-adding methods ------------------------------------------

    def _check_conflict(self, option):
        # Detect option strings already claimed by another Option and apply
        # the configured conflict_handler.
        conflict_opts = []
        for opt in option._short_opts:
            if opt in self._short_opt:
                conflict_opts.append((opt, self._short_opt[opt]))
        for opt in option._long_opts:
            if opt in self._long_opt:
                conflict_opts.append((opt, self._long_opt[opt]))

        if conflict_opts:
            handler = self.conflict_handler
            if handler == "error":
                raise OptionConflictError(
                    "conflicting option string(s): %s"
                    % ", ".join([co[0] for co in conflict_opts]),
                    option)
            elif handler == "resolve":
                for (opt, c_option) in conflict_opts:
                    if opt.startswith("--"):
                        c_option._long_opts.remove(opt)
                        del self._long_opt[opt]
                    else:
                        c_option._short_opts.remove(opt)
                        del self._short_opt[opt]
                    if not (c_option._short_opts or c_option._long_opts):
                        c_option.container.option_list.remove(c_option)

    def add_option(self, *args, **kwargs):
        """add_option(Option)
           add_option(opt_str, ..., kwarg=val, ...)
        """
        if isinstance(args[0], str):
            option = self.option_class(*args, **kwargs)
        elif len(args) == 1 and not kwargs:
            option = args[0]
            if not isinstance(option, Option):
                raise TypeError("not an Option instance: %r" % option)
        else:
            raise TypeError("invalid arguments")

        self._check_conflict(option)

        self.option_list.append(option)
        option.container = self
        for opt in option._short_opts:
            self._short_opt[opt] = option
        for opt in option._long_opts:
            self._long_opt[opt] = option

        if option.dest is not None:     # option has a dest, we need a default
            if option.default is not NO_DEFAULT:
                self.defaults[option.dest] = option.default
            elif option.dest not in self.defaults:
                self.defaults[option.dest] = None

        return option

    def add_options(self, option_list):
        for option in option_list:
            self.add_option(option)

    # -- Option query/removal methods -----------------------------------

    def get_option(self, opt_str):
        return (self._short_opt.get(opt_str) or
                self._long_opt.get(opt_str))

    def has_option(self, opt_str):
        return (opt_str in self._short_opt or
                opt_str in self._long_opt)

    def remove_option(self, opt_str):
        option = self._short_opt.get(opt_str)
        if option is None:
            option = self._long_opt.get(opt_str)
        if option is None:
            raise ValueError("no such option %r" % opt_str)

        # Remove *all* strings belonging to the option, not just opt_str.
        for opt in option._short_opts:
            del self._short_opt[opt]
        for opt in option._long_opts:
            del self._long_opt[opt]
        option.container.option_list.remove(option)


    # -- Help-formatting methods ----------------------------------------

    def format_option_help(self, formatter):
        if not self.option_list:
            return ""
        result = []
        for option in self.option_list:
            if not option.help is SUPPRESS_HELP:
                result.append(formatter.format_option(option))
        return "".join(result)

    def format_description(self, formatter):
        return formatter.format_description(self.get_description())

    def format_help(self, formatter):
        result = []
        if self.description:
            result.append(self.format_description(formatter))
        if self.option_list:
            result.append(self.format_option_help(formatter))
        return "\n".join(result)


class OptionGroup (OptionContainer):
    """A titled sub-grouping of a parser's options, used only for help
    output; shares its option mappings with the owning OptionParser."""

    def __init__(self, parser, title, description=None):
        self.parser = parser
        OptionContainer.__init__(
            self, parser.option_class, parser.conflict_handler, description)
        self.title = title

    def _create_option_list(self):
        self.option_list = []
        self._share_option_mappings(self.parser)

    def set_title(self, title):
        self.title = title

    def destroy(self):
        """see OptionParser.destroy()."""
        OptionContainer.destroy(self)
        del self.option_list

    # -- Help-formatting methods ----------------------------------------

    def format_help(self, formatter):
        result = formatter.format_heading(self.title)
        formatter.indent()
        result += OptionContainer.format_help(self, formatter)
        formatter.dedent()
        return result


class OptionParser (OptionContainer):

    """
    Class attributes:
      standard_option_list : [Option]
        list of standard options that will be accepted by all instances
        of this parser class (intended to be overridden by subclasses).

    Instance attributes:
      usage : string
        a usage string for your program.  Before it is displayed
        to the user, "%prog" will be expanded to the name of
        your program (self.prog or os.path.basename(sys.argv[0])).
      prog : string
        the name of the current program (to override
        os.path.basename(sys.argv[0])).
      description : string
        A paragraph of text giving a brief overview of your program.
        optparse reformats this paragraph to fit the current terminal
        width and prints it when the user requests help (after usage,
        but before the list of options).
      epilog : string
        paragraph of help text to print after option help

      option_groups : [OptionGroup]
        list of option groups in this parser (option groups are
        irrelevant for parsing the command-line, but very useful
        for generating help)

      allow_interspersed_args : bool = true
        if true, positional arguments may be interspersed with options.
        Assuming -a and -b each take a single argument, the command-line
          -ablah foo bar -bboo baz
        will be interpreted the same as
          -ablah -bboo -- foo bar baz
        If this flag were false, that command line would be interpreted as
          -ablah -- foo bar -bboo baz
        -- ie. we stop processing options as soon as we see the first
        non-option argument.  (This is the tradition followed by
        Python's getopt module, Perl's Getopt::Std, and other argument-
        parsing libraries, but it is generally annoying to users.)

      process_default_values : bool = true
        if true, option default values are processed similarly to option
        values from the command line: that is, they are passed to the
        type-checking function for the option's type (as long as the
        default value is a string).  (This really only matters if you
        have defined custom types; see SF bug #955889.)  Set it to false
        to restore the behaviour of Optik 1.4.1 and earlier.

      rargs : [string]
        the argument list currently being parsed.  Only set when
        parse_args() is active, and continually trimmed down as
        we consume arguments.  Mainly there for the benefit of
        callback options.
      largs : [string]
        the list of leftover arguments that we have skipped while
        parsing options.  If allow_interspersed_args is false, this
        list is always empty.
      values : Values
        the set of option values currently being accumulated.  Only
        set when parse_args() is active.  Also mainly for callbacks.

    Because of the 'rargs', 'largs', and 'values' attributes,
    OptionParser is not thread-safe.  If, for some perverse reason, you
    need to parse command-line arguments simultaneously in different
    threads, use different OptionParser instances.

    """

    standard_option_list = []

    def __init__(self,
                 usage=None,
                 option_list=None,
                 option_class=Option,
                 version=None,
                 conflict_handler="error",
                 description=None,
                 formatter=None,
                 add_help_option=True,
                 prog=None,
                 epilog=None):
        OptionContainer.__init__(
            self, option_class, conflict_handler, description)
        self.set_usage(usage)
        self.prog = prog
        self.version = version
        self.allow_interspersed_args = True
        self.process_default_values = True
        if formatter is None:
            formatter = IndentedHelpFormatter()
        self.formatter = formatter
        self.formatter.set_parser(self)
        self.epilog = epilog

        # Populate the option list; initial sources are the
        # standard_option_list class attribute, the 'option_list'
        # argument, and (if applicable) the _add_version_option() and
        # _add_help_option() methods.
        self._populate_option_list(option_list,
                                   add_help=add_help_option)

        self._init_parsing_state()


    def destroy(self):
        """
        Declare that you are done with this OptionParser.  This cleans up
        reference cycles so the OptionParser (and all objects referenced by
        it) can be garbage-collected promptly.  After calling destroy(), the
        OptionParser is unusable.
        """
        OptionContainer.destroy(self)
        for group in self.option_groups:
            group.destroy()
        del self.option_list
        del self.option_groups
        del self.formatter


    # -- Private methods ------------------------------------------------
    # (used by our or OptionContainer's constructor)

    def _create_option_list(self):
        self.option_list = []
        self.option_groups = []
        self._create_option_mappings()

    def _add_help_option(self):
        self.add_option("-h", "--help",
                        action="help",
                        help=_("show this help message and exit"))

    def _add_version_option(self):
        self.add_option("--version",
                        action="version",
                        help=_("show program's version number and exit"))

    def _populate_option_list(self, option_list, add_help=True):
        if self.standard_option_list:
            self.add_options(self.standard_option_list)
        if option_list:
            self.add_options(option_list)
        if self.version:
            self._add_version_option()
        if add_help:
            self._add_help_option()

    def _init_parsing_state(self):
        # These are set in parse_args() for the convenience of callbacks.
        self.rargs = None
        self.largs = None
        self.values = None


    # -- Simple modifier methods ----------------------------------------

    def set_usage(self, usage):
        if usage is None:
            self.usage = _("%prog [options]")
        elif usage is SUPPRESS_USAGE:
            self.usage = None
        # For backwards compatibility with Optik 1.3 and earlier.
        elif usage.lower().startswith("usage: "):
            self.usage = usage[7:]
        else:
            self.usage = usage

    def enable_interspersed_args(self):
        """Set parsing to not stop on the first non-option, allowing
        interspersing switches with command arguments. This is the
        default behavior. See also disable_interspersed_args() and the
        class documentation description of the attribute
        allow_interspersed_args."""
        self.allow_interspersed_args = True

    def disable_interspersed_args(self):
        """Set parsing to stop on the first non-option. Use this if
        you have a command processor which runs another command that
        has options of its own and you want to make sure these options
        don't get confused.
        """
        self.allow_interspersed_args = False

    def set_process_default_values(self, process):
        self.process_default_values = process

    def set_default(self, dest, value):
        self.defaults[dest] = value

    def set_defaults(self, **kwargs):
        self.defaults.update(kwargs)

    def _get_all_options(self):
        # Options owned directly by the parser plus those in its groups.
        options = self.option_list[:]
        for group in self.option_groups:
            options.extend(group.option_list)
        return options

    def get_default_values(self):
        if not self.process_default_values:
            # Old, pre-Optik 1.5 behaviour.
            return Values(self.defaults)

        defaults = self.defaults.copy()
        for option in self._get_all_options():
            default = defaults.get(option.dest)
            if isinstance(default, str):
                # String defaults go through the same type-checker as
                # values seen on the command line.
                opt_str = option.get_opt_string()
                defaults[option.dest] = option.check_value(opt_str, default)

        return Values(defaults)


    # -- OptionGroup methods --------------------------------------------

    def add_option_group(self, *args, **kwargs):
        # XXX lots of overlap with OptionContainer.add_option()
        if isinstance(args[0], str):
            group = OptionGroup(self, *args, **kwargs)
        elif len(args) == 1 and not kwargs:
            group = args[0]
            if not isinstance(group, OptionGroup):
                raise TypeError("not an OptionGroup instance: %r" % group)
            if group.parser is not self:
                raise ValueError("invalid OptionGroup (wrong parser)")
        else:
            raise TypeError("invalid arguments")

        self.option_groups.append(group)
        return group

    def get_option_group(self, opt_str):
        option = (self._short_opt.get(opt_str) or
                  self._long_opt.get(opt_str))
        if option and option.container is not self:
            return option.container
        return None


    # -- Option-parsing methods -----------------------------------------

    def _get_args(self, args):
        if args is None:
            return sys.argv[1:]
        else:
            return args[:]              # don't modify caller's list

    def parse_args(self, args=None, values=None):
        """
        parse_args(args : [string] = sys.argv[1:],
                   values : Values = None)
        -> (values : Values, args : [string])

        Parse the command-line options found in 'args' (default:
        sys.argv[1:]).  Any errors result in a call to 'error()', which
        by default prints the usage message to stderr and calls
        sys.exit() with an error message.  On success returns a pair
        (values, args) where 'values' is an Values instance (with all
        your option values) and 'args' is the list of arguments left
        over after parsing options.
        """
        rargs = self._get_args(args)
        if values is None:
            values = self.get_default_values()

        # Store the halves of the argument list as attributes for the
        # convenience of callbacks:
        #   rargs
        #     the rest of the command-line (the "r" stands for
        #     "remaining" or "right-hand")
        #   largs
        #     the leftover arguments -- ie. what's left after removing
        #     options and their arguments (the "l" stands for "leftover"
        #     or "left-hand")
        self.rargs = rargs
        self.largs = largs = []
        self.values = values

        try:
            stop = self._process_args(largs, rargs, values)
        except (BadOptionError, OptionValueError) as err:
            self.error(str(err))

        args = largs + rargs
        return self.check_values(values, args)

    def check_values(self, values, args):
        """
        check_values(values : Values, args : [string])
        -> (values : Values, args : [string])

        Check that the supplied option values and leftover arguments are
        valid.  Returns the option values and leftover arguments
        (possibly adjusted, possibly completely new -- whatever you
        like).  Default implementation just returns the passed-in
        values; subclasses may override as desired.
        """
        return (values, args)

    def _process_args(self, largs, rargs, values):
        """_process_args(largs : [string],
                         rargs : [string],
                         values : Values)

        Process command-line arguments and populate 'values', consuming
        options and arguments from 'rargs'.  If 'allow_interspersed_args' is
        false, stop at the first non-option argument.  If true, accumulate any
        interspersed non-option arguments in 'largs'.
        """
        while rargs:
            arg = rargs[0]
            # We handle bare "--" explicitly, and bare "-" is handled by the
            # standard arg handler since the short arg case ensures that the
            # len of the opt string is greater than 1.
            if arg == "--":
                del rargs[0]
                return
            elif arg[0:2] == "--":
                # process a single long option (possibly with value(s))
                self._process_long_opt(rargs, values)
            elif arg[:1] == "-" and len(arg) > 1:
                # process a cluster of short options (possibly with
                # value(s) for the last one only)
                self._process_short_opts(rargs, values)
            elif self.allow_interspersed_args:
                largs.append(arg)
                del rargs[0]
            else:
                return                  # stop now, leave this arg in rargs

        # Say this is the original argument list:
        # [arg0, arg1, ..., arg(i-1), arg(i), arg(i+1), ..., arg(N-1)]
        #                            ^
        # (we are about to process arg(i)).
        #
        # Then rargs is [arg(i), ..., arg(N-1)] and largs is a *subset* of
        # [arg0, ..., arg(i-1)] (any options and their arguments will have
        # been removed from largs).
        #
        # The while loop will usually consume 1 or more arguments per pass.
        # If it consumes 1 (eg. arg is an option that takes no arguments),
        # then after _process_arg() is done the situation is:
        #
        #   largs = subset of [arg0, ..., arg(i)]
        #   rargs = [arg(i+1), ..., arg(N-1)]
        #
        # If allow_interspersed_args is false, largs will always be
        # *empty* -- still a subset of [arg0, ..., arg(i-1)], but
        # not a very interesting subset!

    def _match_long_opt(self, opt):
        """_match_long_opt(opt : string) -> string

        Determine which long option string 'opt' matches, ie. which one
        it is an unambiguous abbreviation for.  Raises BadOptionError if
        'opt' doesn't unambiguously match any long option string.
        """
        return _match_abbrev(opt, self._long_opt)

    def _process_long_opt(self, rargs, values):
        arg = rargs.pop(0)

        # Value explicitly attached to arg?  Pretend it's the next
        # argument.
if "=" in arg: (opt, next_arg) = arg.split("=", 1) rargs.insert(0, next_arg) had_explicit_value = True else: opt = arg had_explicit_value = False opt = self._match_long_opt(opt) option = self._long_opt[opt] if option.takes_value(): nargs = option.nargs if len(rargs) < nargs: self.error(ngettext( "%(option)s option requires %(number)d argument", "%(option)s option requires %(number)d arguments", nargs) % {"option": opt, "number": nargs}) elif nargs == 1: value = rargs.pop(0) else: value = tuple(rargs[0:nargs]) del rargs[0:nargs] elif had_explicit_value: self.error(_("%s option does not take a value") % opt) else: value = None option.process(opt, value, values, self) def _process_short_opts(self, rargs, values): arg = rargs.pop(0) stop = False i = 1 for ch in arg[1:]: opt = "-" + ch option = self._short_opt.get(opt) i += 1 # we have consumed a character if not option: raise BadOptionError(opt) if option.takes_value(): # Any characters left in arg? Pretend they're the # next arg, and stop consuming characters of arg. 
if i < len(arg): rargs.insert(0, arg[i:]) stop = True nargs = option.nargs if len(rargs) < nargs: self.error(ngettext( "%(option)s option requires %(number)d argument", "%(option)s option requires %(number)d arguments", nargs) % {"option": opt, "number": nargs}) elif nargs == 1: value = rargs.pop(0) else: value = tuple(rargs[0:nargs]) del rargs[0:nargs] else: # option doesn't take a value value = None option.process(opt, value, values, self) if stop: break # -- Feedback methods ---------------------------------------------- def get_prog_name(self): if self.prog is None: return os.path.basename(sys.argv[0]) else: return self.prog def expand_prog_name(self, s): return s.replace("%prog", self.get_prog_name()) def get_description(self): return self.expand_prog_name(self.description) def exit(self, status=0, msg=None): if msg: sys.stderr.write(msg) sys.exit(status) def error(self, msg): """error(msg : string) Print a usage message incorporating 'msg' to stderr and exit. If you override this in a subclass, it should not return -- it should either exit or raise an exception. """ self.print_usage(sys.stderr) self.exit(2, "%s: error: %s\n" % (self.get_prog_name(), msg)) def get_usage(self): if self.usage: return self.formatter.format_usage( self.expand_prog_name(self.usage)) else: return "" def print_usage(self, file=None): """print_usage(file : file = stdout) Print the usage message for the current program (self.usage) to 'file' (default stdout). Any occurrence of the string "%prog" in self.usage is replaced with the name of the current program (basename of sys.argv[0]). Does nothing if self.usage is empty or not defined. """ if self.usage: print(self.get_usage(), file=file) def get_version(self): if self.version: return self.expand_prog_name(self.version) else: return "" def print_version(self, file=None): """print_version(file : file = stdout) Print the version message for this program (self.version) to 'file' (default stdout). 
As with print_usage(), any occurrence of "%prog" in self.version is replaced by the current program's name. Does nothing if self.version is empty or undefined. """ if self.version: print(self.get_version(), file=file) def format_option_help(self, formatter=None): if formatter is None: formatter = self.formatter formatter.store_option_strings(self) result = [] result.append(formatter.format_heading(_("Options"))) formatter.indent() if self.option_list: result.append(OptionContainer.format_option_help(self, formatter)) result.append("\n") for group in self.option_groups: result.append(group.format_help(formatter)) result.append("\n") formatter.dedent() # Drop the last "\n", or the header if no options or option groups: return "".join(result[:-1]) def format_epilog(self, formatter): return formatter.format_epilog(self.epilog) def format_help(self, formatter=None): if formatter is None: formatter = self.formatter result = [] if self.usage: result.append(self.get_usage() + "\n") if self.description: result.append(self.format_description(formatter) + "\n") result.append(self.format_option_help(formatter)) result.append(self.format_epilog(formatter)) return "".join(result) def print_help(self, file=None): """print_help(file : file = stdout) Print an extended help message, listing all options and any help text provided with them, to 'file' (default stdout). """ if file is None: file = sys.stdout file.write(self.format_help()) # class OptionParser def _match_abbrev(s, wordmap): """_match_abbrev(s : string, wordmap : {string : Option}) -> string Return the string key in 'wordmap' for which 's' is an unambiguous abbreviation. If 's' is found to be ambiguous or doesn't match any of 'words', raise BadOptionError. """ # Is there an exact match? if s in wordmap: return s else: # Isolate all words with s as a prefix. possibilities = [word for word in wordmap.keys() if word.startswith(s)] # No exact match, so there had better be just one possibility. 
if len(possibilities) == 1: return possibilities[0] elif not possibilities: raise BadOptionError(s) else: # More than one possible completion: ambiguous prefix. possibilities.sort() raise AmbiguousOptionError(s, possibilities) # Some day, there might be many Option classes. As of Optik 1.3, the # preferred way to instantiate Options is indirectly, via make_option(), # which will become a factory function when there are many Option # classes. make_option = Option
bsd-3-clause
Shrhawk/edx-platform
cms/djangoapps/contentstore/management/commands/export.py
64
1688
""" Script for exporting courseware from Mongo to a tar.gz file """ import os from django.core.management.base import BaseCommand, CommandError from xmodule.modulestore.xml_exporter import export_course_to_xml from xmodule.modulestore.django import modulestore from opaque_keys.edx.keys import CourseKey from xmodule.contentstore.django import contentstore from opaque_keys import InvalidKeyError from opaque_keys.edx.locations import SlashSeparatedCourseKey class Command(BaseCommand): """ Export the specified data directory into the default ModuleStore """ help = 'Export the specified data directory into the default ModuleStore' def handle(self, *args, **options): "Execute the command" if len(args) != 2: raise CommandError("export requires two arguments: <course id> <output path>") try: course_key = CourseKey.from_string(args[0]) except InvalidKeyError: try: course_key = SlashSeparatedCourseKey.from_deprecated_string(args[0]) except InvalidKeyError: raise CommandError("Invalid course_key: '%s'. " % args[0]) if not modulestore().get_course(course_key): raise CommandError("Course with %s key not found." % args[0]) output_path = args[1] print "Exporting course id = {0} to {1}".format(course_key, output_path) if not output_path.endswith('/'): output_path += '/' root_dir = os.path.dirname(output_path) course_dir = os.path.splitext(os.path.basename(output_path))[0] export_course_to_xml(modulestore(), contentstore(), course_key, root_dir, course_dir)
agpl-3.0
mcyprian/pyp2rpm
pyp2rpm/virtualenv.py
1
4252
import os
import glob
import logging
import pprint

from virtualenvapi.manage import VirtualEnvironment
import virtualenvapi.exceptions as ve

from pyp2rpm.exceptions import VirtualenvFailException
from pyp2rpm.settings import DEFAULT_PYTHON_VERSION, MODULE_SUFFIXES

logger = logging.getLogger(__name__)


def site_packages_filter(site_packages_list):
    '''Return site-packages entries with packaging artifacts removed.

    Drops ``*.dist-info`` directories (wheel metadata) and ``*.pth``
    files, which are not importable modules or packages.
    '''
    return {x for x in site_packages_list
            if not x.endswith(('dist-info', '.pth'))}


def scripts_filter(scripts):
    ''' Removes .pyc files and __pycache__ from scripts '''
    return [x for x in scripts
            if x.split('.')[-1] != 'pyc' and x != '__pycache__']


class DirsContent(object):
    '''
    Snapshot of a virtualenv's bin/ and site-packages directory contents,
    taken before and after installation of a package so that the
    package's own files can be identified by subtraction.
    '''

    def __init__(self, bindir=None, lib_sitepackages=None):
        # Both attributes stay None until fill() is called.
        self.bindir = bindir
        self.lib_sitepackages = lib_sitepackages

    def fill(self, path):
        ''' Scans content of the virtualenv rooted at `path`. '''
        self.bindir = set(os.listdir(path + 'bin/'))
        # BUGFIX: use 'python*' rather than 'python?.?' so interpreters
        # with two-digit minor versions (e.g. python3.10) are matched too.
        self.lib_sitepackages = set(os.listdir(glob.glob(
            path + 'lib/python*/site-packages/')[0]))

    def __sub__(self, other):
        ''' Returns the difference of two DirsContent snapshots.

        Raises:
            ValueError: if either operand has not been filled yet.
        '''
        if any([self.bindir is None, self.lib_sitepackages is None,
                other.bindir is None, other.lib_sitepackages is None]):
            raise ValueError("Some of the attributes are uninitialized")
        return DirsContent(
            self.bindir - other.bindir,
            self.lib_sitepackages - other.lib_sitepackages)


class VirtualEnv(object):
    '''Installs a package into a scratch virtualenv to discover the
    packages, modules and scripts it provides.'''

    def __init__(self, name, temp_dir, name_convertor, base_python_version):
        self.name = name
        self.temp_dir = temp_dir
        self.name_convertor = name_convertor
        if not base_python_version:
            base_python_version = DEFAULT_PYTHON_VERSION
        python_version = 'python' + base_python_version
        self.env = VirtualEnvironment(temp_dir + '/venv',
                                      python=python_version)
        try:
            self.env.open_or_create()
        except (ve.VirtualenvCreationException,
                ve.VirtualenvReadonlyException):
            raise VirtualenvFailException('Failed to create virtualenv')
        self.dirs_before_install = DirsContent()
        self.dirs_after_install = DirsContent()
        # Baseline snapshot of the fresh virtualenv.
        self.dirs_before_install.fill(temp_dir + '/venv/')
        self.data = {}

    def install_package_to_venv(self):
        '''
        Installs the package given at construction time into the
        virtualenv without its dependencies, then snapshots the result.
        '''
        try:
            self.env.install(self.name, force=True, options=["--no-deps"])
        except (ve.PackageInstallationException,
                ve.VirtualenvReadonlyException):
            raise VirtualenvFailException(
                'Failed to install package to virtualenv')
        self.dirs_after_install.fill(self.temp_dir + '/venv/')

    def get_dirs_differance(self):
        '''
        Computes the installed packages, py_modules and scripts from the
        before/after directory diff and stores them in self.data.
        (Method name, including the historical misspelling, kept for
        backward compatibility with existing callers.)
        '''
        try:
            diff = self.dirs_after_install - self.dirs_before_install
        except ValueError:
            raise VirtualenvFailException(
                "Some of the DirsContent attributes are uninitialized")
        # .pth presence is checked before the filter removes such files.
        self.data['has_pth'] = \
            any(x for x in diff.lib_sitepackages if x.endswith('.pth'))
        site_packages = site_packages_filter(diff.lib_sitepackages)
        # Entries without a module suffix are packages; the rest are
        # top-level modules (extension stripped).
        self.data['packages'] = sorted(
            p for p in site_packages if not p.endswith(MODULE_SUFFIXES))
        self.data['py_modules'] = sorted(set(
            os.path.splitext(m)[0] for m in site_packages - set(
                self.data['packages'])))
        self.data['scripts'] = scripts_filter(sorted(diff.bindir))
        logger.debug('Data from files differance in virtualenv:')
        logger.debug(pprint.pformat(self.data))

    @property
    def get_venv_data(self):
        '''Installs the package and returns the collected file data.'''
        self.install_package_to_venv()
        self.get_dirs_differance()
        return self.data
mit
gregswift/ansible
lib/ansible/plugins/filter/ipaddr.py
11
19122
# (c) 2014, Maciej Delmanowski <drybjed@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type

from functools import partial
import types

try:
    import netaddr
except ImportError:
    # in this case, we'll make the filters return error messages (see bottom)
    netaddr = None
else:
    class mac_linux(netaddr.mac_unix):
        pass
    mac_linux.word_fmt = '%.2x'

from ansible import errors

# NOTE: the bare `except:` clauses throughout this module are deliberate --
# the filters signal "not applicable / invalid input" by returning False
# rather than raising, and netaddr raises a variety of exception types.


# ---- IP address and network query helpers ----

def _empty_ipaddr_query(v, vtype):
    # We don't have any query to process, so just check what type the user
    # expects, and return the IP address in a correct format
    if v:
        if vtype == 'address':
            return str(v.ip)
        elif vtype == 'network':
            return str(v)

def _6to4_query(v, vtype, value):
    # Map a public IPv4 address to its 2002::/16 6to4 prefix, or validate
    # that an IPv6 value lies inside 2002::/16.
    if v.version == 4:

        if v.size == 1:
            ipconv = str(v.ip)
        elif v.size > 1:
            if v.ip != v.network:
                ipconv = str(v.ip)
            else:
                ipconv = False

        if ipaddr(ipconv, 'public'):
            numbers = list(map(int, ipconv.split('.')))

        # NOTE(review): if the address is not public, `numbers` is unbound
        # and the NameError below is swallowed, returning False.
        try:
            return '2002:{:02x}{:02x}:{:02x}{:02x}::1/48'.format(*numbers)
        except:
            return False

    elif v.version == 6:
        if vtype == 'address':
            if ipaddr(str(v), '2002::/16'):
                return value
        elif vtype == 'network':
            if v.ip != v.network:
                if ipaddr(str(v.ip), '2002::/16'):
                    return value
            else:
                return False

def _ip_query(v):
    if v.size == 1:
        return str(v.ip)
    if v.size > 1:
        # /31 and /32 clauses fall through here; host bit must be set
        if v.ip != v.network:
            return str(v.ip)

def _gateway_query(v):
    if v.size > 1:
        if v.ip != v.network:
            return str(v.ip) + '/' + str(v.prefixlen)

def _bool_ipaddr_query(v):
    if v:
        return True

def _broadcast_query(v):
    if v.size > 1:
        return str(v.broadcast)

def _cidr_query(v):
    return str(v)

def _cidr_lookup_query(v, iplist, value):
    # Membership test of v inside the pre-built IPSet (see ipaddr()).
    try:
        if v in iplist:
            return value
    except:
        return False

def _host_query(v):
    if v.size == 1:
        return str(v)
    elif v.size > 1:
        if v.ip != v.network:
            return str(v.ip) + '/' + str(v.prefixlen)

def _hostmask_query(v):
    return str(v.hostmask)

def _int_query(v, vtype):
    if vtype == 'address':
        return int(v.ip)
    elif vtype == 'network':
        return str(int(v.ip)) + '/' + str(int(v.prefixlen))

def _ipv4_query(v, value):
    if v.version == 6:
        # 6to4 / mapped conversions can fail for arbitrary IPv6 addresses
        try:
            return str(v.ipv4())
        except:
            return False
    else:
        return value

def _ipv6_query(v, value):
    if v.version == 4:
        return str(v.ipv6())
    else:
        return value

def _link_local_query(v, value):
    v_ip = netaddr.IPAddress(str(v.ip))
    if v.version == 4:
        # NOTE(review): RFC 3927 link-local is 169.254.0.0/16; this checks
        # only /24 -- confirm intended behavior before changing.
        if ipaddr(str(v_ip), '169.254.0.0/24'):
            return value

    elif v.version == 6:
        if ipaddr(str(v_ip), 'fe80::/10'):
            return value

def _loopback_query(v, value):
    v_ip = netaddr.IPAddress(str(v.ip))
    if v_ip.is_loopback():
        return value

def _multicast_query(v, value):
    if v.is_multicast():
        return value

def _net_query(v):
    if v.size > 1:
        if v.ip == v.network:
            return str(v.network) + '/' + str(v.prefixlen)

def _netmask_query(v):
    if v.size > 1:
        return str(v.netmask)

def _network_query(v):
    if v.size > 1:
        return str(v.network)

def _prefix_query(v):
    return int(v.prefixlen)

def _private_query(v, value):
    if v.is_private():
        return value

def _public_query(v, value):
    v_ip = netaddr.IPAddress(str(v.ip))
    if v_ip.is_unicast() and not v_ip.is_private() and \
        not v_ip.is_loopback() and not v_ip.is_netmask() and \
        not v_ip.is_hostmask():
        return value

def _revdns_query(v):
    v_ip = netaddr.IPAddress(str(v.ip))
    return v_ip.reverse_dns

def _size_query(v):
    return v.size

def _subnet_query(v):
    return str(v.cidr)

def _type_query(v):
    if v.size == 1:
        return 'address'
    if v.size > 1:
        if v.ip != v.network:
            return 'address'
        else:
            return 'network'

def _unicast_query(v, value):
    if v.is_unicast():
        return value

def _version_query(v):
    return v.version

def _wrap_query(v, vtype, value):
    # Bracket IPv6 literals for use in URLs; IPv4 passes through.
    if v.version == 6:
        if vtype == 'address':
            return '[' + str(v.ip) + ']'
        elif vtype == 'network':
            return '[' + str(v.ip) + ']/' + str(v.prefixlen)
    else:
        return value


# ---- HWaddr query helpers ----
def _bare_query(v):
    v.dialect = netaddr.mac_bare
    return str(v)

def _bool_hwaddr_query(v):
    if v:
        return True

def _cisco_query(v):
    v.dialect = netaddr.mac_cisco
    return str(v)

def _empty_hwaddr_query(v, value):
    if v:
        return value

def _linux_query(v):
    v.dialect = mac_linux
    return str(v)

def _postgresql_query(v):
    v.dialect = netaddr.mac_pgsql
    return str(v)

def _unix_query(v):
    v.dialect = netaddr.mac_unix
    return str(v)

def _win_query(v):
    v.dialect = netaddr.mac_eui48
    return str(v)


# ---- IP address and network filters ----

def ipaddr(value, query = '', version = False, alias = 'ipaddr'):
    ''' Check if string is an IP address or network and filter it '''

    # Maps query name -> names of the extra local variables its helper
    # needs (looked up via locals() below).
    query_func_extra_args = {
        '': ('vtype',),
        '6to4': ('vtype', 'value'),
        'cidr_lookup': ('iplist', 'value'),
        'int': ('vtype',),
        'ipv4': ('value',),
        'ipv6': ('value',),
        'link-local': ('value',),
        'loopback': ('value',),
        'lo': ('value',),
        'multicast': ('value',),
        'private': ('value',),
        'public': ('value',),
        'unicast': ('value',),
        'wrap': ('vtype', 'value'),
        }
    # Dispatch table: query name -> helper function.
    query_func_map = {
        '': _empty_ipaddr_query,
        '6to4': _6to4_query,
        'address': _ip_query,
        'address/prefix': _gateway_query,
        'bool': _bool_ipaddr_query,
        'broadcast': _broadcast_query,
        'cidr': _cidr_query,
        'cidr_lookup': _cidr_lookup_query,
        'gateway': _gateway_query,
        'gw': _gateway_query,
        'host': _host_query,
        'host/prefix': _gateway_query,
        'hostmask': _hostmask_query,
        'hostnet': _gateway_query,
        'int': _int_query,
        'ip': _ip_query,
        'ipv4': _ipv4_query,
        'ipv6': _ipv6_query,
        'link-local': _link_local_query,
        'lo': _loopback_query,
        'loopback': _loopback_query,
        'multicast': _multicast_query,
        'net': _net_query,
        'netmask': _netmask_query,
        'network': _network_query,
        'prefix': _prefix_query,
        'private': _private_query,
        'public': _public_query,
        'revdns': _revdns_query,
        'router': _gateway_query,
        'size': _size_query,
        'subnet': _subnet_query,
        'type': _type_query,
        'unicast': _unicast_query,
        'v4': _ipv4_query,
        'v6': _ipv6_query,
        'version': _version_query,
        'wrap': _wrap_query,
        }

    vtype = None

    if not value:
        return False

    elif value == True:
        return False

    # Check if value is a list and parse each element
    elif isinstance(value, (list, tuple, types.GeneratorType)):

        _ret = []
        for element in value:
            if ipaddr(element, str(query), version):
                _ret.append(ipaddr(element, str(query), version))

        if _ret:
            return _ret
        else:
            return list()

    # Check if value is a number and convert it to an IP address
    elif str(value).isdigit():

        # We don't know what IP version to assume, so let's check IPv4 first,
        # then IPv6
        try:
            if ((not version) or (version and version == 4)):
                v = netaddr.IPNetwork('0.0.0.0/0')
                v.value = int(value)
                v.prefixlen = 32
            elif version and version == 6:
                v = netaddr.IPNetwork('::/0')
                v.value = int(value)
                v.prefixlen = 128

        # IPv4 didn't work the first time, so it definitely has to be IPv6
        except:
            try:
                v = netaddr.IPNetwork('::/0')
                v.value = int(value)
                v.prefixlen = 128

            # The value is too big for IPv6. Are you a nanobot?
            except:
                return False

        # We got an IP address, let's mark it as such
        value = str(v)
        vtype = 'address'

    # value has not been recognized, check if it's a valid IP string
    else:
        try:
            v = netaddr.IPNetwork(value)

            # value is a valid IP string, check if user specified
            # CIDR prefix or just an IP address, this will indicate default
            # output format
            try:
                address, prefix = value.split('/')
                vtype = 'network'
            except:
                vtype = 'address'

        # value hasn't been recognized, maybe it's a numerical CIDR?
        except:
            try:
                address, prefix = value.split('/')
                address.isdigit()
                address = int(address)
                prefix.isdigit()
                prefix = int(prefix)

            # It's not numerical CIDR, give up
            except:
                return False

            # It is something, so let's try and build a CIDR from the parts
            try:
                v = netaddr.IPNetwork('0.0.0.0/0')
                v.value = address
                v.prefixlen = prefix

            # It's not a valid IPv4 CIDR
            except:
                try:
                    v = netaddr.IPNetwork('::/0')
                    v.value = address
                    v.prefixlen = prefix

                # It's not a valid IPv6 CIDR. Give up.
                except:
                    return False

            # We have a valid CIDR, so let's write it in correct format
            value = str(v)
            vtype = 'network'

    # We have a query string but it's not in the known query types. Check if
    # that string is a valid subnet, if so, we can check later if given IP
    # address/network is inside that specific subnet
    try:
        ### ?? 6to4 and link-local were True here before. Should they still?
        if query and (query not in query_func_map or query == 'cidr_lookup') and ipaddr(query, 'network'):
            iplist = netaddr.IPSet([netaddr.IPNetwork(query)])
            query = 'cidr_lookup'
    except:
        pass

    # This code checks if value maches the IP version the user wants, ie. if
    # it's any version ("ipaddr()"), IPv4 ("ipv4()") or IPv6 ("ipv6()")
    # If version does not match, return False
    if version and v.version != version:
        return False

    extras = []
    for arg in query_func_extra_args.get(query, tuple()):
        extras.append(locals()[arg])
    try:
        return query_func_map[query](v, *extras)
    except KeyError:
        # Unknown query: a numeric query is treated as an index into the
        # network (e.g. "192.0.2.0/24 | ipaddr('1')" -> second host).
        try:
            float(query)
            if v.size == 1:
                if vtype == 'address':
                    return str(v.ip)
                elif vtype == 'network':
                    return str(v)

            elif v.size > 1:
                try:
                    return str(v[query]) + '/' + str(v.prefixlen)
                except:
                    return False

            else:
                return value

        except:
            raise errors.AnsibleFilterError(alias + ': unknown filter type: %s' % query)

    return False

def ipwrap(value, query = ''):
    # Like ipaddr(), but brackets IPv6 literals and passes non-IP
    # elements through unchanged instead of dropping them.
    try:
        if isinstance(value, (list, tuple, types.GeneratorType)):
            _ret = []
            for element in value:
                if ipaddr(element, query, version = False, alias = 'ipwrap'):
                    _ret.append(ipaddr(element, 'wrap'))
                else:
                    _ret.append(element)

            return _ret
        else:
            _ret = ipaddr(value, query, version = False, alias = 'ipwrap')
            if _ret:
                return ipaddr(_ret, 'wrap')
            else:
                return value

    except:
        return value

def ipv4(value, query = ''):
    return ipaddr(value, query, version = 4, alias = 'ipv4')

def ipv6(value, query = ''):
    return ipaddr(value, query, version = 6, alias = 'ipv6')


# Split given subnet into smaller subnets or find out the biggest subnet of
# a given IP address with given CIDR prefix
# Usage:
#
#  - address or address/prefix | ipsubnet
#      returns CIDR subnet of a given input
#
#  - address/prefix | ipsubnet(cidr)
#      returns number of possible subnets for given CIDR prefix
#
#  - address/prefix | ipsubnet(cidr, index)
#      returns new subnet with given CIDR prefix
#
#  - address | ipsubnet(cidr)
#      returns biggest subnet with given CIDR prefix that address belongs to
#
#  - address | ipsubnet(cidr, index)
#      returns next indexed subnet which contains given address
def ipsubnet(value, query = '', index = 'x'):
    ''' Manipulate IPv4/IPv6 subnets '''

    try:
        vtype = ipaddr(value, 'type')
        if vtype == 'address':
            v = ipaddr(value, 'cidr')
        elif vtype == 'network':
            v = ipaddr(value, 'subnet')

        value = netaddr.IPNetwork(v)
    except:
        return False

    if not query:
        return str(value)

    elif str(query).isdigit():
        vsize = ipaddr(v, 'size')
        query = int(query)

        # The non-numeric default index 'x' makes float(index) raise,
        # selecting the "count/containing subnet" branch below.
        try:
            float(index)
            index = int(index)

            if vsize > 1:
                try:
                    return str(list(value.subnet(query))[index])
                except:
                    return False

            elif vsize == 1:
                try:
                    return str(value.supernet(query)[index])
                except:
                    return False

        except:
            if vsize > 1:
                try:
                    return str(len(list(value.subnet(query))))
                except:
                    return False

            elif vsize == 1:
                try:
                    return str(value.supernet(query)[0])
                except:
                    return False

    return False

# Returns the nth host within a network described by value.
# Usage:
#
#  - address or address/prefix | nthhost(nth)
#      returns the nth host within the given network
def nthhost(value, query=''):
    ''' Get the nth host within a given network '''
    try:
        vtype = ipaddr(value, 'type')
        if vtype == 'address':
            v = ipaddr(value, 'cidr')
        elif vtype == 'network':
            v = ipaddr(value, 'subnet')

        value = netaddr.IPNetwork(v)
    except:
        return False

    if not query:
        return False

    try:
        vsize = ipaddr(v, 'size')
        nth = int(query)
        if value.size > nth:
            return value[nth]

    except ValueError:
        return False

    return False

# Returns the SLAAC address within a network for a given HW/MAC address.
# Usage:
#
#  - prefix | slaac(mac)
def slaac(value, query = ''):
    ''' Get the SLAAC address within given network '''
    try:
        vtype = ipaddr(value, 'type')
        if vtype == 'address':
            v = ipaddr(value, 'cidr')
        elif vtype == 'network':
            v = ipaddr(value, 'subnet')

        if v.version != 6:
            return False

        value = netaddr.IPNetwork(v)
    except:
        return False

    if not query:
        return False

    try:
        mac = hwaddr(query, alias = 'slaac')

        eui = netaddr.EUI(mac)
    except:
        return False

    return eui.ipv6(value.network)


# ---- HWaddr / MAC address filters ----

def hwaddr(value, query = '', alias = 'hwaddr'):
    ''' Check if string is a HW/MAC address and filter it '''

    query_func_extra_args = {
        '': ('value',),
        }
    query_func_map = {
        '': _empty_hwaddr_query,
        'bare': _bare_query,
        'bool': _bool_hwaddr_query,
        'cisco': _cisco_query,
        'eui48': _win_query,
        'linux': _linux_query,
        'pgsql': _postgresql_query,
        'postgresql': _postgresql_query,
        'psql': _postgresql_query,
        'unix': _unix_query,
        'win': _win_query,
        }

    try:
        v = netaddr.EUI(value)
    except:
        # NOTE(review): on parse failure with query '' or 'bool', v stays
        # unbound and the dispatch below raises NameError -- confirm.
        if query and query != 'bool':
            raise errors.AnsibleFilterError(alias + ': not a hardware address: %s' % value)

    extras = []
    for arg in query_func_extra_args.get(query, tuple()):
        extras.append(locals()[arg])
    try:
        return query_func_map[query](v, *extras)
    except KeyError:
        raise errors.AnsibleFilterError(alias + ': unknown filter type: %s' % query)

    return False

def macaddr(value, query = ''):
    return hwaddr(value, query, alias = 'macaddr')

def _need_netaddr(f_name, *args, **kwargs):
    # Stand-in registered when python-netaddr is missing (see filters()).
    raise errors.AnsibleFilterError('The {0} filter requires python-netaddr be'
            ' installed on the ansible controller'.format(f_name))

def ip4_hex(arg):
    ''' Convert an IPv4 address to Hexadecimal notation '''
    numbers = list(map(int, arg.split('.')))
    return '{:02x}{:02x}{:02x}{:02x}'.format(*numbers)


# ---- Ansible filters ----

class FilterModule(object):
    ''' IP address and network manipulation filters '''
    filter_map = {
        # IP addresses and networks
        'ipaddr': ipaddr,
        'ipwrap': ipwrap,
        'ipv4': ipv4,
        'ipv6': ipv6,
        'ipsubnet': ipsubnet,
        'nthhost': nthhost,
        'slaac': slaac,
        'ip4_hex': ip4_hex,

        # MAC / HW addresses
        'hwaddr': hwaddr,
        'macaddr': macaddr
    }

    def filters(self):
        if netaddr:
            return self.filter_map
        else:
            # Need to install python-netaddr for these filters to work
            return dict((f, partial(_need_netaddr, f)) for f in self.filter_map)
gpl-3.0
viniciusgama/blog_gae
django/db/backends/mysql/validation.py
392
1309
from django.db.backends import BaseDatabaseValidation


class DatabaseValidation(BaseDatabaseValidation):
    def validate_field(self, errors, opts, f):
        """
        There are some field length restrictions for MySQL:

        - Prior to version 5.0.3, character fields could not exceed 255
          characters in length.
        - No character (varchar) fields can have a length exceeding 255
          characters if they have a unique index on them.

        Appends any violation message for field `f` of model `opts` to
        `errors`.
        """
        # Imported lazily to avoid a circular import at module load time.
        from django.db import models
        db_version = self.connection.get_server_version()
        varchar_fields = (models.CharField, models.CommaSeparatedIntegerField,
                models.SlugField)
        if isinstance(f, varchar_fields) and f.max_length > 255:
            if db_version < (5, 0, 3):
                msg = '"%(name)s": %(cls)s cannot have a "max_length" greater than 255 when you are using a version of MySQL prior to 5.0.3 (you are using %(version)s).'
            elif f.unique:  # idiomatic truth test instead of "== True"
                msg = '"%(name)s": %(cls)s cannot have a "max_length" greater than 255 when using "unique=True".'
            else:
                msg = None

            if msg:
                errors.add(opts, msg % {'name': f.name,
                                        'cls': f.__class__.__name__,
                                        'version': '.'.join(str(n) for n in db_version[:3])})
bsd-3-clause
idano/home
lib/ansible/runner/connection.py
25
1456
# (c) 2012-2013, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#

################################################

from ansible import utils
from ansible.errors import AnsibleError
import ansible.constants as C

import os
import os.path


class Connection(object):
    ''' Handles abstract connections to remote hosts '''

    def __init__(self, runner):
        self.runner = runner

    def connect(self, host, port, user, password, transport, private_key_file):
        # Resolve the transport name to a connection plugin; a miss means
        # the requested transport is not available.
        plugin = utils.plugins.connection_loader.get(
            transport,
            self.runner,
            host,
            port,
            user=user,
            password=password,
            private_key_file=private_key_file,
        )
        if plugin is None:
            raise AnsibleError("unsupported connection type: %s" % transport)

        # Open the connection and remember it as the active one.
        self.active = plugin.connect()
        return self.active
gpl-3.0
anbc/Sravana
common/DataOperate.py
1
5887
#! /usr/bin/env python
#coding=utf-8
# Python 2 script: persists APK static-analysis results and dynamic monitor
# actions into MySQL-style tables via the project's `data` / `config` modules.
import logging, json
import data
import config
import datetime


class OperateAppStaticInfor:
    '''
    Saves APK static information by operating on the app_static_infor table.
    '''

    @staticmethod
    def SearchFileInforFromTaskTable(fileMd5):
        '''
        Look up an APK file record keyed by the file's MD5 digest.

        Parameter: fileMd5 -- MD5 digest of the APK file.
        Returns: (True, rows) when at least one record matches,
                 (False, <empty result>) when nothing was found.
        Note: the lookup key is the file MD5.
        '''
        sql = 'select * from app_static_infor where file_md5=%s'
        values = [fileMd5]
        rcds = data.iquery(config.g_curDatabaseName, sql, values)
        if len(rcds) > 0:
            return True, rcds
        else:
            return False, rcds

    @staticmethod
    def InsertAppStaticInfor(manifestInfor, apkInfor):
        '''
        Insert one row of static (manifest-derived) data into app_static_infor.

        Parameters: manifestInfor -- static data parsed from the manifest file
                                     (packageName, launcherActivity, permission /
                                     component collections, metaDataDict).
                    apkInfor -- per-file dict; only the "md5" key is read here.
        Note: reworked for multiple detection engines / multiple scan tasks.
        '''
        sql = ("INSERT INTO app_static_infor (file_md5, package_name, launcher_activity, uses_permission_infor,"
               "activity_infor, service_infor, receiver_infor, provider_infor, metaDataDict)"
               "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)")
        values = []
        values.append(apkInfor["md5"])
        values.append(manifestInfor.packageName)
        values.append(manifestInfor.launcherActivity)
        # Structured manifest collections are serialized to JSON strings for storage.
        usesPermissionJsonInfor = json.dumps(manifestInfor.usesPermission)
        values.append(usesPermissionJsonInfor)
        activityJsonInfor = json.dumps(manifestInfor.activityInfor)
        values.append(activityJsonInfor)
        serviceJsonInfor = json.dumps(manifestInfor.serviceInfor)
        values.append(serviceJsonInfor)
        reveiverJsonInfor = json.dumps(manifestInfor.receiverInfor)
        values.append(reveiverJsonInfor)
        providerJsonInfor = json.dumps(manifestInfor.providerInfor)
        values.append(providerJsonInfor)
        metaDataDictJsonInfor = json.dumps(manifestInfor.metaDataDict)
        values.append(metaDataDictJsonInfor)
        #adJsonInfor = json.dumps(manifestInfor.adInfor)    # advertising information
        #values.append("")
        data.iexecute(config.g_curDatabaseName, sql, values)


'''
app_dynamic_monitor_action
+-----------+-------------+------+-----+---------+----------------+
| Field     | Type        | Null | Key | Default | Extra          |
+-----------+-------------+------+-----+---------+----------------+
| id        | bigint(20)  | NO   | PRI | NULL    | auto_increment |
| action_id | varchar(64) | NO   |     | NULL    |                |
| file_md5  | varchar(64) | YES  |     | NULL    |                |
| time      | varchar(64) | YES  |     | NULL    |                |
| order     | int(11)     | YES  |     | NULL    |                |
| thread    | varchar(64) | YES  |     | NULL    |                |
| funciton  | varchar(64) | YES  |     | NULL    |                |
| fun_args  | text        | YES  |     | NULL    |                |
| fun_ret   | text        | YES  |     | NULL    |                |
| is_native | tinyint(1)  | YES  |     | 1       |                |
+-----------+-------------+------+-----+---------+----------------+

Definition of the monitorInfor dict:
monitorData["md5"] = fileMd5
monitorData["action_id"] = str(uuid.uuid1())
monitorData["time"] = time
monitorData["order"] = self.order
monitorData["thread"] = threadInfor
monitorData["function"] = functionInfor
monitorData["args"] = argsInfor
monitorData["isNative"] = isNativeInfor
'''
# NOTE(review): the schema dump above shows a column spelled `funciton`, while
# the INSERT below uses `function` -- confirm the real column name in the DB.
# NOTE(review): the dict sketch above omits the "ret" key that
# InsertMonitorInfor reads -- confirm all producers populate it.


class OperateMonitorInfor:
    '''
    Saves APK dynamic-monitoring actions by operating on the
    app_dynamic_monitor_action table.
    '''

    @staticmethod
    def SearchMonitorInfor(fileMd5):
        # Placeholder: lookup by file MD5 not yet implemented.
        pass

    @staticmethod
    def InsertMonitorInfor(monitorInfor):
        '''
        Insert one dynamic monitor action record into the table.

        Parameter: monitorInfor -- dict carrying one monitored call; keys read:
                   md5, action_id, time, order, thread, function, args, ret,
                   isNative.
        Note: the "time" value must contain a comma separating the timestamp
        from the millisecond part (e.g. "2013-12-18 10:49:33,234") because it
        is split on ',' and the second piece is converted with int().
        '''
        sql = ("INSERT INTO app_dynamic_monitor_action (file_md5 , action_id, time , micro_second,serial,"
               "thread, function, fun_args, fun_ret, is_native)"
               "VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s, %s)")
        # Split "YYYY-mm-dd HH:MM:SS,mmm" into the timestamp and the
        # millisecond column (stored separately as micro_second).
        time = monitorInfor["time"]
        timeData = time.split(",")
        values = []
        values.append(monitorInfor["md5"])
        values.append(monitorInfor["action_id"])
        values.append(timeData[0])
        values.append(int(timeData[1]))
        values.append(monitorInfor["order"])
        values.append(monitorInfor["thread"])
        values.append(monitorInfor["function"])
        values.append(monitorInfor["args"])
        values.append(monitorInfor["ret"])
        values.append(monitorInfor["isNative"])
        data.iexecute(config.g_curDatabaseName, sql, values)


def main():
    # Smoke test: insert one synthetic monitor record (Python 2 entry point).
    print 'start'
    monitorData={}
    monitorData["md5"] = "1"
    monitorData["action_id"] = "2"
    #monitorData["time"] = datetime.datetime.now()
    monitorData["time"] ="2013-12-18 10:49:33,234"
    #monitorData["time"] = "2009-06-09 00:24:08"
    #monitorData["time"] = "3"
    monitorData["order"] = 4
    monitorData["thread"] = "5"
    monitorData["function"] = "6"
    monitorData["args"] = "7"
    monitorData["isNative"] = "8"
    monitorData["ret"] = "9"
    OperateMonitorInfor.InsertMonitorInfor(monitorData)
    print 'end'


if __name__ == '__main__':
    main()
apache-2.0
landism/pants
src/python/pants/backend/docgen/tasks/markdown_to_html.py
11
8822
# coding=utf-8
# Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).

from __future__ import (absolute_import, division, generators, nested_scopes, print_function,
                        unicode_literals, with_statement)

import codecs
import os
import re

from pkg_resources import resource_string
from pygments.formatters.html import HtmlFormatter
from pygments.styles import get_all_styles

from pants.backend.docgen.targets.doc import Page
from pants.base.build_environment import get_buildroot
from pants.base.exceptions import TaskError
from pants.base.generator import Generator
from pants.base.workunit import WorkUnitLabel
from pants.build_graph.address import Address
from pants.task.task import Task
from pants.util import desktop
from pants.util.dirutil import safe_mkdir


def util():
  """Indirection function so we can lazy-import our utils.

  It's an expensive import that invokes re.compile a lot (via markdown and pygments),
  so we don't want to incur that cost unless we must.
  """
  from pants.backend.docgen.tasks import markdown_to_html_utils
  return markdown_to_html_utils


class MarkdownToHtml(Task):
  """Generate HTML from Markdown docs."""

  @classmethod
  def register_options(cls, register):
    register('--code-style', choices=list(get_all_styles()), default='friendly',
             fingerprint=True,
             help='Use this stylesheet for code highlights.')
    register('--open', type=bool,
             help='Open the generated documents in a browser.')
    register('--fragment', type=bool,
             fingerprint=True,
             help='Generate a fragment of html to embed in a page.')
    register('--ignore-failure', type=bool,
             fingerprint=True,
             help='Do not consider rendering errors to be build errors.')

  @classmethod
  def product_types(cls):
    return ['markdown_html', 'wiki_html']

  def __init__(self, *args, **kwargs):
    super(MarkdownToHtml, self).__init__(*args, **kwargs)
    self._templates_dir = os.path.join('templates', 'markdown')
    self.open = self.get_options().open
    self.fragment = self.get_options().fragment
    self.code_style = self.get_options().code_style

  def execute(self):
    """Render every Page target to HTML, registering outputs in the product maps."""
    # TODO(John Sirois): consider adding change detection

    outdir = os.path.join(self.get_options().pants_distdir, 'markdown')
    css_path = os.path.join(outdir, 'css', 'codehighlight.css')
    css = util().emit_codehighlight_css(css_path, self.code_style)
    if css:
      self.context.log.info('Emitted {}'.format(css))

    def is_page(target):
      return isinstance(target, Page)

    roots = set()
    interior_nodes = set()
    if self.open:
      # Compute the page dependency graph so we only auto-open root pages.
      dependencies_by_page = self.context.dependents(on_predicate=is_page, from_predicate=is_page)
      roots.update(dependencies_by_page.keys())
      for dependencies in dependencies_by_page.values():
        interior_nodes.update(dependencies)
        roots.difference_update(dependencies)
      for page in self.context.targets(is_page):
        # There are no in or out edges so we need to show this isolated page.
        if not page.dependencies and page not in interior_nodes:
          roots.add(page)

    with self.context.new_workunit(name='render', labels=[WorkUnitLabel.MULTITOOL]):
      plaingenmap = self.context.products.get('markdown_html')
      wikigenmap = self.context.products.get('wiki_html')
      show = []
      for page in self.context.targets(is_page):
        # NOTE: process_page/url_builder close over the loop variable `page`;
        # they are only called within this same iteration, so that is safe.
        def process_page(key, outdir, url_builder, genmap, fragment=False):
          # `outdir` here shadows the outer outdir on purpose: callers pass the
          # destination root for this particular rendering.
          if page.format == 'rst':
            with self.context.new_workunit(name='rst') as workunit:
              html_path = self.process_rst(
                workunit,
                page,
                os.path.join(outdir, util().page_to_html_path(page)),
                os.path.join(page.payload.sources.rel_path, page.source),
                self.fragment or fragment,
              )
          else:
            with self.context.new_workunit(name='md'):
              html_path = self.process_md(
                os.path.join(outdir, util().page_to_html_path(page)),
                os.path.join(page.payload.sources.rel_path, page.source),
                self.fragment or fragment,
                url_builder,
                css=css,
              )
          self.context.log.info('Processed {} to {}'.format(page.source, html_path))
          relpath = os.path.relpath(html_path, outdir)
          genmap.add(key, outdir, [relpath])
          return html_path

        def url_builder(linked_page):
          dest = util().page_to_html_path(linked_page)
          src_dir = os.path.dirname(util().page_to_html_path(page))
          return linked_page.name, os.path.relpath(dest, src_dir)

        page_path = os.path.join(outdir, 'html')
        html = process_page(page, page_path, url_builder, plaingenmap)

        if css and not self.fragment:
          # BUG FIX: was `list(css_path)`, which exploded the path string into
          # a list of single characters; the product map expects a list of
          # relative paths, matching the `[relpath]` usage in process_page.
          plaingenmap.add(page, self.workdir, [css_path])
        if self.open and page in roots:
          show.append(html)

        if page.provides:
          for wiki in page.provides:
            basedir = os.path.join(self.workdir, str(hash(wiki)))
            process_page((wiki, page), basedir, wiki.wiki.url_builder, wikigenmap, fragment=True)

    if show:
      try:
        desktop.ui_open(*show)
      except desktop.OpenError as e:
        raise TaskError(e)

  # Matches pants('path/to:target') style links, with an optional #anchor.
  PANTS_LINK = re.compile(r'''pants\(['"]([^)]+)['"]\)(#.*)?''')

  def process_md(self, output_path, source, fragmented, url_builder, css=None):
    """Render a markdown source file to `output_path`.

    :param output_path: destination html file path.
    :param source: buildroot-relative path of the markdown source.
    :param fragmented: if True, emit an embeddable html fragment instead of a full page.
    :param url_builder: callable mapping a linked Page target to (alias, url).
    :param css: path to the emitted stylesheet (used only for full pages).
    :returns: the path of the file written.
    """
    def parse_url(spec):
      match = self.PANTS_LINK.match(spec)
      if match:
        address = Address.parse(match.group(1), relative_to=get_buildroot())
        page = self.context.build_graph.get_target(address)
        anchor = match.group(2) or ''
        if not page:
          raise TaskError('Invalid markdown link to pants target: "{}" when processing {}. '
                          'Is your page missing a dependency on this target?'.format(
                          match.group(1), source))
        alias, url = url_builder(page)
        return alias, url + anchor
      else:
        return spec, spec

    def build_url(label):
      # Wikilink labels may be "alias|link" or just "link".
      components = label.split('|', 1)
      if len(components) == 1:
        return parse_url(label.strip())
      else:
        alias, link = components
        _, url = parse_url(link.strip())
        return alias, url

    wikilinks = util().WikilinksExtension(build_url)
    safe_mkdir(os.path.dirname(output_path))
    with codecs.open(output_path, 'w', 'utf-8') as output:
      source_path = os.path.join(get_buildroot(), source)
      with codecs.open(source_path, 'r', 'utf-8') as source_stream:
        md_html = util().markdown.markdown(
          source_stream.read(),
          extensions=['codehilite(guess_lang=False)',
                      'extra',
                      'tables',
                      'toc',
                      wikilinks,
                      util().IncludeExcerptExtension(source_path)],
        )
        if fragmented:
          # Fragments inline their style since there is no surrounding page.
          style_css = (HtmlFormatter(style=self.code_style)).get_style_defs('.codehilite')
          template = resource_string(__name__,
                                     os.path.join(self._templates_dir, 'fragment.mustache'))
          generator = Generator(template, style_css=style_css, md_html=md_html)
          generator.write(output)
        else:
          style_link = os.path.relpath(css, os.path.dirname(output_path))
          template = resource_string(__name__,
                                     os.path.join(self._templates_dir, 'page.mustache'))
          generator = Generator(template, style_link=style_link, md_html=md_html)
          generator.write(output)
        return output.name

  def process_rst(self, workunit, page, output_path, source, fragmented):
    """Render a reStructuredText source file to `output_path`.

    :raises TaskError: when rendering fails and --ignore-failure is not set.
    :returns: the path of the file written.
    """
    source_path = os.path.join(get_buildroot(), source)
    with codecs.open(source_path, 'r', 'utf-8') as source_stream:
      rst_html, returncode = util().rst_to_html(source_stream.read(),
                                                stderr=workunit.output('stderr'))
      if returncode != 0:
        message = '{} rendered with errors.'.format(source_path)
        if self.get_options().ignore_failure:
          self.context.log.warn(message)
        else:
          raise TaskError(message, exit_code=returncode, failed_targets=[page])

      template_path = os.path.join(self._templates_dir,
                                   'fragment.mustache' if fragmented else 'page.mustache')
      template = resource_string(__name__, template_path)
      generator = Generator(template, md_html=rst_html)

      safe_mkdir(os.path.dirname(output_path))
      with codecs.open(output_path, 'w', 'utf-8') as output:
        generator.write(output)
        return output.name
apache-2.0
Stanford-Online/edx-platform
cms/envs/bok_choy.py
1
6000
""" Settings for Bok Choy tests that are used when running Studio. Bok Choy uses two different settings files: 1. test_static_optimized is used when invoking collectstatic 2. bok_choy is used when running the tests Note: it isn't possible to have a single settings file, because Django doesn't support both generating static assets to a directory and also serving static from the same directory. """ import os from path import Path as path from openedx.core.release import RELEASE_LINE ########################## Prod-like settings ################################### # These should be as close as possible to the settings we use in production. # As in prod, we read in environment and auth variables from JSON files. # Unlike in prod, we use the JSON files stored in this repo. # This is a convenience for ensuring (a) that we can consistently find the files # and (b) that the files are the same in Jenkins as in local dev. os.environ['SERVICE_VARIANT'] = 'bok_choy_docker' if 'BOK_CHOY_HOSTNAME' in os.environ else 'bok_choy' os.environ['CONFIG_ROOT'] = path(__file__).abspath().dirname() from openedx.stanford.cms.envs.aws import * # pylint: disable=wildcard-import, unused-wildcard-import ######################### Testing overrides #################################### # Redirect to the test_root folder within the repo TEST_ROOT = REPO_ROOT / "test_root" GITHUB_REPO_ROOT = (TEST_ROOT / "data").abspath() LOG_DIR = (TEST_ROOT / "log").abspath() DATA_DIR = TEST_ROOT / "data" # Configure modulestore to use the test folder within the repo update_module_store_settings( MODULESTORE, module_store_options={ 'fs_root': (TEST_ROOT / "data").abspath(), }, xml_store_options={ 'data_dir': (TEST_ROOT / "data").abspath(), }, default_store=os.environ.get('DEFAULT_STORE', 'draft'), ) # Needed to enable licensing on video modules XBLOCK_SETTINGS.update({'VideoDescriptor': {'licensing_enabled': True}}) # Capture the console log via template includes, until webdriver supports log capture again 
CAPTURE_CONSOLE_LOG = True ############################ STATIC FILES ############################# # Enable debug so that static assets are served by Django DEBUG = True # Serve static files at /static directly from the staticfiles directory under test root # Note: optimized files for testing are generated with settings from test_static_optimized STATIC_URL = "/static/" STATICFILES_FINDERS = [ 'django.contrib.staticfiles.finders.FileSystemFinder', ] STATICFILES_DIRS = [ (TEST_ROOT / "staticfiles" / "cms").abspath(), ] DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' MEDIA_ROOT = TEST_ROOT / "uploads" WEBPACK_LOADER['DEFAULT']['STATS_FILE'] = TEST_ROOT / "staticfiles" / "cms" / "webpack-stats.json" # Silence noisy logs import logging LOG_OVERRIDES = [ ('track.middleware', logging.CRITICAL), ('edx.discussion', logging.CRITICAL), ] for log_name, log_level in LOG_OVERRIDES: logging.getLogger(log_name).setLevel(log_level) # Use the auto_auth workflow for creating users and logging them in FEATURES['AUTOMATIC_AUTH_FOR_TESTING'] = True FEATURES['RESTRICT_AUTOMATIC_AUTH'] = False # Enable milestones app FEATURES['MILESTONES_APP'] = True # Enable pre-requisite course FEATURES['ENABLE_PREREQUISITE_COURSES'] = True # Enable student notes FEATURES['ENABLE_EDXNOTES'] = True # Enable teams feature FEATURES['ENABLE_TEAMS'] = True # Enable custom content licensing FEATURES['LICENSING'] = True FEATURES['ENABLE_MOBILE_REST_API'] = True # Enable video bumper in Studio FEATURES['ENABLE_VIDEO_BUMPER'] = True # Enable video bumper in Studio settings FEATURES['ENABLE_ENROLLMENT_TRACK_USER_PARTITION'] = True # Whether archived courses (courses with end dates in the past) should be # shown in Studio in a separate list. 
FEATURES['ENABLE_SEPARATE_ARCHIVED_COURSES'] = True # Enable support for OpenBadges accomplishments FEATURES['ENABLE_OPENBADGES'] = True # Enable partner support link in Studio footer PARTNER_SUPPORT_EMAIL = 'partner-support@example.com' ########################### Entrance Exams ################################# FEATURES['ENTRANCE_EXAMS'] = True FEATURES['ENABLE_SPECIAL_EXAMS'] = True # Point the URL used to test YouTube availability to our stub YouTube server YOUTUBE_PORT = 9080 YOUTUBE['TEST_TIMEOUT'] = 5000 YOUTUBE_HOSTNAME = os.environ.get('BOK_CHOY_HOSTNAME', '127.0.0.1') YOUTUBE['API'] = "http://{0}:{1}/get_youtube_api/".format(YOUTUBE_HOSTNAME, YOUTUBE_PORT) YOUTUBE['METADATA_URL'] = "http://{0}:{1}/test_youtube/".format(YOUTUBE_HOSTNAME, YOUTUBE_PORT) YOUTUBE['TEXT_API']['url'] = "{0}:{1}/test_transcripts_youtube/".format(YOUTUBE_HOSTNAME, YOUTUBE_PORT) FEATURES['ENABLE_COURSEWARE_INDEX'] = True FEATURES['ENABLE_LIBRARY_INDEX'] = True FEATURES['ORGANIZATIONS_APP'] = True SEARCH_ENGINE = "search.tests.mock_search_engine.MockSearchEngine" # Path at which to store the mock index MOCK_SEARCH_BACKING_FILE = ( TEST_ROOT / "index_file.dat" ).abspath() # this secret key should be the same as lms/envs/bok_choy.py's SECRET_KEY = "very_secret_bok_choy_key" LMS_ROOT_URL = "http://localhost:8000" if RELEASE_LINE == "master": # On master, acceptance tests use edX books, not the default Open edX books. HELP_TOKENS_BOOKS = { 'learner': 'https://edx.readthedocs.io/projects/edx-guide-for-students', 'course_author': 'https://edx.readthedocs.io/projects/edx-partner-course-staff', } ########################## VIDEO TRANSCRIPTS STORAGE ############################ VIDEO_TRANSCRIPTS_SETTINGS = dict( VIDEO_TRANSCRIPTS_MAX_BYTES=3 * 1024 * 1024, # 3 MB STORAGE_KWARGS=dict( location=MEDIA_ROOT, base_url=MEDIA_URL, ), DIRECTORY_PREFIX='video-transcripts/', ) ##################################################################### # Lastly, see if the developer has any local overrides. 
try: from .private import * # pylint: disable=import-error except ImportError: pass
agpl-3.0
QiJune/Paddle
python/paddle/fluid/tests/unittests/transformer_model.py
1
16452
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. from __future__ import print_function from functools import partial import numpy as np import paddle.fluid as fluid import paddle.fluid.layers as layers pos_enc_param_names = ( "src_pos_enc_table", "trg_pos_enc_table", ) batch_size = 2 def position_encoding_init(n_position, d_pos_vec): """ Generate the initial values for the sinusoid position encoding table. """ position_enc = np.array([[ pos / np.power(10000, 2 * (j // 2) / d_pos_vec) for j in range(d_pos_vec) ] if pos != 0 else np.zeros(d_pos_vec) for pos in range(n_position)]) position_enc[1:, 0::2] = np.sin(position_enc[1:, 0::2]) # dim 2i position_enc[1:, 1::2] = np.cos(position_enc[1:, 1::2]) # dim 2i+1 return position_enc.astype("float32") def multi_head_attention(queries, keys, values, attn_bias, d_key, d_value, d_model, n_head=1, dropout_rate=0.): """ Multi-Head Attention. Note that attn_bias is added to the logit before computing softmax activiation to mask certain selected positions so that they will not considered in attention weights. """ if not (len(queries.shape) == len(keys.shape) == len(values.shape) == 3): raise ValueError( "Inputs: quries, keys and values should all be 3-D tensors.") def __compute_qkv(queries, keys, values, n_head, d_key, d_value): """ Add linear projection to queries, keys, and values. 
""" q = layers.fc(input=queries, size=d_key * n_head, param_attr=fluid.initializer.Xavier( uniform=False, fan_in=d_model * d_key, fan_out=n_head * d_key), bias_attr=False, num_flatten_dims=2) k = layers.fc(input=keys, size=d_key * n_head, param_attr=fluid.initializer.Xavier( uniform=False, fan_in=d_model * d_key, fan_out=n_head * d_key), bias_attr=False, num_flatten_dims=2) v = layers.fc(input=values, size=d_value * n_head, param_attr=fluid.initializer.Xavier( uniform=False, fan_in=d_model * d_value, fan_out=n_head * d_value), bias_attr=False, num_flatten_dims=2) return q, k, v def __split_heads(x, n_head): """ Reshape the last dimension of inpunt tensor x so that it becomes two dimensions and then transpose. Specifically, input a tensor with shape [bs, max_sequence_length, n_head * hidden_dim] then output a tensor with shape [bs, n_head, max_sequence_length, hidden_dim]. """ if n_head == 1: return x hidden_size = x.shape[-1] # FIXME(guosheng): Decouple the program desc with batch_size. reshaped = layers.reshape( x=x, shape=[batch_size, -1, n_head, hidden_size // n_head]) # permuate the dimensions into: # [batch_size, n_head, max_sequence_len, hidden_size_per_head] return layers.transpose(x=reshaped, perm=[0, 2, 1, 3]) def __combine_heads(x): """ Transpose and then reshape the last two dimensions of inpunt tensor x so that it becomes one dimension, which is reverse to __split_heads. """ if len(x.shape) == 3: return x if len(x.shape) != 4: raise ValueError("Input(x) should be a 4-D Tensor.") trans_x = layers.transpose(x, perm=[0, 2, 1, 3]) # FIXME(guosheng): Decouple the program desc with batch_size. return layers.reshape( x=trans_x, shape=list( map(int, [batch_size, -1, trans_x.shape[2] * trans_x.shape[3] ]))) def scaled_dot_product_attention(q, k, v, attn_bias, d_model, dropout_rate): """ Scaled Dot-Product Attention """ # FIXME(guosheng): Optimize the shape in reshape_op or softmax_op. 
# The current implementation of softmax_op only supports 2D tensor, # consequently it cannot be directly used here. # If to use the reshape_op, Besides, the shape of product inferred in # compile-time is not the actual shape in run-time. It cann't be used # to set the attribute of reshape_op. # So, here define the softmax for temporary solution. def __softmax(x, eps=1e-9): exp_out = layers.exp(x=x) sum_out = layers.reduce_sum(exp_out, dim=-1, keep_dim=False) return layers.elementwise_div(x=exp_out, y=sum_out, axis=0) scaled_q = layers.scale(x=q, scale=d_model**-0.5) product = layers.matmul(x=scaled_q, y=k, transpose_y=True) weights = __softmax(layers.elementwise_add(x=product, y=attn_bias)) if dropout_rate: weights = layers.dropout( weights, dropout_prob=dropout_rate, is_test=False) out = layers.matmul(weights, v) return out q, k, v = __compute_qkv(queries, keys, values, n_head, d_key, d_value) q = __split_heads(q, n_head) k = __split_heads(k, n_head) v = __split_heads(v, n_head) ctx_multiheads = scaled_dot_product_attention(q, k, v, attn_bias, d_model, dropout_rate) out = __combine_heads(ctx_multiheads) # Project back to the model size. proj_out = layers.fc(input=out, size=d_model, param_attr=fluid.initializer.Xavier(uniform=False), bias_attr=False, num_flatten_dims=2) return proj_out def positionwise_feed_forward(x, d_inner_hid, d_hid): """ Position-wise Feed-Forward Networks. This module consists of two linear transformations with a ReLU activation in between, which is applied to each position separately and identically. 
""" hidden = layers.fc(input=x, size=d_inner_hid, num_flatten_dims=2, param_attr=fluid.initializer.Uniform( low=-(d_hid**-0.5), high=(d_hid**-0.5)), act="relu") out = layers.fc(input=hidden, size=d_hid, num_flatten_dims=2, param_attr=fluid.initializer.Uniform( low=-(d_inner_hid**-0.5), high=(d_inner_hid**-0.5))) return out def pre_post_process_layer(prev_out, out, process_cmd, dropout=0.): """ Add residual connection, layer normalization and droput to the out tensor optionally according to the value of process_cmd. This will be used before or after multi-head attention and position-wise feed-forward networks. """ for cmd in process_cmd: if cmd == "a": # add residual connection out = out + prev_out if prev_out else out elif cmd == "n": # add layer normalization out = layers.layer_norm( out, begin_norm_axis=len(out.shape) - 1, param_attr=fluid.initializer.Constant(1.), bias_attr=fluid.initializer.Constant(0.)) elif cmd == "d": # add dropout if dropout: out = layers.dropout(out, dropout_prob=dropout, is_test=False) return out pre_process_layer = partial(pre_post_process_layer, None) post_process_layer = pre_post_process_layer def prepare_encoder(src_word, src_pos, src_vocab_size, src_emb_dim, src_pad_idx, src_max_len, dropout=0., pos_pad_idx=0, pos_enc_param_name=None): """Add word embeddings and position encodings. The output tensor has a shape of: [batch_size, max_src_length_in_batch, d_model]. This module is used at the bottom of the encoder stacks. """ src_word_emb = layers.embedding( src_word, size=[src_vocab_size, src_emb_dim], padding_idx=src_pad_idx, param_attr=fluid.initializer.Normal(0., 1.)) src_pos_enc = layers.embedding( src_pos, size=[src_max_len, src_emb_dim], padding_idx=pos_pad_idx, param_attr=fluid.ParamAttr( name=pos_enc_param_name, trainable=False)) enc_input = src_word_emb + src_pos_enc # FIXME(guosheng): Decouple the program desc with batch_size. 
enc_input = layers.reshape(x=enc_input, shape=[batch_size, -1, src_emb_dim]) return layers.dropout( enc_input, dropout_prob=dropout, is_test=False) if dropout else enc_input prepare_encoder = partial( prepare_encoder, pos_enc_param_name=pos_enc_param_names[0]) prepare_decoder = partial( prepare_encoder, pos_enc_param_name=pos_enc_param_names[1]) def encoder_layer(enc_input, attn_bias, n_head, d_key, d_value, d_model, d_inner_hid, dropout_rate=0.): """The encoder layers that can be stacked to form a deep encoder. This module consits of a multi-head (self) attention followed by position-wise feed-forward networks and both the two components companied with the post_process_layer to add residual connection, layer normalization and droput. """ attn_output = multi_head_attention(enc_input, enc_input, enc_input, attn_bias, d_key, d_value, d_model, n_head, dropout_rate) attn_output = post_process_layer(enc_input, attn_output, "dan", dropout_rate) ffd_output = positionwise_feed_forward(attn_output, d_inner_hid, d_model) return post_process_layer(attn_output, ffd_output, "dan", dropout_rate) def encoder(enc_input, attn_bias, n_layer, n_head, d_key, d_value, d_model, d_inner_hid, dropout_rate=0.): """ The encoder is composed of a stack of identical layers returned by calling encoder_layer. """ for i in range(n_layer): enc_output = encoder_layer(enc_input, attn_bias, n_head, d_key, d_value, d_model, d_inner_hid, dropout_rate) enc_input = enc_output return enc_output def decoder_layer(dec_input, enc_output, slf_attn_bias, dec_enc_attn_bias, n_head, d_key, d_value, d_model, d_inner_hid, dropout_rate=0.): """ The layer to be stacked in decoder part. The structure of this module is similar to that in the encoder part except a multi-head attention is added to implement encoder-decoder attention. 
""" slf_attn_output = multi_head_attention( dec_input, dec_input, dec_input, slf_attn_bias, d_key, d_value, d_model, n_head, dropout_rate, ) slf_attn_output = post_process_layer( dec_input, slf_attn_output, "dan", # residual connection + dropout + layer normalization dropout_rate, ) enc_attn_output = multi_head_attention( slf_attn_output, enc_output, enc_output, dec_enc_attn_bias, d_key, d_value, d_model, n_head, dropout_rate, ) enc_attn_output = post_process_layer( slf_attn_output, enc_attn_output, "dan", # residual connection + dropout + layer normalization dropout_rate, ) ffd_output = positionwise_feed_forward( enc_attn_output, d_inner_hid, d_model, ) dec_output = post_process_layer( enc_attn_output, ffd_output, "dan", # residual connection + dropout + layer normalization dropout_rate, ) return dec_output def decoder(dec_input, enc_output, dec_slf_attn_bias, dec_enc_attn_bias, n_layer, n_head, d_key, d_value, d_model, d_inner_hid, dropout_rate=0.): """ The decoder is composed of a stack of identical decoder_layer layers. 
""" for i in range(n_layer): dec_output = decoder_layer( dec_input, enc_output, dec_slf_attn_bias, dec_enc_attn_bias, n_head, d_key, d_value, d_model, d_inner_hid, dropout_rate, ) dec_input = dec_output return dec_output def transformer( src_vocab_size, trg_vocab_size, max_length, n_layer, n_head, d_key, d_value, d_model, d_inner_hid, dropout_rate, src_pad_idx, trg_pad_idx, pos_pad_idx, ): file_obj = fluid.layers.open_recordio_file( filename='/tmp/wmt16.recordio', shapes=[ [batch_size * max_length, 1], [batch_size * max_length, 1], [batch_size * max_length, 1], [batch_size * max_length, 1], [batch_size, n_head, max_length, max_length], [batch_size, n_head, max_length, max_length], [batch_size, n_head, max_length, max_length], [batch_size * max_length, 1], [batch_size * max_length, 1], ], dtypes=[ 'int64', 'int64', 'int64', 'int64', 'float32', 'float32', 'float32', 'int64', 'float32', ], lod_levels=[0] * 9) src_word, src_pos, trg_word, trg_pos, src_slf_attn_bias, trg_slf_attn_bias, trg_src_attn_bias, gold, weights = fluid.layers.read_file( file_obj) enc_input = prepare_encoder( src_word, src_pos, src_vocab_size, d_model, src_pad_idx, max_length, dropout_rate, ) enc_output = encoder( enc_input, src_slf_attn_bias, n_layer, n_head, d_key, d_value, d_model, d_inner_hid, dropout_rate, ) dec_input = prepare_decoder( trg_word, trg_pos, trg_vocab_size, d_model, trg_pad_idx, max_length, dropout_rate, ) dec_output = decoder( dec_input, enc_output, trg_slf_attn_bias, trg_src_attn_bias, n_layer, n_head, d_key, d_value, d_model, d_inner_hid, dropout_rate, ) # TODO(guosheng): Share the weight matrix between the embedding layers and # the pre-softmax linear transformation. 
predict = layers.reshape( x=layers.fc(input=dec_output, size=trg_vocab_size, param_attr=fluid.initializer.Xavier(uniform=False), bias_attr=False, num_flatten_dims=2), shape=[-1, trg_vocab_size], act="softmax") cost = layers.cross_entropy(input=predict, label=gold) weighted_cost = cost * weights return layers.reduce_sum(weighted_cost)
apache-2.0
Stanford-Online/edx-analytics-pipeline
edx/analytics/tasks/tools/analyze/parser.py
5
2046
import re


class LogFileParser(object):
    """Incrementally parses a log file into messages.

    A message begins on a line matching `message_pattern` and spans every
    following line up to (but not including) the next line that matches the
    pattern.  The pattern's named groups are collected into a dict; the group
    named `content_group_name` is replaced by the full multi-line message
    content, and the dict is passed to `message_factory`.

    :param log_file_obj: seekable file-like object containing the log.
    :param message_pattern: regex with named groups; must match the first line
        of every message.
    :param message_factory: callable applied to the dict of matched groups
        (defaults to `dict`).
    :param content_group_name: name of the group holding message content.
    """

    def __init__(self, log_file_obj, message_pattern, message_factory=dict, content_group_name='content'):
        self.log_file = log_file_obj
        self.message_pattern = message_pattern
        self.message_factory = message_factory
        self.content_group_name = content_group_name
        self.line_number = 0
        # Lazy generator: nothing is read until the first next_message().
        self.messages = self.parse_messages()

    def parse_messages(self):
        """Yield one factory-built message at a time until EOF."""
        message = self.read_line()
        while message:
            message_match = re.match(self.message_pattern, message)
            if not message_match:
                # BUG FIX: the message was previously passed as a second
                # ValueError argument (logging-style), so it was never
                # interpolated into the exception text.
                raise ValueError('Unable to parse message "%s"' % message)

            matched_groups = dict(message_match.groupdict())
            first_line_content = matched_groups.get(self.content_group_name, '')
            matched_groups[self.content_group_name] = self.read_content(first_line_content)
            yield self.message_factory(matched_groups)
            message = self.read_line()

    def read_content(self, first_line_content):
        """Accumulate continuation lines until the next message header or EOF."""
        content = first_line_content
        next_message_match = None
        while not next_message_match:
            next_line = self.peek_line()
            if not next_line:
                break
            next_message_match = re.match(self.message_pattern, next_line)
            if not next_message_match:
                self.read_line()
                content += next_line
        return content

    def peek_line(self):
        """Return the next line without consuming it (restores file position)."""
        pos = self.log_file.tell()
        line = self.log_file.readline()
        self.log_file.seek(pos)
        return line

    def read_line(self):
        line = self.log_file.readline()
        if line:
            self.line_number += 1
        return line

    def next_message(self):
        """Return the next parsed message, or None at end of file."""
        try:
            return next(self.messages)
        except StopIteration:
            return None

    def peek_message(self):
        """Return the next message without logically consuming it.

        Works by rewinding the underlying file position; the generator then
        re-reads the same bytes on the next call.  NOTE(review): line_number
        over-counts lines consumed during a peek -- confirm callers don't rely
        on it being exact.
        """
        pos = self.log_file.tell()
        message = self.next_message()
        self.log_file.seek(pos)
        return message
agpl-3.0
rapilabs/django
django/template/loaders/locmem.py
464
1194
""" Wrapper for loading templates from a plain Python dict. """ import warnings from django.template import Origin, TemplateDoesNotExist from django.utils.deprecation import RemovedInDjango20Warning from .base import Loader as BaseLoader class Loader(BaseLoader): def __init__(self, engine, templates_dict): self.templates_dict = templates_dict super(Loader, self).__init__(engine) def get_contents(self, origin): try: return self.templates_dict[origin.name] except KeyError: raise TemplateDoesNotExist(origin) def get_template_sources(self, template_name): yield Origin( name=template_name, template_name=template_name, loader=self, ) def load_template_source(self, template_name, template_dirs=None): warnings.warn( 'The load_template_sources() method is deprecated. Use ' 'get_template() or get_contents() instead.', RemovedInDjango20Warning, ) try: return self.templates_dict[template_name], template_name except KeyError: raise TemplateDoesNotExist(template_name)
bsd-3-clause
krieger-od/nwjs_chromium.src
tools/chrome_proxy/integration_tests/chrome_proxy_measurements.py
9
14708
# Copyright 2014 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import base64 import logging import urlparse from integration_tests import chrome_proxy_metrics as metrics from metrics import loading from telemetry.core import exceptions from telemetry.page import page_test class ChromeProxyLatency(page_test.PageTest): """Chrome proxy latency measurement.""" def __init__(self, *args, **kwargs): super(ChromeProxyLatency, self).__init__(*args, **kwargs) def WillNavigateToPage(self, page, tab): tab.ClearCache(force=True) def ValidateAndMeasurePage(self, page, tab, results): # Wait for the load event. tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300) loading.LoadingMetric().AddResults(tab, results) class ChromeProxyDataSaving(page_test.PageTest): """Chrome proxy data daving measurement.""" def __init__(self, *args, **kwargs): super(ChromeProxyDataSaving, self).__init__(*args, **kwargs) self._metrics = metrics.ChromeProxyMetric() def WillNavigateToPage(self, page, tab): tab.ClearCache(force=True) self._metrics.Start(page, tab) def ValidateAndMeasurePage(self, page, tab, results): # Wait for the load event. tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300) self._metrics.Stop(page, tab) self._metrics.AddResultsForDataSaving(tab, results) class ChromeProxyValidation(page_test.PageTest): """Base class for all chrome proxy correctness measurements.""" def __init__(self, restart_after_each_page=False): super(ChromeProxyValidation, self).__init__( needs_browser_restart_after_each_page=restart_after_each_page) self._metrics = metrics.ChromeProxyMetric() self._page = None # Whether a timeout exception is expected during the test. self._expect_timeout = False def CustomizeBrowserOptions(self, options): # Enable the chrome proxy (data reduction proxy). 
options.AppendExtraBrowserArgs('--enable-spdy-proxy-auth') def WillNavigateToPage(self, page, tab): tab.ClearCache(force=True) assert self._metrics self._metrics.Start(page, tab) def ValidateAndMeasurePage(self, page, tab, results): self._page = page # Wait for the load event. tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300) assert self._metrics self._metrics.Stop(page, tab) self.AddResults(tab, results) def AddResults(self, tab, results): raise NotImplementedError def StopBrowserAfterPage(self, browser, page): # pylint: disable=W0613 if hasattr(page, 'restart_after') and page.restart_after: return True return False def RunNavigateSteps(self, page, tab): # The redirect from safebrowsing causes a timeout. Ignore that. try: super(ChromeProxyValidation, self).RunNavigateSteps(page, tab) except exceptions.DevtoolsTargetCrashException, e: if self._expect_timeout: logging.warning('Navigation timeout on page %s', page.name if page.name else page.url) else: raise e class ChromeProxyHeaders(ChromeProxyValidation): """Correctness measurement for response headers.""" def __init__(self): super(ChromeProxyHeaders, self).__init__(restart_after_each_page=True) def AddResults(self, tab, results): self._metrics.AddResultsForHeaderValidation(tab, results) class ChromeProxyBypass(ChromeProxyValidation): """Correctness measurement for bypass responses.""" def __init__(self): super(ChromeProxyBypass, self).__init__(restart_after_each_page=True) def AddResults(self, tab, results): self._metrics.AddResultsForBypass(tab, results) class ChromeProxyCorsBypass(ChromeProxyValidation): """Correctness measurement for bypass responses for CORS requests.""" def __init__(self): super(ChromeProxyCorsBypass, self).__init__(restart_after_each_page=True) def ValidateAndMeasurePage(self, page, tab, results): # The test page sets window.xhrRequestCompleted to true when the XHR fetch # finishes. 
tab.WaitForJavaScriptExpression('window.xhrRequestCompleted', 300) super(ChromeProxyCorsBypass, self).ValidateAndMeasurePage(page, tab, results) def AddResults(self, tab, results): self._metrics.AddResultsForCorsBypass(tab, results) class ChromeProxyBlockOnce(ChromeProxyValidation): """Correctness measurement for block-once responses.""" def __init__(self): super(ChromeProxyBlockOnce, self).__init__(restart_after_each_page=True) def AddResults(self, tab, results): self._metrics.AddResultsForBlockOnce(tab, results) class ChromeProxySafebrowsing(ChromeProxyValidation): """Correctness measurement for safebrowsing.""" def __init__(self): super(ChromeProxySafebrowsing, self).__init__() def WillNavigateToPage(self, page, tab): super(ChromeProxySafebrowsing, self).WillNavigateToPage(page, tab) self._expect_timeout = True def AddResults(self, tab, results): self._metrics.AddResultsForSafebrowsing(tab, results) _FAKE_PROXY_AUTH_VALUE = 'aabbccdd3b7579186c1b0620614fdb1f0000ffff' _TEST_SERVER = 'chromeproxy-test.appspot.com' _TEST_SERVER_DEFAULT_URL = 'http://' + _TEST_SERVER + '/default' # We rely on the chromeproxy-test server to facilitate some of the tests. # The test server code is at <TBD location> and runs at _TEST_SERVER # # The test server allow request to override response status, headers, and # body through query parameters. See GetResponseOverrideURL. def GetResponseOverrideURL(url=_TEST_SERVER_DEFAULT_URL, respStatus=0, respHeader="", respBody=""): """ Compose the request URL with query parameters to override the chromeproxy-test server response. """ queries = [] if respStatus > 0: queries.append('respStatus=%d' % respStatus) if respHeader: queries.append('respHeader=%s' % base64.b64encode(respHeader)) if respBody: queries.append('respBody=%s' % base64.b64encode(respBody)) if len(queries) == 0: return url "&".join(queries) # url has query already if urlparse.urlparse(url).query: return url + '&' + "&".join(queries) else: return url + '?' 
+ "&".join(queries) class ChromeProxyHTTPFallbackProbeURL(ChromeProxyValidation): """Correctness measurement for proxy fallback. In this test, the probe URL does not return 'OK'. Chrome is expected to use the fallback proxy. """ def __init__(self): super(ChromeProxyHTTPFallbackProbeURL, self).__init__() def CustomizeBrowserOptions(self, options): super(ChromeProxyHTTPFallbackProbeURL, self).CustomizeBrowserOptions(options) # Use the test server probe URL which returns the response # body as specified by respBody. probe_url = GetResponseOverrideURL( respBody='not OK') options.AppendExtraBrowserArgs( '--data-reduction-proxy-probe-url=%s' % probe_url) def AddResults(self, tab, results): self._metrics.AddResultsForHTTPFallback(tab, results) class ChromeProxyHTTPFallbackViaHeader(ChromeProxyValidation): """Correctness measurement for proxy fallback. In this test, the configured proxy is the chromeproxy-test server which will send back a response without the expected Via header. Chrome is expected to use the fallback proxy and add the configured proxy to the bad proxy list. """ def __init__(self): super(ChromeProxyHTTPFallbackViaHeader, self).__init__( restart_after_each_page=True) def CustomizeBrowserOptions(self, options): super(ChromeProxyHTTPFallbackViaHeader, self).CustomizeBrowserOptions(options) options.AppendExtraBrowserArgs('--ignore-certificate-errors') options.AppendExtraBrowserArgs( '--spdy-proxy-auth-origin=http://%s' % _TEST_SERVER) options.AppendExtraBrowserArgs( '--spdy-proxy-auth-value=%s' % _FAKE_PROXY_AUTH_VALUE) def AddResults(self, tab, results): proxies = [ _TEST_SERVER + ":80", self._metrics.effective_proxies['fallback'], self._metrics.effective_proxies['direct']] bad_proxies = [_TEST_SERVER + ":80", metrics.PROXY_SETTING_HTTP] self._metrics.AddResultsForHTTPFallback(tab, results, proxies, bad_proxies) class ChromeProxyClientVersion(ChromeProxyValidation): """Correctness measurement for version directives in Chrome-Proxy header. 
The test verifies that the version information provided in the Chrome-Proxy request header overrides any version, if specified, that is provided in the user agent string. """ def __init__(self): super(ChromeProxyClientVersion, self).__init__() def CustomizeBrowserOptions(self, options): super(ChromeProxyClientVersion, self).CustomizeBrowserOptions(options) options.AppendExtraBrowserArgs('--user-agent="Chrome/32.0.1700.99"') def AddResults(self, tab, results): self._metrics.AddResultsForClientVersion(tab, results) class ChromeProxyClientType(ChromeProxyValidation): """Correctness measurement for Chrome-Proxy header client type directives.""" def __init__(self): super(ChromeProxyClientType, self).__init__(restart_after_each_page=True) self._chrome_proxy_client_type = None def AddResults(self, tab, results): # Get the Chrome-Proxy client type from the first page in the page set, so # that the client type value can be used to determine which of the later # pages in the page set should be bypassed. if not self._chrome_proxy_client_type: client_type = self._metrics.GetClientTypeFromRequests(tab) if client_type: self._chrome_proxy_client_type = client_type self._metrics.AddResultsForClientType(tab, results, self._chrome_proxy_client_type, self._page.bypass_for_client_type) class ChromeProxyHTTPToDirectFallback(ChromeProxyValidation): """Correctness measurement for HTTP proxy fallback to direct.""" def __init__(self): super(ChromeProxyHTTPToDirectFallback, self).__init__( restart_after_each_page=True) def CustomizeBrowserOptions(self, options): super(ChromeProxyHTTPToDirectFallback, self).CustomizeBrowserOptions(options) # Set the primary proxy to something that will fail to be resolved so that # this test will run using the HTTP fallback proxy. 
options.AppendExtraBrowserArgs( '--spdy-proxy-auth-origin=http://nonexistent.googlezip.net') def WillNavigateToPage(self, page, tab): # Attempt to load a page through the nonexistent primary proxy in order to # cause a proxy fallback, and have this test run starting from the HTTP # fallback proxy. tab.Navigate(_TEST_SERVER_DEFAULT_URL) tab.WaitForJavaScriptExpression('performance.timing.loadEventStart', 300) proxies = [ 'nonexistent.googlezip.net:80', self._metrics.effective_proxies['fallback'], self._metrics.effective_proxies['direct']] # TODO(sclittle): Remove this dependency on net-internals#proxy once an # alternative method of verifying that Chrome is on the fallback proxy # exists. self._metrics.VerifyProxyInfo(tab, proxies, proxies[:1]) super(ChromeProxyHTTPToDirectFallback, self).WillNavigateToPage(page, tab) def AddResults(self, tab, results): self._metrics.AddResultsForHTTPToDirectFallback(tab, results) class ChromeProxyExplicitBypass(ChromeProxyValidation): """Correctness measurement for explicit proxy bypasses. In this test, the configured proxy is the chromeproxy-test server which will send back a response without the expected Via header. Chrome is expected to use the fallback proxy and add the configured proxy to the bad proxy list. 
""" def __init__(self): super(ChromeProxyExplicitBypass, self).__init__( restart_after_each_page=True) def CustomizeBrowserOptions(self, options): super(ChromeProxyExplicitBypass, self).CustomizeBrowserOptions(options) options.AppendExtraBrowserArgs('--ignore-certificate-errors') options.AppendExtraBrowserArgs( '--spdy-proxy-auth-origin=http://%s' % _TEST_SERVER) options.AppendExtraBrowserArgs( '--spdy-proxy-auth-value=%s' % _FAKE_PROXY_AUTH_VALUE) def AddResults(self, tab, results): bad_proxies = [{ 'proxy': _TEST_SERVER + ':80', 'retry_seconds_low': self._page.bypass_seconds_low, 'retry_seconds_high': self._page.bypass_seconds_high }] if self._page.num_bypassed_proxies == 2: bad_proxies.append({ 'proxy': self._metrics.effective_proxies['fallback'], 'retry_seconds_low': self._page.bypass_seconds_low, 'retry_seconds_high': self._page.bypass_seconds_high }) else: # Even if the test page only causes the primary proxy to be bypassed, # Chrome will attempt to fetch the favicon for the test server through # the data reduction proxy, which will cause a "block=0" bypass. bad_proxies.append({'proxy': self._metrics.effective_proxies['fallback']}) self._metrics.AddResultsForExplicitBypass(tab, results, bad_proxies) class ChromeProxySmoke(ChromeProxyValidation): """Smoke measurement for basic chrome proxy correctness.""" def __init__(self): super(ChromeProxySmoke, self).__init__() def WillNavigateToPage(self, page, tab): super(ChromeProxySmoke, self).WillNavigateToPage(page, tab) if page.name == 'safebrowsing': self._expect_timeout = True def AddResults(self, tab, results): # Map a page name to its AddResults func. 
page_to_metrics = { 'header validation': [self._metrics.AddResultsForHeaderValidation], 'compression: image': [ self._metrics.AddResultsForHeaderValidation, self._metrics.AddResultsForDataSaving, ], 'compression: javascript': [ self._metrics.AddResultsForHeaderValidation, self._metrics.AddResultsForDataSaving, ], 'compression: css': [ self._metrics.AddResultsForHeaderValidation, self._metrics.AddResultsForDataSaving, ], 'bypass': [self._metrics.AddResultsForBypass], 'safebrowsing': [self._metrics.AddResultsForSafebrowsing], } if not self._page.name in page_to_metrics: raise page_test.MeasurementFailure( 'Invalid page name (%s) in smoke. Page name must be one of:\n%s' % ( self._page.name, page_to_metrics.keys())) for add_result in page_to_metrics[self._page.name]: add_result(tab, results)
bsd-3-clause
frewsxcv/servo
components/style/binding_tools/regen.py
1
24288
#!/usr/bin/env python # This Source Code Form is subject to the terms of the Mozilla Public # License, v. 2.0. If a copy of the MPL was not distributed with this # file, You can obtain one at http://mozilla.org/MPL/2.0/. from __future__ import print_function import os import sys import argparse import platform import copy import subprocess import regen_atoms DESCRIPTION = 'Regenerate the rust version of the structs or the bindings file.' TOOLS_DIR = os.path.dirname(os.path.abspath(__file__)) COMMON_BUILD_KEY = "__common__" COMPILATION_TARGETS = { # Flags common for all the targets. COMMON_BUILD_KEY: { "flags": [ "--no-unstable-rust", "--no-type-renaming", ], "clang_flags": [ "-x", "c++", "-std=c++14", "-DTRACING=1", "-DIMPL_LIBXUL", "-DMOZ_STYLO_BINDINGS=1", "-DMOZILLA_INTERNAL_API", "-DRUST_BINDGEN", "-DMOZ_STYLO" ], "search_dirs": [ "{}/dist/include", "{}/dist/include/nspr", "{}/../nsprpub/pr/include" ], "includes": [ "{}/mozilla-config.h", ], }, # Generation of style structs. "structs": { "target_dir": "../gecko_bindings", "flags": [ "--ignore-functions", "--ignore-methods", ], "includes": [ "{}/dist/include/gfxFontConstants.h", "{}/dist/include/nsThemeConstants.h", "{}/dist/include/mozilla/dom/AnimationEffectReadOnlyBinding.h", "{}/dist/include/mozilla/ServoElementSnapshot.h", "{}/dist/include/mozilla/dom/Element.h", ], "files": [ "{}/dist/include/nsStyleStruct.h", ], "build_kinds": { "debug": { "clang_flags": [ "-DDEBUG=1", "-DJS_DEBUG=1", ] }, "release": { } }, "raw_lines": [ # We can get rid of this when the bindings move into the style crate. 
"pub enum OpaqueStyleData {}", "pub use nsstring::nsStringRepr as nsString;" ], "blacklist_types": ["nsString"], "whitelist_vars": [ "NS_THEME_.*", "NODE_.*", "NS_FONT_.*", "NS_STYLE_.*", "NS_CORNER_.*", "NS_RADIUS_.*", "BORDER_COLOR_.*", "BORDER_STYLE_.*" ], "whitelist": [ "RawGeckoNode", "RawGeckoElement", "RawGeckoDocument", "Element", "Side", "nsTArrayHeader", "nsCSSValueGradient", "nsCSSValueList_heap", "FrameRequestCallback", "nsCSSValueTriplet_heap", "nsCSSRect_heap", "AnonymousContent", "nsCSSValuePairList", "nsCSSValuePairList_heap", "nsCSSValuePair_heap", "CapturingContentInfo", "Runnable", "AudioContext", "FontFamilyListRefCnt", "ImageURL", "Image", "nsCSSValueFloatColor", "ServoAttrSnapshot", "GridNamedArea", "nsAttrName", "nsAttrValue", "nsCSSRect", "gfxFontFeature", "gfxAlternateValue", "nsCSSValueTokenStream", "nsSize", "pair", "StyleClipPathGeometryBox", "FontFamilyName", "nsCSSPropertyID", "StyleAnimation", "StyleTransition", "nsresult", "nsCSSValueGradientStop", "nsBorderColors", "Position", "nsCSSValueList", "nsCSSValue", "UniquePtr", "DefaultDelete", "StyleBasicShape", "nsMargin", "nsStyleContentData", "nsStyleFilter", "nsRect", "FragmentOrURL", "nsStyleCoord", "nsStyleCounterData", "StaticRefPtr", "nsTArray", "nsStyleFont", "nsStyleColor", "nsStyleList", "nsStyleText", "nsStyleVisibility", "nsStyleUserInterface", "nsStyleTableBorder", "nsStyleSVG", "nsStyleVariables", "nsStyleBackground", "nsStylePosition", "nsStyleTextReset", "nsStyleDisplay", "nsStyleContent", "nsStyleUIReset", "nsStyleTable", "nsStyleMargin", "nsStylePadding", "nsStyleBorder", "nsStyleOutline", "nsStyleXUL", "nsStyleSVGReset", "nsStyleColumn", "nsStyleEffects", "nsStyleImage", "nsStyleGradient", "nsStyleCoord", "nsStyleGradientStop", "nsStyleImageLayers", "nsStyleImageLayers_Layer", "nsStyleImageLayers_LayerType", "nsStyleUnit", "nsStyleUnion", "nsStyleCoord", "nsRestyleHint", "ServoElementSnapshot", "nsChangeHint", "SheetParsingMode", "nsMainThreadPtrHandle", 
"nsMainThreadPtrHolder", "nscolor", "nsFont", "FontFamilyList", "FontFamilyType", "nsIAtom", "nsStyleContext", "StyleClipPath", "StyleBasicShapeType", "StyleBasicShape", "nsCSSShadowArray", ], "opaque_types": [ "atomic___base", "nsAString_internal_char_traits", "nsAString_internal_incompatible_char_type", "nsACString_internal_char_traits", "nsACString_internal_incompatible_char_type", "RefPtr_Proxy", "nsAutoPtr_Proxy", "Pair_Base", "RefPtr_Proxy_member_function", "nsAutoPtr_Proxy_member_function", "nsWritingIterator_reference", "nsReadingIterator_reference", "Heap", "TenuredHeap", "Rooted", "WeakPtr", # <- More template magic than what # we support. "nsTObserverArray", # <- Inherits from nsAutoTObserverArray<T, 0> "PLArenaPool", # <- Bindgen bug "nsTHashtable", # <- Inheriting from inner typedefs that clang # doesn't expose properly. "nsRefPtrHashtable", "nsDataHashtable", "nsClassHashtable", # <- Ditto "nsIDocument_SelectorCache", # <- Inherits from nsExpirationTracker<.., 4> "nsIPresShell_ScrollAxis", # <- For some reason the alignment of this is 4 # for clang. "nsPIDOMWindow", # <- Takes the vtable from a template parameter, and we can't # generate it conditionally. "SupportsWeakPtr", "Maybe", # <- AlignedStorage, which means templated union, which # means impossible to represent in stable rust as of # right now. "gfxSize", # <- Same, union { struct { T width; T height; }; T components[2] }; "gfxSize_Super", # Ditto. ], "servo_mapped_generic_types": [ { "generic": True, "gecko": "ServoUnsafeCell", "servo": "::std::cell::UnsafeCell" }, { "generic": True, "gecko": "ServoCell", "servo": "::std::cell::Cell" }, { "generic": False, "gecko": "ServoNodeData", "servo": "OpaqueStyleData" } ], }, # Generation of the ffi bindings. 
"bindings": { "target_dir": "../gecko_bindings", "raw_lines": [], "flags": [ "--ignore-methods", ], "match_headers": [ "ServoBindingList.h", "ServoBindings.h", "nsStyleStructList.h", ], "files": [ "{}/dist/include/mozilla/ServoBindings.h", ], # Types to just use from the `structs` target. "structs_types": [ "nsStyleFont", "nsStyleColor", "nsStyleList", "nsStyleText", "nsStyleVisibility", "nsStyleUserInterface", "nsStyleTableBorder", "nsStyleSVG", "nsStyleVariables", "nsStyleBackground", "nsStylePosition", "nsStyleTextReset", "nsStyleDisplay", "nsStyleContent", "nsStyleUIReset", "nsStyleTable", "nsStyleMargin", "nsStylePadding", "nsStyleBorder", "nsStyleOutline", "nsStyleXUL", "nsStyleSVGReset", "nsStyleColumn", "nsStyleEffects", "nsStyleImage", "nsStyleGradient", "nsStyleCoord", "nsStyleGradientStop", "nsStyleImageLayers", "nsStyleImageLayers_Layer", "nsStyleImageLayers_LayerType", "nsStyleUnit", "nsStyleUnion", "nsStyleCoord_CalcValue", "nsStyleCoord_Calc", "nsRestyleHint", "ServoElementSnapshot", "nsChangeHint", "SheetParsingMode", "nsMainThreadPtrHolder", "nsFont", "FontFamilyList", "FontFamilyType", "nsIAtom", "nsStyleContext", "StyleClipPath", "StyleBasicShapeType", "StyleBasicShape", "nsCSSShadowArray", "nsIPrincipal", "nsIURI", "RawGeckoNode", "RawGeckoElement", "RawGeckoDocument", "nsString", "nsStyleQuoteValues" ], "servo_nullable_arc_types": [ "ServoComputedValues", "RawServoStyleSheet", "ServoDeclarationBlock" ], "servo_owned_types": [ "RawServoStyleSet", "StyleChildrenIterator", ], "servo_immutable_borrow_types": [ "RawGeckoNode", "RawGeckoElement", "RawGeckoDocument", ], "whitelist_functions": [ "Servo_.*", "Gecko_.*" ] }, "atoms": { "custom_build": regen_atoms.build, } } def platform_dependent_defines(): ret = [] if os.name == "posix": ret.append("-DOS_POSIX=1") system = platform.system() if system == "Linux": ret.append("-DOS_LINUX=1") elif system == "Darwin": ret.append("-DOS_MACOSX=1") elif system == "Windows": ret.append("-DOS_WIN=1") 
ret.append("-DWIN32=1") msvc_platform = os.environ["PLATFORM"] if msvc_platform == "X86": ret.append("--target=i686-pc-win32") elif msvc_platform == "X64": ret.append("--target=x86_64-pc-win32") else: raise Exception("Only MSVC builds are supported on Windows") # For compatibility with MSVC 2015 ret.append("-fms-compatibility-version=19") # To enable the builtin __builtin_offsetof so that CRT wouldn't # use reinterpret_cast in offsetof() which is not allowed inside # static_assert(). ret.append("-D_CRT_USE_BUILTIN_OFFSETOF") # Enable hidden attribute (which is not supported by MSVC and # thus not enabled by default with a MSVC-compatibile build) # to exclude hidden symbols from the generated file. ret.append("-DHAVE_VISIBILITY_HIDDEN_ATTRIBUTE=1") else: raise Exception("Unknown platform") return ret def extend_object(obj, other): if not obj or not other: return obj if isinstance(obj, list) and isinstance(other, list): obj.extend(other) return assert isinstance(obj, dict) and isinstance(other, dict) for key in other.keys(): if key in obj: extend_object(obj[key], other[key]) else: obj[key] = copy.deepcopy(other[key]) def build(objdir, target_name, debug, debugger, kind_name=None, output_filename=None, bindgen=None, skip_test=False, verbose=False): assert target_name in COMPILATION_TARGETS current_target = COMPILATION_TARGETS[target_name] if COMMON_BUILD_KEY in COMPILATION_TARGETS: current_target = copy.deepcopy(COMPILATION_TARGETS[COMMON_BUILD_KEY]) extend_object(current_target, COMPILATION_TARGETS[target_name]) assert ((kind_name is None and "build_kinds" not in current_target) or (kind_name in current_target["build_kinds"])) if "custom_build" in current_target: print("[CUSTOM] {}::{} in \"{}\"... 
".format(target_name, kind_name, objdir), end='') sys.stdout.flush() ret = current_target["custom_build"](objdir, verbose=True) if ret != 0: print("FAIL") else: print("OK") return ret if bindgen is None: bindgen = os.path.join(TOOLS_DIR, "rust-bindgen") if os.path.isdir(bindgen): bindgen = ["cargo", "run", "--manifest-path", os.path.join(bindgen, "Cargo.toml"), "--features", "llvm_stable", "--"] else: bindgen = [bindgen] if kind_name is not None: current_target = copy.deepcopy(current_target) extend_object(current_target, current_target["build_kinds"][kind_name]) target_dir = None if output_filename is None and "target_dir" in current_target: target_dir = current_target["target_dir"] if output_filename is None: output_filename = "{}.rs".format(target_name) if kind_name is not None: output_filename = "{}_{}.rs".format(target_name, kind_name) if target_dir: output_filename = "{}/{}".format(target_dir, output_filename) print("[BINDGEN] {}::{} in \"{}\"... ".format(target_name, kind_name, objdir), end='') sys.stdout.flush() flags = [] # This makes an FFI-safe void type that can't be matched on # &VoidType is UB to have, because you can match on it # to produce a reachable unreachable. 
If it's wrapped in # a struct as a private field it becomes okay again # # Not 100% sure of how safe this is, but it's what we're using # in the XPCOM ffi too # https://github.com/nikomatsakis/rust-memory-model/issues/2 def zero_size_type(ty, flags): flags.append("--blacklist-type") flags.append(ty) flags.append("--raw-line") flags.append("enum {0}Void{{ }}".format(ty)) flags.append("--raw-line") flags.append("pub struct {0}({0}Void);".format(ty)) if "flags" in current_target: flags.extend(current_target["flags"]) clang_flags = [] if "clang_flags" in current_target: clang_flags.extend(current_target["clang_flags"]) clang_flags.extend(platform_dependent_defines()) if platform.system() == "Windows": flags.append("--use-msvc-mangling") if "raw_lines" in current_target: for raw_line in current_target["raw_lines"]: flags.append("--raw-line") flags.append(raw_line) if "search_dirs" in current_target: for dir_name in current_target["search_dirs"]: clang_flags.append("-I") clang_flags.append(dir_name.format(objdir)) if "includes" in current_target: for file_name in current_target["includes"]: clang_flags.append("-include") clang_flags.append(file_name.format(objdir)) if "whitelist" in current_target: for header in current_target["whitelist"]: flags.append("--whitelist-type") flags.append(header) if "whitelist_functions" in current_target: for header in current_target["whitelist_functions"]: flags.append("--whitelist-function") flags.append(header) if "whitelist_vars" in current_target: for header in current_target["whitelist_vars"]: flags.append("--whitelist-var") flags.append(header) if "opaque_types" in current_target: for ty in current_target["opaque_types"]: flags.append("--opaque-type") flags.append(ty) if "blacklist_types" in current_target: for ty in current_target["blacklist_types"]: flags.append("--blacklist-type") flags.append(ty) if "servo_nullable_arc_types" in current_target: for ty in current_target["servo_nullable_arc_types"]: 
flags.append("--blacklist-type") flags.append("{}Strong".format(ty)) flags.append("--raw-line") flags.append("pub type {0}Strong = ::gecko_bindings::sugar::ownership::Strong<{0}>;" .format(ty)) flags.append("--blacklist-type") flags.append("{}BorrowedOrNull".format(ty)) flags.append("--raw-line") flags.append("pub type {0}BorrowedOrNull<'a> = \ Option<&'a {0}>;".format(ty)) flags.append("--blacklist-type") flags.append("{}Borrowed".format(ty)) flags.append("--raw-line") flags.append("pub type {0}Borrowed<'a> = &'a {0};".format(ty)) zero_size_type(ty, flags) if "servo_immutable_borrow_types" in current_target: for ty in current_target["servo_immutable_borrow_types"]: flags.append("--blacklist-type") flags.append("{}Borrowed".format(ty)) flags.append("--raw-line") flags.append("pub type {0}Borrowed<'a> = &'a {0};".format(ty)) flags.append("--blacklist-type") flags.append("{}BorrowedOrNull".format(ty)) flags.append("--raw-line") flags.append("pub type {0}BorrowedOrNull<'a> = \ Option<&'a {0}>;".format(ty)) # Right now the only immutable borrow types are ones which we import # from the |structs| module. As such, we don't need to create an opaque # type with zero_size_type. If we ever introduce immutable borrow types # which _do_ need to be opaque, we'll need a separate mode. 
if "servo_mapped_generic_types" in current_target: for ty in current_target["servo_mapped_generic_types"]: flags.append("--blacklist-type") flags.append("{}".format(ty["gecko"])) flags.append("--raw-line") flags.append("pub type {0}{2} = {1}{2};".format(ty["gecko"], ty["servo"], "<T>" if ty["generic"] else "")) if "servo_owned_types" in current_target: for ty in current_target["servo_owned_types"]: flags.append("--blacklist-type") flags.append("{}Borrowed".format(ty)) flags.append("--raw-line") flags.append("pub type {0}Borrowed<'a> = &'a {0};".format(ty)) flags.append("--blacklist-type") flags.append("{}BorrowedMut".format(ty)) flags.append("--raw-line") flags.append("pub type {0}BorrowedMut<'a> = &'a mut {0};".format(ty)) flags.append("--blacklist-type") flags.append("{}Owned".format(ty)) flags.append("--raw-line") flags.append("pub type {0}Owned = ::gecko_bindings::sugar::ownership::Owned<{0}>;".format(ty)) flags.append("--blacklist-type") flags.append("{}BorrowedOrNull".format(ty)) flags.append("--raw-line") flags.append("pub type {0}BorrowedOrNull<'a> = Option<&'a {0}>;" .format(ty)) flags.append("--blacklist-type") flags.append("{}BorrowedMutOrNull".format(ty)) flags.append("--raw-line") flags.append("pub type {0}BorrowedMutOrNull<'a> = Option<&'a mut {0}>;" .format(ty)) flags.append("--blacklist-type") flags.append("{}OwnedOrNull".format(ty)) flags.append("--raw-line") flags.append("pub type {0}OwnedOrNull = ::gecko_bindings::sugar::ownership::OwnedOrNull<{0}>;".format(ty)) zero_size_type(ty, flags) if "structs_types" in current_target: for ty in current_target["structs_types"]: flags.append("--blacklist-type") flags.append(ty) flags.append("--raw-line") flags.append("use gecko_bindings::structs::{};".format(ty)) # TODO: this is hacky, figure out a better way to do it without # hardcoding everything... 
if ty.startswith("nsStyle"): flags.extend([ "--raw-line", "unsafe impl Send for {} {{}}".format(ty), "--raw-line", "unsafe impl Sync for {} {{}}".format(ty), ]) flags.append("-o") flags.append(output_filename) assert len(current_target["files"]) == 1 flags.append(current_target["files"][0].format(objdir)) flags = bindgen + flags + ["--"] + clang_flags if verbose: print(flags) output = "" try: if debug: flags = [debugger, "--args"] + flags subprocess.check_call(flags) else: output = subprocess.check_output(flags, stderr=subprocess.STDOUT) output = output.decode('utf8') except subprocess.CalledProcessError as e: print("FAIL\n", e.output.decode('utf8')) return 1 print("OK") print("(please test with ./mach test-stylo)") if verbose: print(output) return 0 def builds_for(target_name, kind): if target_name == "all": for target in COMPILATION_TARGETS.keys(): if target == COMMON_BUILD_KEY: continue if "build_kinds" in COMPILATION_TARGETS[target]: for kind in COMPILATION_TARGETS[target]["build_kinds"].keys(): yield (target, kind) else: yield (target, None) return target = COMPILATION_TARGETS[target_name] if "build_kinds" in target: if kind is None: for kind in target["build_kinds"].keys(): yield(target_name, kind) else: yield (target_name, kind) return yield (target_name, None) def main(): parser = argparse.ArgumentParser(description=DESCRIPTION) parser.add_argument('--target', default='all', help='The target to build, either "structs" or "bindings"') parser.add_argument('--kind', help='Kind of build') parser.add_argument('--bindgen', help='Override bindgen binary') parser.add_argument('--output', '-o', help='Output of the script') parser.add_argument('--skip-test', action='store_true', help='Skip automatic tests, useful for debugging') parser.add_argument('--verbose', '-v', action='store_true', help='Be... 
verbose') parser.add_argument('--debug', action='store_true', help='Try to use a debugger to debug bindgen commands (default: gdb)') parser.add_argument('--debugger', default='gdb', help='Debugger to use. Only used if --debug is passed.') parser.add_argument('objdir') args = parser.parse_args() if not os.path.isdir(args.objdir): print("\"{}\" doesn't seem to be a directory".format(args.objdir)) return 1 if (args.target != "all" and args.target not in COMPILATION_TARGETS) or args.target == COMMON_BUILD_KEY: print("{} is not a valid compilation target.".format(args.target)) print("Valid compilation targets are:") for target in COMPILATION_TARGETS.keys(): if target != COMMON_BUILD_KEY: print("\t * {}".format(target)) return 1 current_target = COMPILATION_TARGETS.get(args.target, {}) if args.kind and "build_kinds" in current_target and args.kind not in current_target["build_kinds"]: print("{} is not a valid build kind.".format(args.kind)) print("Valid build kinds are:") for kind in current_target["build_kinds"].keys(): print("\t * {}".format(kind)) return 1 for target, kind in builds_for(args.target, args.kind): ret = build(args.objdir, target, kind_name=kind, debug=args.debug, debugger=args.debugger, bindgen=args.bindgen, skip_test=args.skip_test, output_filename=args.output, verbose=args.verbose) if ret != 0: print("{}::{} failed".format(target, kind)) return ret return 0 if __name__ == '__main__': sys.exit(main())
mpl-2.0
edry/edx-platform
common/lib/xmodule/xmodule/peer_grading_module.py
56
29601
import json import logging from datetime import datetime from django.utils.timezone import UTC from lxml import etree from pkg_resources import resource_string from xblock.fields import Dict, String, Scope, Boolean, Float, Reference from xmodule.capa_module import ComplexEncoder from xmodule.fields import Date, Timedelta from xmodule.modulestore.exceptions import ItemNotFoundError, NoPathToItem from xmodule.raw_module import RawDescriptor from xmodule.timeinfo import TimeInfo from xmodule.x_module import XModule, module_attr from xmodule.open_ended_grading_classes.peer_grading_service import PeerGradingService, MockPeerGradingService from xmodule.open_ended_grading_classes.grading_service_module import GradingServiceError from xmodule.validation import StudioValidation, StudioValidationMessage from open_ended_grading_classes import combined_open_ended_rubric log = logging.getLogger(__name__) # Make '_' a no-op so we can scrape strings _ = lambda text: text EXTERNAL_GRADER_NO_CONTACT_ERROR = "Failed to contact external graders. Please notify course staff." MAX_ALLOWED_FEEDBACK_LENGTH = 5000 class PeerGradingFields(object): use_for_single_location = Boolean( display_name=_("Show Single Problem"), help=_('When True, only the single problem specified by "Link to Problem Location" is shown. ' 'When False, a panel is displayed with all problems available for peer grading.'), default=False, scope=Scope.settings ) link_to_location = Reference( display_name=_("Link to Problem Location"), help=_('The location of the problem being graded. Only used when "Show Single Problem" is True.'), default="", scope=Scope.settings ) graded = Boolean( display_name=_("Graded"), help=_('Defines whether the student gets credit for grading this problem. 
Only used when "Show Single Problem" is True.'), default=False, scope=Scope.settings ) due = Date( help=_("Due date that should be displayed."), scope=Scope.settings) graceperiod = Timedelta( help=_("Amount of grace to give on the due date."), scope=Scope.settings ) student_data_for_location = Dict( help=_("Student data for a given peer grading problem."), scope=Scope.user_state ) weight = Float( display_name=_("Problem Weight"), help=_("Defines the number of points each problem is worth. If the value is not set, each problem is worth one point."), scope=Scope.settings, values={"min": 0, "step": ".1"}, default=1 ) display_name = String( display_name=_("Display Name"), help=_("Display name for this module"), scope=Scope.settings, default=_("Peer Grading Interface") ) data = String( help=_("Html contents to display for this module"), default='<peergrading></peergrading>', scope=Scope.content ) class InvalidLinkLocation(Exception): """ Exception for the case in which a peer grading module tries to link to an invalid location. """ pass class PeerGradingModule(PeerGradingFields, XModule): """ PeerGradingModule.__init__ takes the same arguments as xmodule.x_module:XModule.__init__ """ _VERSION = 1 js = { 'coffee': [ resource_string(__name__, 'js/src/peergrading/peer_grading.coffee'), resource_string(__name__, 'js/src/peergrading/peer_grading_problem.coffee'), resource_string(__name__, 'js/src/javascript_loader.coffee'), ], 'js': [ resource_string(__name__, 'js/src/collapsible.js'), ] } js_module_name = "PeerGrading" css = {'scss': [resource_string(__name__, 'css/combinedopenended/display.scss')]} def __init__(self, *args, **kwargs): super(PeerGradingModule, self).__init__(*args, **kwargs) # Copy this to a new variable so that we can edit it if needed. # We need to edit it if the linked module cannot be found, so # we can revert to panel model. 
self.use_for_single_location_local = self.use_for_single_location # We need to set the location here so the child modules can use it. self.runtime.set('location', self.location) if self.runtime.open_ended_grading_interface: self.peer_gs = PeerGradingService(self.system.open_ended_grading_interface, self.system.render_template) else: self.peer_gs = MockPeerGradingService() if self.use_for_single_location_local: linked_descriptors = self.descriptor.get_required_module_descriptors() if len(linked_descriptors) == 0: error_msg = "Peer grading module {0} is trying to use single problem mode without " "a location specified.".format(self.location) log.error(error_msg) # Change module over to panel mode from single problem mode. self.use_for_single_location_local = False else: self.linked_problem = self.system.get_module(linked_descriptors[0]) try: self.timeinfo = TimeInfo(self.due, self.graceperiod) except Exception: log.error("Error parsing due date information in location {0}".format(self.location)) raise self.display_due_date = self.timeinfo.display_due_date try: self.student_data_for_location = json.loads(self.student_data_for_location) except Exception: # pylint: disable=broad-except # OK with this broad exception because we just want to continue on any error pass @property def ajax_url(self): """ Returns the `ajax_url` from the system, with any trailing '/' stripped off. """ ajax_url = self.system.ajax_url if not ajax_url.endswith("/"): ajax_url += "/" return ajax_url def closed(self): return self._closed(self.timeinfo) def _closed(self, timeinfo): if timeinfo.close_date is not None and datetime.now(UTC()) > timeinfo.close_date: return True return False def _err_response(self, msg): """ Return a HttpResponse with a json dump with success=False, and the given error message. 
""" return {'success': False, 'error': msg} def _check_required(self, data, required): actual = set(data.keys()) missing = required - actual if len(missing) > 0: return False, "Missing required keys: {0}".format(', '.join(missing)) else: return True, "" def get_html(self): """ Needs to be implemented by inheritors. Renders the HTML that students see. @return: """ if self.closed(): return self.peer_grading_closed() if not self.use_for_single_location_local: return self.peer_grading() else: # b/c handle_ajax expects serialized data payload and directly calls peer_grading return self.peer_grading_problem({'location': self.link_to_location.to_deprecated_string()})['html'] def handle_ajax(self, dispatch, data): """ Needs to be implemented by child modules. Handles AJAX events. @return: """ handlers = { 'get_next_submission': self.get_next_submission, 'show_calibration_essay': self.show_calibration_essay, 'is_student_calibrated': self.is_student_calibrated, 'save_grade': self.save_grade, 'save_calibration_essay': self.save_calibration_essay, 'problem': self.peer_grading_problem, } if dispatch not in handlers: # This is a dev_facing_error log.error("Cannot find {0} in handlers in handle_ajax function for open_ended_module.py".format(dispatch)) # This is a dev_facing_error return json.dumps({'error': 'Error handling action. 
Please try again.', 'success': False}) data_dict = handlers[dispatch](data) return json.dumps(data_dict, cls=ComplexEncoder) def query_data_for_location(self, location): student_id = self.system.anonymous_student_id success = False response = {} try: response = self.peer_gs.get_data_for_location(location, student_id) _count_graded = response['count_graded'] _count_required = response['count_required'] success = True except GradingServiceError: # This is a dev_facing_error log.exception("Error getting location data from controller for location %s, student %s", location, student_id) return success, response def get_progress(self): pass def get_score(self): max_score = None score = None weight = self.weight #The old default was None, so set to 1 if it is the old default weight if weight is None: weight = 1 score_dict = { 'score': score, 'total': max_score, } if not self.use_for_single_location_local or not self.graded: return score_dict try: count_graded = self.student_data_for_location['count_graded'] count_required = self.student_data_for_location['count_required'] except: success, response = self.query_data_for_location(self.link_to_location) if not success: log.exception( "No instance data found and could not get data from controller for loc {0} student {1}".format( self.system.location.to_deprecated_string(), self.system.anonymous_student_id )) return None count_graded = response['count_graded'] count_required = response['count_required'] if count_required > 0 and count_graded >= count_required: # Ensures that once a student receives a final score for peer grading, that it does not change. self.student_data_for_location = response score = int(count_graded >= count_required and count_graded > 0) * float(weight) total = float(weight) score_dict['score'] = score score_dict['total'] = total return score_dict def max_score(self): ''' Maximum score. 
Two notes: * This is generic; in abstract, a problem could be 3/5 points on one randomization, and 5/7 on another ''' max_grade = None if self.use_for_single_location_local and self.graded: max_grade = self.weight return max_grade def get_next_submission(self, data): """ Makes a call to the grading controller for the next essay that should be graded Returns a json dict with the following keys: 'success': bool 'submission_id': a unique identifier for the submission, to be passed back with the grade. 'submission': the submission, rendered as read-only html for grading 'rubric': the rubric, also rendered as html. 'submission_key': a key associated with the submission for validation reasons 'error': if success is False, will have an error message with more info. """ required = set(['location']) success, message = self._check_required(data, required) if not success: return self._err_response(message) grader_id = self.system.anonymous_student_id location = data['location'] try: response = self.peer_gs.get_next_submission(location, grader_id) return response except GradingServiceError: # This is a dev_facing_error log.exception("Error getting next submission. server url: %s location: %s, grader_id: %s", self.peer_gs.url, location, grader_id) # This is a student_facing_error return {'success': False, 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR} def save_grade(self, data): """ Saves the grade of a given submission. 
Input: The request should have the following keys: location - problem location submission_id - id associated with this submission submission_key - submission key given for validation purposes score - the grade that was given to the submission feedback - the feedback from the student Returns A json object with the following keys: success: bool indicating whether the save was a success error: if there was an error in the submission, this is the error message """ required = ['location', 'submission_id', 'submission_key', 'score', 'feedback', 'submission_flagged', 'answer_unknown'] if data.get("submission_flagged", False) in ["false", False, "False", "FALSE"]: required.append("rubric_scores[]") success, message = self._check_required(data, set(required)) if not success: return self._err_response(message) success, message = self._check_feedback_length(data) if not success: return self._err_response(message) data_dict = {k: data.get(k) for k in required} if 'rubric_scores[]' in required: data_dict['rubric_scores'] = data.getall('rubric_scores[]') data_dict['grader_id'] = self.system.anonymous_student_id try: response = self.peer_gs.save_grade(**data_dict) success, location_data = self.query_data_for_location(data_dict['location']) #Don't check for success above because the response = statement will raise the same Exception as the one #that will cause success to be false. response.update({'required_done': False}) if 'count_graded' in location_data and 'count_required' in location_data and int(location_data['count_graded']) >= int(location_data['count_required']): response['required_done'] = True return response except GradingServiceError: # This is a dev_facing_error log.exception("Error saving grade to open ended grading service. 
server url: %s", self.peer_gs.url) # This is a student_facing_error return { 'success': False, 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR } def is_student_calibrated(self, data): """ Calls the grading controller to see if the given student is calibrated on the given problem Input: In the request, we need the following arguments: location - problem location Returns: Json object with the following keys success - bool indicating whether or not the call was successful calibrated - true if the grader has fully calibrated and can now move on to grading - false if the grader is still working on calibration problems total_calibrated_on_so_far - the number of calibration essays for this problem that this grader has graded """ required = set(['location']) success, message = self._check_required(data, required) if not success: return self._err_response(message) grader_id = self.system.anonymous_student_id location = data['location'] try: response = self.peer_gs.is_student_calibrated(location, grader_id) return response except GradingServiceError: # This is a dev_facing_error log.exception("Error from open ended grading service. server url: %s, grader_id: %s, location: %s", self.peer_gs.url, grader_id, location) # This is a student_facing_error return { 'success': False, 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR } def show_calibration_essay(self, data): """ Fetch the next calibration essay from the grading controller and return it Inputs: In the request location - problem location Returns: A json dict with the following keys 'success': bool 'submission_id': a unique identifier for the submission, to be passed back with the grade. 'submission': the submission, rendered as read-only html for grading 'rubric': the rubric, also rendered as html. 'submission_key': a key associated with the submission for validation reasons 'error': if success is False, will have an error message with more info. 
""" required = set(['location']) success, message = self._check_required(data, required) if not success: return self._err_response(message) grader_id = self.system.anonymous_student_id location = data['location'] try: response = self.peer_gs.show_calibration_essay(location, grader_id) return response except GradingServiceError: # This is a dev_facing_error log.exception("Error from open ended grading service. server url: %s, location: %s", self.peer_gs.url, location) # This is a student_facing_error return {'success': False, 'error': EXTERNAL_GRADER_NO_CONTACT_ERROR} # if we can't parse the rubric into HTML, except etree.XMLSyntaxError: # This is a dev_facing_error log.exception("Cannot parse rubric string.") # This is a student_facing_error return {'success': False, 'error': 'Error displaying submission. Please notify course staff.'} def save_calibration_essay(self, data): """ Saves the grader's grade of a given calibration. Input: The request should have the following keys: location - problem location submission_id - id associated with this submission submission_key - submission key given for validation purposes score - the grade that was given to the submission feedback - the feedback from the student Returns A json object with the following keys: success: bool indicating whether the save was a success error: if there was an error in the submission, this is the error message actual_score: the score that the instructor gave to this calibration essay """ required = set(['location', 'submission_id', 'submission_key', 'score', 'feedback', 'rubric_scores[]']) success, message = self._check_required(data, required) if not success: return self._err_response(message) data_dict = {k: data.get(k) for k in required} data_dict['rubric_scores'] = data.getall('rubric_scores[]') data_dict['student_id'] = self.system.anonymous_student_id data_dict['calibration_essay_id'] = data_dict['submission_id'] try: response = self.peer_gs.save_calibration_essay(**data_dict) if 
'actual_rubric' in response: rubric_renderer = combined_open_ended_rubric.CombinedOpenEndedRubric(self.system.render_template, True) response['actual_rubric'] = rubric_renderer.render_rubric(response['actual_rubric'])['html'] return response except GradingServiceError: # This is a dev_facing_error log.exception("Error saving calibration grade") # This is a student_facing_error return self._err_response('There was an error saving your score. Please notify course staff.') def peer_grading_closed(self): ''' Show the Peer grading closed template ''' html = self.system.render_template('peer_grading/peer_grading_closed.html', { 'use_for_single_location': self.use_for_single_location_local }) return html def _find_corresponding_module_for_location(self, location): """ Find the peer grading module that exists at the given location. """ try: return self.descriptor.system.load_item(location) except ItemNotFoundError: # The linked problem doesn't exist. log.error("Problem {0} does not exist in this course.".format(location)) raise except NoPathToItem: # The linked problem does not have a path to it (ie is in a draft or other strange state). log.error("Cannot find a path to problem {0} in this course.".format(location)) raise def peer_grading(self, _data=None): ''' Show a peer grading interface ''' # call problem list service success = False error_text = "" problem_list = [] try: problem_list_dict = self.peer_gs.get_problem_list(self.course_id, self.system.anonymous_student_id) success = problem_list_dict['success'] if 'error' in problem_list_dict: error_text = problem_list_dict['error'] problem_list = problem_list_dict['problem_list'] except GradingServiceError: # This is a student_facing_error error_text = EXTERNAL_GRADER_NO_CONTACT_ERROR log.error(error_text) success = False # catch error if if the json loads fails except ValueError: # This is a student_facing_error error_text = "Could not get list of problems to peer grade. Please notify course staff." 
log.error(error_text) success = False except Exception: log.exception("Could not contact peer grading service.") success = False good_problem_list = [] for problem in problem_list: problem_location = problem['location'] try: descriptor = self._find_corresponding_module_for_location(problem_location) except (NoPathToItem, ItemNotFoundError): continue if descriptor: problem['due'] = descriptor.due grace_period = descriptor.graceperiod try: problem_timeinfo = TimeInfo(problem['due'], grace_period) except Exception: log.error("Malformed due date or grace period string for location {0}".format(problem_location)) raise if self._closed(problem_timeinfo): problem['closed'] = True else: problem['closed'] = False else: # if we can't find the due date, assume that it doesn't have one problem['due'] = None problem['closed'] = False good_problem_list.append(problem) ajax_url = self.ajax_url html = self.system.render_template('peer_grading/peer_grading.html', { 'ajax_url': ajax_url, 'success': success, 'problem_list': good_problem_list, 'error_text': error_text, # Checked above 'staff_access': False, 'use_single_location': self.use_for_single_location_local, }) return html def peer_grading_problem(self, data=None): ''' Show individual problem interface ''' if data is None or data.get('location') is None: if not self.use_for_single_location_local: # This is an error case, because it must be set to use a single location to be called without get parameters # This is a dev_facing_error log.error( "Peer grading problem in peer_grading_module called with no get parameters, but use_for_single_location is False.") return {'html': "", 'success': False} problem_location = self.link_to_location elif data.get('location') is not None: problem_location = self.course_id.make_usage_key_from_deprecated_string(data.get('location')) self._find_corresponding_module_for_location(problem_location) ajax_url = self.ajax_url html = self.system.render_template('peer_grading/peer_grading_problem.html', { 
'view_html': '', 'problem_location': problem_location, 'course_id': self.course_id, 'ajax_url': ajax_url, # Checked above 'staff_access': False, 'use_single_location': self.use_for_single_location_local, }) return {'html': html, 'success': True} def get_instance_state(self): """ Returns the current instance state. The module can be recreated from the instance state. Input: None Output: A dictionary containing the instance state. """ state = { 'student_data_for_location': self.student_data_for_location, } return json.dumps(state) def _check_feedback_length(self, data): feedback = data.get("feedback") if feedback and len(feedback) > MAX_ALLOWED_FEEDBACK_LENGTH: return False, "Feedback is too long, Max length is {0} characters.".format( MAX_ALLOWED_FEEDBACK_LENGTH ) else: return True, "" def validate(self): """ Message for either error or warning validation message/s. Returns message and type. Priority given to error type message. """ return self.descriptor.validate() class PeerGradingDescriptor(PeerGradingFields, RawDescriptor): """ Module for adding peer grading questions """ mako_template = "widgets/raw-edit.html" module_class = PeerGradingModule filename_extension = "xml" has_score = True always_recalculate_grades = True #Specify whether or not to pass in open ended interface needs_open_ended_interface = True metadata_translations = { 'is_graded': 'graded', 'attempts': 'max_attempts', 'due_data': 'due' } @property def non_editable_metadata_fields(self): non_editable_fields = super(PeerGradingDescriptor, self).non_editable_metadata_fields non_editable_fields.extend([PeerGradingFields.due, PeerGradingFields.graceperiod]) return non_editable_fields def get_required_module_descriptors(self): """ Returns a list of XModuleDescriptor instances upon which this module depends, but are not children of this module. """ # If use_for_single_location is True, this is linked to an open ended problem. if self.use_for_single_location: # Try to load the linked module. 
# If we can't load it, return empty list to avoid exceptions on progress page. try: linked_module = self.system.load_item(self.link_to_location) return [linked_module] except (NoPathToItem, ItemNotFoundError): error_message = ("Cannot find the combined open ended module " "at location {0} being linked to from peer " "grading module {1}").format(self.link_to_location, self.location) log.error(error_message) return [] else: return [] # Proxy to PeerGradingModule so that external callers don't have to know if they're working # with a module or a descriptor closed = module_attr('closed') get_instance_state = module_attr('get_instance_state') get_next_submission = module_attr('get_next_submission') graded = module_attr('graded') is_student_calibrated = module_attr('is_student_calibrated') peer_grading = module_attr('peer_grading') peer_grading_closed = module_attr('peer_grading_closed') peer_grading_problem = module_attr('peer_grading_problem') peer_gs = module_attr('peer_gs') query_data_for_location = module_attr('query_data_for_location') save_calibration_essay = module_attr('save_calibration_essay') save_grade = module_attr('save_grade') show_calibration_essay = module_attr('show_calibration_essay') use_for_single_location_local = module_attr('use_for_single_location_local') _find_corresponding_module_for_location = module_attr('_find_corresponding_module_for_location') def validate(self): """ Validates the state of this instance. This is the override of the general XBlock method, and it will also ask its superclass to validate. """ validation = super(PeerGradingDescriptor, self).validate() validation = StudioValidation.copy(validation) i18n_service = self.runtime.service(self, "i18n") validation.summary = StudioValidationMessage( StudioValidationMessage.ERROR, i18n_service.ugettext( "ORA1 is no longer supported. To use this assessment, " "replace this ORA1 component with an ORA2 component." ) ) return validation
agpl-3.0
jetskijoe/headphones
lib/beetsplug/lyrics.py
1
26402
# -*- coding: utf-8 -*- # This file is part of beets. # Copyright 2016, Adrian Sampson. # # Permission is hereby granted, free of charge, to any person obtaining # a copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, # distribute, sublicense, and/or sell copies of the Software, and to # permit persons to whom the Software is furnished to do so, subject to # the following conditions: # # The above copyright notice and this permission notice shall be # included in all copies or substantial portions of the Software. """Fetches, embeds, and displays lyrics. """ from __future__ import absolute_import, division, print_function import difflib import itertools import json import re import requests import unicodedata import warnings from six.moves import urllib import six try: from bs4 import SoupStrainer, BeautifulSoup HAS_BEAUTIFUL_SOUP = True except ImportError: HAS_BEAUTIFUL_SOUP = False try: import langdetect HAS_LANGDETECT = True except ImportError: HAS_LANGDETECT = False try: # PY3: HTMLParseError was removed in 3.5 as strict mode # was deprecated in 3.3. # https://docs.python.org/3.3/library/html.parser.html from six.moves.html_parser import HTMLParseError except ImportError: class HTMLParseError(Exception): pass from beets import plugins from beets import ui DIV_RE = re.compile(r'<(/?)div>?', re.I) COMMENT_RE = re.compile(r'<!--.*-->', re.S) TAG_RE = re.compile(r'<[^>]*>') BREAK_RE = re.compile(r'\n?\s*<br([\s|/][^>]*)*>\s*\n?', re.I) URL_CHARACTERS = { u'\u2018': u"'", u'\u2019': u"'", u'\u201c': u'"', u'\u201d': u'"', u'\u2010': u'-', u'\u2011': u'-', u'\u2012': u'-', u'\u2013': u'-', u'\u2014': u'-', u'\u2015': u'-', u'\u2016': u'-', u'\u2026': u'...', } # Utilities. 
def unescape(text): """Resolve &#xxx; HTML entities (and some others).""" if isinstance(text, bytes): text = text.decode('utf8', 'ignore') out = text.replace(u'&nbsp;', u' ') def replchar(m): num = m.group(1) return six.unichr(int(num)) out = re.sub(u"&#(\d+);", replchar, out) return out def extract_text_between(html, start_marker, end_marker): try: _, html = html.split(start_marker, 1) html, _ = html.split(end_marker, 1) except ValueError: return u'' return html def extract_text_in(html, starttag): """Extract the text from a <DIV> tag in the HTML starting with ``starttag``. Returns None if parsing fails. """ # Strip off the leading text before opening tag. try: _, html = html.split(starttag, 1) except ValueError: return # Walk through balanced DIV tags. level = 0 parts = [] pos = 0 for match in DIV_RE.finditer(html): if match.group(1): # Closing tag. level -= 1 if level == 0: pos = match.end() else: # Opening tag. if level == 0: parts.append(html[pos:match.start()]) level += 1 if level == -1: parts.append(html[pos:match.start()]) break else: print(u'no closing tag found!') return return u''.join(parts) def search_pairs(item): """Yield a pairs of artists and titles to search for. The first item in the pair is the name of the artist, the second item is a list of song names. In addition to the artist and title obtained from the `item` the method tries to strip extra information like paranthesized suffixes and featured artists from the strings and add them as candidates. The method also tries to split multiple titles separated with `/`. """ title, artist = item.title, item.artist titles = [title] artists = [artist] # Remove any featuring artists from the artists name pattern = r"(.*?) {0}".format(plugins.feat_tokens()) match = re.search(pattern, artist, re.IGNORECASE) if match: artists.append(match.group(1)) # Remove a parenthesized suffix from a title string. Common # examples include (live), (remix), and (acoustic). 
pattern = r"(.+?)\s+[(].*[)]$" match = re.search(pattern, title, re.IGNORECASE) if match: titles.append(match.group(1)) # Remove any featuring artists from the title pattern = r"(.*?) {0}".format(plugins.feat_tokens(for_artist=False)) for title in titles[:]: match = re.search(pattern, title, re.IGNORECASE) if match: titles.append(match.group(1)) # Check for a dual song (e.g. Pink Floyd - Speak to Me / Breathe) # and each of them. multi_titles = [] for title in titles: multi_titles.append([title]) if '/' in title: multi_titles.append([x.strip() for x in title.split('/')]) return itertools.product(artists, multi_titles) class Backend(object): def __init__(self, config, log): self._log = log @staticmethod def _encode(s): """Encode the string for inclusion in a URL""" if isinstance(s, six.text_type): for char, repl in URL_CHARACTERS.items(): s = s.replace(char, repl) s = s.encode('utf8', 'ignore') return urllib.parse.quote(s) def build_url(self, artist, title): return self.URL_PATTERN % (self._encode(artist.title()), self._encode(title.title())) def fetch_url(self, url): """Retrieve the content at a given URL, or return None if the source is unreachable. """ try: # Disable the InsecureRequestWarning that comes from using # `verify=false`. 
# https://github.com/kennethreitz/requests/issues/2214 # We're not overly worried about the NSA MITMing our lyrics scraper with warnings.catch_warnings(): warnings.simplefilter('ignore') r = requests.get(url, verify=False) except requests.RequestException as exc: self._log.debug(u'lyrics request failed: {0}', exc) return if r.status_code == requests.codes.ok: return r.text else: self._log.debug(u'failed to fetch: {0} ({1})', url, r.status_code) def fetch(self, artist, title): raise NotImplementedError() class SymbolsReplaced(Backend): REPLACEMENTS = { r'\s+': '_', '<': 'Less_Than', '>': 'Greater_Than', '#': 'Number_', r'[\[\{]': '(', r'[\[\{]': ')' } @classmethod def _encode(cls, s): for old, new in cls.REPLACEMENTS.items(): s = re.sub(old, new, s) return super(SymbolsReplaced, cls)._encode(s) class MusiXmatch(SymbolsReplaced): REPLACEMENTS = dict(SymbolsReplaced.REPLACEMENTS, **{ r'\s+': '-' }) URL_PATTERN = 'https://www.musixmatch.com/lyrics/%s/%s' def fetch(self, artist, title): url = self.build_url(artist, title) html = self.fetch_url(url) if not html: return lyrics = extract_text_between(html, '"body":', '"language":') return lyrics.strip(',"').replace('\\n', '\n') class Genius(Backend): """Fetch lyrics from Genius via genius-api.""" def __init__(self, config, log): super(Genius, self).__init__(config, log) self.api_key = config['genius_api_key'].as_str() self.headers = {'Authorization': "Bearer %s" % self.api_key} def search_genius(self, artist, title): query = u"%s %s" % (artist, title) url = u'https://api.genius.com/search?q=%s' \ % (urllib.parse.quote(query.encode('utf8'))) self._log.debug(u'genius: requesting search {}', url) try: req = requests.get( url, headers=self.headers, allow_redirects=True ) req.raise_for_status() except requests.RequestException as exc: self._log.debug(u'genius: request error: {}', exc) return None try: return req.json() except ValueError: self._log.debug(u'genius: invalid response: {}', req.text) return None def get_lyrics(self, 
link): url = u'http://genius-api.com/api/lyricsInfo' self._log.debug(u'genius: requesting lyrics for link {}', link) try: req = requests.post( url, data={'link': link}, headers=self.headers, allow_redirects=True ) req.raise_for_status() except requests.RequestException as exc: self._log.debug(u'genius: request error: {}', exc) return None try: return req.json() except ValueError: self._log.debug(u'genius: invalid response: {}', req.text) return None def build_lyric_string(self, lyrics): if 'lyrics' not in lyrics: return sections = lyrics['lyrics']['sections'] lyrics_list = [] for section in sections: lyrics_list.append(section['name']) lyrics_list.append('\n') for verse in section['verses']: if 'content' in verse: lyrics_list.append(verse['content']) return ''.join(lyrics_list) def fetch(self, artist, title): search_data = self.search_genius(artist, title) if not search_data: return if not search_data['meta']['status'] == 200: return else: records = search_data['response']['hits'] if not records: return record_url = records[0]['result']['url'] lyric_data = self.get_lyrics(record_url) if not lyric_data: return lyrics = self.build_lyric_string(lyric_data) return lyrics class LyricsWiki(SymbolsReplaced): """Fetch lyrics from LyricsWiki.""" URL_PATTERN = 'http://lyrics.wikia.com/%s:%s' def fetch(self, artist, title): url = self.build_url(artist, title) html = self.fetch_url(url) if not html: return # Get the HTML fragment inside the appropriate HTML element and then # extract the text from it. 
html_frag = extract_text_in(html, u"<div class='lyricbox'>") if html_frag: lyrics = _scrape_strip_cruft(html_frag, True) if lyrics and 'Unfortunately, we are not licensed' not in lyrics: return lyrics class LyricsCom(Backend): """Fetch lyrics from Lyrics.com.""" URL_PATTERN = 'http://www.lyrics.com/%s-lyrics-%s.html' NOT_FOUND = ( 'Sorry, we do not have the lyric', 'Submit Lyrics', ) @classmethod def _encode(cls, s): s = re.sub(r'[^\w\s-]', '', s) s = re.sub(r'\s+', '-', s) return super(LyricsCom, cls)._encode(s).lower() def fetch(self, artist, title): url = self.build_url(artist, title) html = self.fetch_url(url) if not html: return lyrics = extract_text_between(html, '<div id="lyrics" class="SCREENO' 'NLY" itemprop="description">', '</div>') if not lyrics: return for not_found_str in self.NOT_FOUND: if not_found_str in lyrics: return parts = lyrics.split('\n---\nLyrics powered by', 1) if parts: return parts[0] def remove_credits(text): """Remove first/last line of text if it contains the word 'lyrics' eg 'Lyrics by songsdatabase.com' """ textlines = text.split('\n') credits = None for i in (0, -1): if textlines and 'lyrics' in textlines[i].lower(): credits = textlines.pop(i) if credits: text = '\n'.join(textlines) return text def _scrape_strip_cruft(html, plain_text_out=False): """Clean up HTML """ html = unescape(html) html = html.replace('\r', '\n') # Normalize EOL. html = re.sub(r' +', ' ', html) # Whitespaces collapse. html = BREAK_RE.sub('\n', html) # <br> eats up surrounding '\n'. html = re.sub(r'<(script).*?</\1>(?s)', '', html) # Strip script tags. 
if plain_text_out: # Strip remaining HTML tags html = COMMENT_RE.sub('', html) html = TAG_RE.sub('', html) html = '\n'.join([x.strip() for x in html.strip().split('\n')]) html = re.sub(r'\n{3,}', r'\n\n', html) return html def _scrape_merge_paragraphs(html): html = re.sub(r'</p>\s*<p(\s*[^>]*)>', '\n', html) return re.sub(r'<div .*>\s*</div>', '\n', html) def scrape_lyrics_from_html(html): """Scrape lyrics from a URL. If no lyrics can be found, return None instead. """ if not HAS_BEAUTIFUL_SOUP: return None if not html: return None def is_text_notcode(text): length = len(text) return (length > 20 and text.count(' ') > length / 25 and (text.find('{') == -1 or text.find(';') == -1)) html = _scrape_strip_cruft(html) html = _scrape_merge_paragraphs(html) # extract all long text blocks that are not code try: soup = BeautifulSoup(html, "html.parser", parse_only=SoupStrainer(text=is_text_notcode)) except HTMLParseError: return None # Get the longest text element (if any). strings = sorted(soup.stripped_strings, key=len, reverse=True) if strings: return strings[0] else: return None class Google(Backend): """Fetch lyrics from Google search results.""" def __init__(self, config, log): super(Google, self).__init__(config, log) self.api_key = config['google_API_key'].as_str() self.engine_id = config['google_engine_ID'].as_str() def is_lyrics(self, text, artist=None): """Determine whether the text seems to be valid lyrics. 
""" if not text: return False bad_triggers_occ = [] nb_lines = text.count('\n') if nb_lines <= 1: self._log.debug(u"Ignoring too short lyrics '{0}'", text) return False elif nb_lines < 5: bad_triggers_occ.append('too_short') else: # Lyrics look legit, remove credits to avoid being penalized # further down text = remove_credits(text) bad_triggers = ['lyrics', 'copyright', 'property', 'links'] if artist: bad_triggers_occ += [artist] for item in bad_triggers: bad_triggers_occ += [item] * len(re.findall(r'\W%s\W' % item, text, re.I)) if bad_triggers_occ: self._log.debug(u'Bad triggers detected: {0}', bad_triggers_occ) return len(bad_triggers_occ) < 2 def slugify(self, text): """Normalize a string and remove non-alphanumeric characters. """ text = re.sub(r"[-'_\s]", '_', text) text = re.sub(r"_+", '_', text).strip('_') pat = "([^,\(]*)\((.*?)\)" # Remove content within parentheses text = re.sub(pat, '\g<1>', text).strip() try: text = unicodedata.normalize('NFKD', text).encode('ascii', 'ignore') text = six.text_type(re.sub('[-\s]+', ' ', text.decode('utf-8'))) except UnicodeDecodeError: self._log.exception(u"Failing to normalize '{0}'", text) return text BY_TRANS = ['by', 'par', 'de', 'von'] LYRICS_TRANS = ['lyrics', 'paroles', 'letras', 'liedtexte'] def is_page_candidate(self, url_link, url_title, title, artist): """Return True if the URL title makes it a good candidate to be a page that contains lyrics of title by artist. 
""" title = self.slugify(title.lower()) artist = self.slugify(artist.lower()) sitename = re.search(u"//([^/]+)/.*", self.slugify(url_link.lower())).group(1) url_title = self.slugify(url_title.lower()) # Check if URL title contains song title (exact match) if url_title.find(title) != -1: return True # or try extracting song title from URL title and check if # they are close enough tokens = [by + '_' + artist for by in self.BY_TRANS] + \ [artist, sitename, sitename.replace('www.', '')] + \ self.LYRICS_TRANS tokens = [re.escape(t) for t in tokens] song_title = re.sub(u'(%s)' % u'|'.join(tokens), u'', url_title) song_title = song_title.strip('_|') typo_ratio = .9 ratio = difflib.SequenceMatcher(None, song_title, title).ratio() return ratio >= typo_ratio def fetch(self, artist, title): query = u"%s %s" % (artist, title) url = u'https://www.googleapis.com/customsearch/v1?key=%s&cx=%s&q=%s' \ % (self.api_key, self.engine_id, urllib.parse.quote(query.encode('utf8'))) data = urllib.request.urlopen(url) data = json.load(data) if 'error' in data: reason = data['error']['errors'][0]['reason'] self._log.debug(u'google lyrics backend error: {0}', reason) return if 'items' in data.keys(): for item in data['items']: url_link = item['link'] url_title = item.get('title', u'') if not self.is_page_candidate(url_link, url_title, title, artist): continue html = self.fetch_url(url_link) lyrics = scrape_lyrics_from_html(html) if not lyrics: continue if self.is_lyrics(lyrics, artist): self._log.debug(u'got lyrics from {0}', item['displayLink']) return lyrics class LyricsPlugin(plugins.BeetsPlugin): SOURCES = ['google', 'lyricwiki', 'lyrics.com', 'musixmatch'] SOURCE_BACKENDS = { 'google': Google, 'lyricwiki': LyricsWiki, 'lyrics.com': LyricsCom, 'musixmatch': MusiXmatch, 'genius': Genius, } def __init__(self): super(LyricsPlugin, self).__init__() self.import_stages = [self.imported] self.config.add({ 'auto': True, 'bing_client_secret': None, 'bing_lang_from': [], 'bing_lang_to': None, 
'google_API_key': None, 'google_engine_ID': u'009217259823014548361:lndtuqkycfu', 'genius_api_key': "Ryq93pUGm8bM6eUWwD_M3NOFFDAtp2yEE7W" "76V-uFL5jks5dNvcGCdarqFjDhP9c", 'fallback': None, 'force': False, 'sources': self.SOURCES, }) self.config['bing_client_secret'].redact = True self.config['google_API_key'].redact = True self.config['google_engine_ID'].redact = True self.config['genius_api_key'].redact = True available_sources = list(self.SOURCES) sources = plugins.sanitize_choices( self.config['sources'].as_str_seq(), available_sources) if 'google' in sources: if not self.config['google_API_key'].get(): # We log a *debug* message here because the default # configuration includes `google`. This way, the source # is silent by default but can be enabled just by # setting an API key. self._log.debug(u'Disabling google source: ' u'no API key configured.') sources.remove('google') elif not HAS_BEAUTIFUL_SOUP: self._log.warning(u'To use the google lyrics source, you must ' u'install the beautifulsoup4 module. See ' u'the documentation for further details.') sources.remove('google') self.config['bing_lang_from'] = [ x.lower() for x in self.config['bing_lang_from'].as_str_seq()] self.bing_auth_token = None if not HAS_LANGDETECT and self.config['bing_client_secret'].get(): self._log.warning(u'To use bing translations, you need to ' u'install the langdetect module. 
See the ' u'documentation for further details.') self.backends = [self.SOURCE_BACKENDS[source](self.config, self._log) for source in sources] def get_bing_access_token(self): params = { 'client_id': 'beets', 'client_secret': self.config['bing_client_secret'], 'scope': 'http://api.microsofttranslator.com', 'grant_type': 'client_credentials', } oauth_url = 'https://datamarket.accesscontrol.windows.net/v2/OAuth2-13' oauth_token = json.loads(requests.post( oauth_url, data=urllib.parse.urlencode(params)).content) if 'access_token' in oauth_token: return "Bearer " + oauth_token['access_token'] else: self._log.warning(u'Could not get Bing Translate API access token.' u' Check your "bing_client_secret" password') def commands(self): cmd = ui.Subcommand('lyrics', help='fetch song lyrics') cmd.parser.add_option( u'-p', u'--print', dest='printlyr', action='store_true', default=False, help=u'print lyrics to console', ) cmd.parser.add_option( u'-f', u'--force', dest='force_refetch', action='store_true', default=False, help=u'always re-download lyrics', ) def func(lib, opts, args): # The "write to files" option corresponds to the # import_write config value. write = ui.should_write() for item in lib.items(ui.decargs(args)): self.fetch_item_lyrics( lib, item, write, opts.force_refetch or self.config['force'], ) if opts.printlyr and item.lyrics: ui.print_(item.lyrics) cmd.func = func return [cmd] def imported(self, session, task): """Import hook for fetching lyrics automatically. """ if self.config['auto']: for item in task.imported_items(): self.fetch_item_lyrics(session.lib, item, False, self.config['force']) def fetch_item_lyrics(self, lib, item, write, force): """Fetch and store lyrics for a single item. If ``write``, then the lyrics will also be written to the file itself.""" # Skip if the item already has lyrics. 
if not force and item.lyrics: self._log.info(u'lyrics already present: {0}', item) return lyrics = None for artist, titles in search_pairs(item): lyrics = [self.get_lyrics(artist, title) for title in titles] if any(lyrics): break lyrics = u"\n\n---\n\n".join([l for l in lyrics if l]) if lyrics: self._log.info(u'fetched lyrics: {0}', item) if HAS_LANGDETECT and self.config['bing_client_secret'].get(): lang_from = langdetect.detect(lyrics) if self.config['bing_lang_to'].get() != lang_from and ( not self.config['bing_lang_from'] or ( lang_from in self.config[ 'bing_lang_from'].as_str_seq())): lyrics = self.append_translation( lyrics, self.config['bing_lang_to']) else: self._log.info(u'lyrics not found: {0}', item) fallback = self.config['fallback'].get() if fallback: lyrics = fallback else: return item.lyrics = lyrics if write: item.try_write() item.store() def get_lyrics(self, artist, title): """Fetch lyrics, trying each source in turn. Return a string or None if no lyrics were found. """ for backend in self.backends: lyrics = backend.fetch(artist, title) if lyrics: self._log.debug(u'got lyrics from backend: {0}', backend.__class__.__name__) return _scrape_strip_cruft(lyrics, True) def append_translation(self, text, to_lang): import xml.etree.ElementTree as ET if not self.bing_auth_token: self.bing_auth_token = self.get_bing_access_token() if self.bing_auth_token: # Extract unique lines to limit API request size per song text_lines = set(text.split('\n')) url = ('http://api.microsofttranslator.com/v2/Http.svc/' 'Translate?text=%s&to=%s' % ('|'.join(text_lines), to_lang)) r = requests.get(url, headers={"Authorization ": self.bing_auth_token}) if r.status_code != 200: self._log.debug('translation API error {}: {}', r.status_code, r.text) if 'token has expired' in r.text: self.bing_auth_token = None return self.append_translation(text, to_lang) return text lines_translated = ET.fromstring(r.text.encode('utf8')).text # Use a translation mapping dict to build resulting 
lyrics translations = dict(zip(text_lines, lines_translated.split('|'))) result = '' for line in text.split('\n'): result += '%s / %s\n' % (line, translations[line]) return result
gpl-3.0
sonnyhu/scikit-learn
examples/ensemble/plot_partial_dependence.py
54
4704
""" ======================== Partial Dependence Plots ======================== Partial dependence plots show the dependence between the target function [2]_ and a set of 'target' features, marginalizing over the values of all other features (the complement features). Due to the limits of human perception the size of the target feature set must be small (usually, one or two) thus the target features are usually chosen among the most important features (see :attr:`~sklearn.ensemble.GradientBoostingRegressor.feature_importances_`). This example shows how to obtain partial dependence plots from a :class:`~sklearn.ensemble.GradientBoostingRegressor` trained on the California housing dataset. The example is taken from [1]_. The plot shows four one-way and one two-way partial dependence plots. The target variables for the one-way PDP are: median income (`MedInc`), avg. occupants per household (`AvgOccup`), median house age (`HouseAge`), and avg. rooms per household (`AveRooms`). We can clearly see that the median house price shows a linear relationship with the median income (top left) and that the house price drops when the avg. occupants per household increases (top middle). The top right plot shows that the house age in a district does not have a strong influence on the (median) house price; so does the average rooms per household. The tick marks on the x-axis represent the deciles of the feature values in the training data. Partial dependence plots with two target features enable us to visualize interactions among them. The two-way partial dependence plot shows the dependence of median house price on joint values of house age and avg. occupants per household. We can clearly see an interaction between the two features: For an avg. occupancy greater than two, the house price is nearly independent of the house age, whereas for values less than two there is a strong dependence on age. .. [1] T. Hastie, R. Tibshirani and J. Friedman, "Elements of Statistical Learning Ed. 
2", Springer, 2009. .. [2] For classification you can think of it as the regression score before the link function. """ from __future__ import print_function print(__doc__) import numpy as np import matplotlib.pyplot as plt from mpl_toolkits.mplot3d import Axes3D from sklearn.model_selection import train_test_split from sklearn.ensemble import GradientBoostingRegressor from sklearn.ensemble.partial_dependence import plot_partial_dependence from sklearn.ensemble.partial_dependence import partial_dependence from sklearn.datasets.california_housing import fetch_california_housing def main(): cal_housing = fetch_california_housing() # split 80/20 train-test X_train, X_test, y_train, y_test = train_test_split(cal_housing.data, cal_housing.target, test_size=0.2, random_state=1) names = cal_housing.feature_names print("Training GBRT...") clf = GradientBoostingRegressor(n_estimators=100, max_depth=4, learning_rate=0.1, loss='huber', random_state=1) clf.fit(X_train, y_train) print(" done.") print('Convenience plot with ``partial_dependence_plots``') features = [0, 5, 1, 2, (5, 1)] fig, axs = plot_partial_dependence(clf, X_train, features, feature_names=names, n_jobs=3, grid_resolution=50) fig.suptitle('Partial dependence of house value on nonlocation features\n' 'for the California housing dataset') plt.subplots_adjust(top=0.9) # tight_layout causes overlap with suptitle print('Custom 3d plot via ``partial_dependence``') fig = plt.figure() target_feature = (1, 5) pdp, axes = partial_dependence(clf, target_feature, X=X_train, grid_resolution=50) XX, YY = np.meshgrid(axes[0], axes[1]) Z = pdp[0].reshape(list(map(np.size, axes))).T ax = Axes3D(fig) surf = ax.plot_surface(XX, YY, Z, rstride=1, cstride=1, cmap=plt.cm.BuPu) ax.set_xlabel(names[target_feature[0]]) ax.set_ylabel(names[target_feature[1]]) ax.set_zlabel('Partial dependence') # pretty init view ax.view_init(elev=22, azim=122) plt.colorbar(surf) plt.suptitle('Partial dependence of house value on median age and ' 
'average occupancy') plt.subplots_adjust(top=0.9) plt.show() # Needed on Windows because plot_partial_dependence uses multiprocessing if __name__ == '__main__': main()
bsd-3-clause
davidjb/js.jquery_scrolltofixed
setup.py
1
1362
from setuptools import setup, find_packages import os # The version of the wrapped library is the starting point for the # version number of the python package. # In bugfix releases of the python package, add a '-' suffix and an # incrementing integer. # For example, a packaging bugfix release version 1.4.4 of the # js.jquery package would be version 1.4.4-1 . version = '0.1-2.dev0' def read(*rnames): return open(os.path.join(os.path.dirname(__file__), *rnames)).read() long_description = ( read('README.rst') + '\n' + read('js', 'jquery_scrolltofixed', 'test_scrolltofixed.txt') + '\n' + read('CHANGES.rst')) setup( name='js.jquery_scrolltofixed', version=version, description="Fanstatic packaging of ScrollToFixed (jQuery plugin)", long_description=long_description, classifiers=[], keywords='', author='Fanstatic Developers', author_email='fanstatic@googlegroups.com', license='BSD', packages=find_packages(),namespace_packages=['js'], include_package_data=True, zip_safe=False, setup_requires=[ 'setuptools-git', ], install_requires=[ 'fanstatic', 'js.jquery', 'setuptools', ], entry_points={ 'fanstatic.libraries': [ 'scrolltofixed = js.jquery_scrolltofixed:library', ], }, )
bsd-3-clause
neteler/QGIS
python/plugins/db_manager/db_plugins/postgis/data_model.py
5
3419
# -*- coding: utf-8 -*- """ /*************************************************************************** Name : DB Manager Description : Database manager plugin for QGIS Date : May 23, 2011 copyright : (C) 2011 by Giuseppe Sucameli email : brush.tyler@gmail.com ***************************************************************************/ /*************************************************************************** * * * This program is free software; you can redistribute it and/or modify * * it under the terms of the GNU General Public License as published by * * the Free Software Foundation; either version 2 of the License, or * * (at your option) any later version. * * * ***************************************************************************/ """ from PyQt4.QtCore import SIGNAL from ..data_model import TableDataModel, SqlResultModel class PGTableDataModel(TableDataModel): def __init__(self, table, parent=None): self.cursor = None TableDataModel.__init__(self, table, parent) if self.table.rowCount is None: self.table.refreshRowCount() if self.table.rowCount is None: return self.connect(self.table, SIGNAL("aboutToChange"), self._deleteCursor) self._createCursor() def _createCursor(self): fields_txt = u", ".join(self.fields) table_txt = self.db.quoteId( (self.table.schemaName(), self.table.name) ) self.cursor = self.db._get_cursor() sql = u"SELECT %s FROM %s" % (fields_txt, table_txt) self.db._execute(self.cursor, sql) def _sanitizeTableField(self, field): # get fields, ignore geometry columns if field.dataType.lower() == "geometry": return u"CASE WHEN %(fld)s IS NULL THEN NULL ELSE GeometryType(%(fld)s) END AS %(fld)s" % {'fld': self.db.quoteId(field.name)} elif field.dataType.lower() == "raster": return u"CASE WHEN %(fld)s IS NULL THEN NULL ELSE 'RASTER' END AS %(fld)s" % {'fld': self.db.quoteId(field.name)} return u"%s::text" % self.db.quoteId(field.name) def _deleteCursor(self): self.db._close_cursor(self.cursor) self.cursor = None def __del__(self): 
self.disconnect(self.table, SIGNAL("aboutToChange"), self._deleteCursor) self._deleteCursor() pass # print "PGTableModel.__del__" def fetchMoreData(self, row_start): if not self.cursor: self._createCursor() try: self.cursor.scroll(row_start, mode='absolute') except self.db.error_types(): self._deleteCursor() return self.fetchMoreData(row_start) self.resdata = self.cursor.fetchmany(self.fetchedCount) self.fetchedFrom = row_start class PGSqlResultModel(SqlResultModel): pass
gpl-2.0
OlafLee/LV_groundhog
groundhog/utils/utils.py
3
7006
""" Utility functions TODO: write more documentation """ __docformat__ = 'restructedtext en' __authors__ = ("Razvan Pascanu " "KyungHyun Cho " "Caglar Gulcehre ") __contact__ = "Razvan Pascanu <r.pascanu@gmail>" import numpy import random import string import copy as pycopy import theano import theano.tensor as TT def print_time(secs): if secs < 120.: return '%6.3f sec' % secs elif secs <= 60 * 60: return '%6.3f min' % (secs / 60.) else: return '%6.3f h ' % (secs / 3600.) def print_mem(context=None): if theano.sandbox.cuda.cuda_enabled: rvals = theano.sandbox.cuda.cuda_ndarray.cuda_ndarray.mem_info() # Avaliable memory in Mb available = float(rvals[0]) / 1024. / 1024. # Total memory in Mb total = float(rvals[1]) / 1024. / 1024. if context == None: print ('Used %.3f Mb Free %.3f Mb, total %.3f Mb' % (total - available, available, total)) else: info = str(context) print (('GPU status : Used %.3f Mb Free %.3f Mb,' 'total %.3f Mb [context %s]') % (total - available, available, total, info)) def const(value): return TT.constant(numpy.asarray(value, dtype=theano.config.floatX)) def as_floatX(variable): """ This code is taken from pylearn2: Casts a given variable into dtype config.floatX numpy ndarrays will remain numpy ndarrays python floats will become 0-D ndarrays all other types will be treated as theano tensors """ if isinstance(variable, float): return numpy.cast[theano.config.floatX](variable) if isinstance(variable, numpy.ndarray): return numpy.cast[theano.config.floatX](variable) return theano.tensor.cast(variable, theano.config.floatX) def copy(x): new_x = pycopy.copy(x) new_x.params = [x for x in new_x.params] new_x.params_grad_scale = [x for x in new_x.params_grad_scale ] new_x.noise_params = [x for x in new_x.noise_params ] new_x.noise_params_shape_fn = [x for x in new_x.noise_params_shape_fn] new_x.updates = [x for x in new_x.updates ] new_x.additional_gradients = [x for x in new_x.additional_gradients ] new_x.inputs = [x for x in new_x.inputs ] 
new_x.schedules = [x for x in new_x.schedules ] new_x.properties = [x for x in new_x.properties ] return new_x def softmax(x): if x.ndim == 2: e = TT.exp(x) return e / TT.sum(e, axis=1).dimshuffle(0, 'x') else: e = TT.exp(x) return e/ TT.sum(e) def sample_zeros(sizeX, sizeY, sparsity, scale, rng): return numpy.zeros((sizeX, sizeY), dtype=theano.config.floatX) def sample_weights(sizeX, sizeY, sparsity, scale, rng): """ Initialization that fixes the largest singular value. """ sizeX = int(sizeX) sizeY = int(sizeY) sparsity = numpy.minimum(sizeY, sparsity) values = numpy.zeros((sizeX, sizeY), dtype=theano.config.floatX) for dx in xrange(sizeX): perm = rng.permutation(sizeY) new_vals = rng.uniform(low=-scale, high=scale, size=(sparsity,)) vals_norm = numpy.sqrt((new_vals**2).sum()) new_vals = scale*new_vals/vals_norm values[dx, perm[:sparsity]] = new_vals _,v,_ = numpy.linalg.svd(values) values = scale * values/v[0] return values.astype(theano.config.floatX) def sample_weights_classic(sizeX, sizeY, sparsity, scale, rng): sizeX = int(sizeX) sizeY = int(sizeY) if sparsity < 0: sparsity = sizeY else: sparsity = numpy.minimum(sizeY, sparsity) sparsity = numpy.minimum(sizeY, sparsity) values = numpy.zeros((sizeX, sizeY), dtype=theano.config.floatX) for dx in xrange(sizeX): perm = rng.permutation(sizeY) new_vals = rng.normal(loc=0, scale=scale, size=(sparsity,)) values[dx, perm[:sparsity]] = new_vals return values.astype(theano.config.floatX) def sample_weights_orth(sizeX, sizeY, sparsity, scale, rng): sizeX = int(sizeX) sizeY = int(sizeY) assert sizeX == sizeY, 'for orthogonal init, sizeX == sizeY' if sparsity < 0: sparsity = sizeY else: sparsity = numpy.minimum(sizeY, sparsity) values = numpy.zeros((sizeX, sizeY), dtype=theano.config.floatX) for dx in xrange(sizeX): perm = rng.permutation(sizeY) new_vals = rng.normal(loc=0, scale=scale, size=(sparsity,)) values[dx, perm[:sparsity]] = new_vals u,s,v = numpy.linalg.svd(values) values = u * scale return 
values.astype(theano.config.floatX) def init_bias(size, scale, rng): return numpy.ones((size,), dtype=theano.config.floatX)*scale def id_generator(size=5, chars=string.ascii_uppercase + string.digits): return ''.join(random.choice(chars) for i in xrange(size)) def constant_shape(shape): return lambda *args, **kwargs : shape def binVec2Int(binVec): add = lambda x,y: x+y return reduce(add, [int(x) * 2 ** y for x, y in zip( list(binVec),range(len(binVec) - 1, -1, -1))]) def Int2binVec(val, nbits=10): strVal = '{0:b}'.format(val) value = numpy.zeros((nbits,), dtype=theano.config.floatX) if theano.config.floatX == 'float32': value[:len(strVal)] = [numpy.float32(x) for x in strVal[::-1]] else: value[:len(strVal)] = [numpy.float64(x) for x in strVal[::-1]] return value def dot(inp, matrix): """ Decide the right type of dot product depending on the input arguments """ if 'int' in inp.dtype and inp.ndim==2: return matrix[inp.flatten()] elif 'int' in inp.dtype: return matrix[inp] elif 'float' in inp.dtype and inp.ndim == 3: shape0 = inp.shape[0] shape1 = inp.shape[1] shape2 = inp.shape[2] return TT.dot(inp.reshape((shape0*shape1, shape2)), matrix) else: return TT.dot(inp, matrix) def dbg_hook(hook, x): if not isinstance(x, TT.TensorVariable): x.out = theano.printing.Print(global_fn=hook)(x.out) return x else: return theano.printing.Print(global_fn=hook)(x) def replace_array(in_array, mapping): # NOT in-place out_array = numpy.zeros(in_array.shape, in_array.dtype) for i in xrange(numpy.shape(in_array)[0]): # Assume array is 2-dimensional for j in xrange(numpy.shape(in_array)[1]): out_array[i,j] = mapping[in_array[i,j]] return out_array def name2pos(param_list): d = {} i = 0 for p in param_list: d[p.name] = i i += 1 return d def invert_dict(d): inv_d = {} for key in d: inv_d[d[key]] = key assert len(d) == len(inv_d) # Check for uniqueness return inv_d
bsd-3-clause
GoogleCloudPlatform/appengine-python-standard
src/google/appengine/api/memcache/memcache_stub_service_pb2.py
1
8397
#!/usr/bin/env python # # Copyright 2007 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # """Generated protocol buffer code.""" from google.protobuf import descriptor as _descriptor from google.protobuf import message as _message from google.protobuf import reflection as _reflection from google.protobuf import symbol_database as _symbol_database _sym_db = _symbol_database.Default() from google.appengine.api import api_base_pb2 as google_dot_appengine_dot_api_dot_api__base__pb2 DESCRIPTOR = _descriptor.FileDescriptor( name='google/appengine/api/memcache/memcache_stub_service.proto', package='google.appengine', syntax='proto2', serialized_options=b'\n!com.google.appengine.api.memcacheB\025MemcacheStubServicePb', create_key=_descriptor._internal_create_key, serialized_pb=b'\n9google/appengine/api/memcache/memcache_stub_service.proto\x12\x10google.appengine\x1a#google/appengine/api/api_base.proto\"+\n\x11SetMaxSizeRequest\x12\x16\n\x0emax_size_bytes\x18\x01 \x02(\x03\"1\n\x19GetLruChainLengthResponse\x12\x14\n\x0c\x63hain_length\x18\x01 \x02(\x03\"2\n\x0fSetClockRequest\x12\x1f\n\x17\x63lock_time_milliseconds\x18\x01 \x02(\x03\"+\n\x13\x41\x64vanceClockRequest\x12\x14\n\x0cmilliseconds\x18\x01 \x02(\x03\"7\n\x14\x41\x64vanceClockResponse\x12\x1f\n\x17\x63lock_time_milliseconds\x18\x01 \x02(\x03\x42:\n!com.google.appengine.api.memcacheB\x15MemcacheStubServicePb' , dependencies=[google_dot_appengine_dot_api_dot_api__base__pb2.DESCRIPTOR,]) _SETMAXSIZEREQUEST = 
_descriptor.Descriptor( name='SetMaxSizeRequest', full_name='google.appengine.SetMaxSizeRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='max_size_bytes', full_name='google.appengine.SetMaxSizeRequest.max_size_bytes', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=116, serialized_end=159, ) _GETLRUCHAINLENGTHRESPONSE = _descriptor.Descriptor( name='GetLruChainLengthResponse', full_name='google.appengine.GetLruChainLengthResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='chain_length', full_name='google.appengine.GetLruChainLengthResponse.chain_length', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=161, serialized_end=210, ) _SETCLOCKREQUEST = _descriptor.Descriptor( name='SetClockRequest', full_name='google.appengine.SetClockRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='clock_time_milliseconds', full_name='google.appengine.SetClockRequest.clock_time_milliseconds', index=0, number=1, 
type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=212, serialized_end=262, ) _ADVANCECLOCKREQUEST = _descriptor.Descriptor( name='AdvanceClockRequest', full_name='google.appengine.AdvanceClockRequest', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='milliseconds', full_name='google.appengine.AdvanceClockRequest.milliseconds', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=264, serialized_end=307, ) _ADVANCECLOCKRESPONSE = _descriptor.Descriptor( name='AdvanceClockResponse', full_name='google.appengine.AdvanceClockResponse', filename=None, file=DESCRIPTOR, containing_type=None, create_key=_descriptor._internal_create_key, fields=[ _descriptor.FieldDescriptor( name='clock_time_milliseconds', full_name='google.appengine.AdvanceClockResponse.clock_time_milliseconds', index=0, number=1, type=3, cpp_type=2, label=2, has_default_value=False, default_value=0, message_type=None, enum_type=None, containing_type=None, is_extension=False, extension_scope=None, serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key), ], extensions=[ ], nested_types=[], enum_types=[ ], serialized_options=None, 
is_extendable=False, syntax='proto2', extension_ranges=[], oneofs=[ ], serialized_start=309, serialized_end=364, ) DESCRIPTOR.message_types_by_name['SetMaxSizeRequest'] = _SETMAXSIZEREQUEST DESCRIPTOR.message_types_by_name['GetLruChainLengthResponse'] = _GETLRUCHAINLENGTHRESPONSE DESCRIPTOR.message_types_by_name['SetClockRequest'] = _SETCLOCKREQUEST DESCRIPTOR.message_types_by_name['AdvanceClockRequest'] = _ADVANCECLOCKREQUEST DESCRIPTOR.message_types_by_name['AdvanceClockResponse'] = _ADVANCECLOCKRESPONSE _sym_db.RegisterFileDescriptor(DESCRIPTOR) SetMaxSizeRequest = _reflection.GeneratedProtocolMessageType('SetMaxSizeRequest', (_message.Message,), { 'DESCRIPTOR' : _SETMAXSIZEREQUEST, '__module__' : 'google.appengine.api.memcache.memcache_stub_service_pb2' }) _sym_db.RegisterMessage(SetMaxSizeRequest) GetLruChainLengthResponse = _reflection.GeneratedProtocolMessageType('GetLruChainLengthResponse', (_message.Message,), { 'DESCRIPTOR' : _GETLRUCHAINLENGTHRESPONSE, '__module__' : 'google.appengine.api.memcache.memcache_stub_service_pb2' }) _sym_db.RegisterMessage(GetLruChainLengthResponse) SetClockRequest = _reflection.GeneratedProtocolMessageType('SetClockRequest', (_message.Message,), { 'DESCRIPTOR' : _SETCLOCKREQUEST, '__module__' : 'google.appengine.api.memcache.memcache_stub_service_pb2' }) _sym_db.RegisterMessage(SetClockRequest) AdvanceClockRequest = _reflection.GeneratedProtocolMessageType('AdvanceClockRequest', (_message.Message,), { 'DESCRIPTOR' : _ADVANCECLOCKREQUEST, '__module__' : 'google.appengine.api.memcache.memcache_stub_service_pb2' }) _sym_db.RegisterMessage(AdvanceClockRequest) AdvanceClockResponse = _reflection.GeneratedProtocolMessageType('AdvanceClockResponse', (_message.Message,), { 'DESCRIPTOR' : _ADVANCECLOCKRESPONSE, '__module__' : 'google.appengine.api.memcache.memcache_stub_service_pb2' }) _sym_db.RegisterMessage(AdvanceClockResponse) DESCRIPTOR._options = None
apache-2.0
liavkoren/djangoDev
tests/inspectdb/models.py
27
2512
# -*- encoding: utf-8 -*- from __future__ import unicode_literals import warnings from django.db import models class People(models.Model): name = models.CharField(max_length=255) parent = models.ForeignKey('self') class Message(models.Model): from_field = models.ForeignKey(People, db_column='from_id') class PeopleData(models.Model): people_pk = models.ForeignKey(People, primary_key=True) ssn = models.CharField(max_length=11) class PeopleMoreData(models.Model): people_unique = models.ForeignKey(People, unique=True) license = models.CharField(max_length=255) class DigitsInColumnName(models.Model): all_digits = models.CharField(max_length=11, db_column='123') leading_digit = models.CharField(max_length=11, db_column='4extra') leading_digits = models.CharField(max_length=11, db_column='45extra') class SpecialColumnName(models.Model): field = models.IntegerField(db_column='field') # Underscores field_field_0 = models.IntegerField(db_column='Field_') field_field_1 = models.IntegerField(db_column='Field__') field_field_2 = models.IntegerField(db_column='__field') # Other chars prc_x = models.IntegerField(db_column='prc(%) x') non_ascii = models.IntegerField(db_column='tamaño') class ColumnTypes(models.Model): id = models.AutoField(primary_key=True) big_int_field = models.BigIntegerField() bool_field = models.BooleanField(default=False) null_bool_field = models.NullBooleanField() char_field = models.CharField(max_length=10) comma_separated_int_field = models.CommaSeparatedIntegerField(max_length=99) date_field = models.DateField() date_time_field = models.DateTimeField() decimal_field = models.DecimalField(max_digits=6, decimal_places=1) email_field = models.EmailField() file_field = models.FileField(upload_to="unused") file_path_field = models.FilePathField() float_field = models.FloatField() int_field = models.IntegerField() with warnings.catch_warnings(record=True) as w: warnings.simplefilter("always") ip_address_field = models.IPAddressField() gen_ip_adress_field = 
models.GenericIPAddressField(protocol="ipv4") pos_int_field = models.PositiveIntegerField() pos_small_int_field = models.PositiveSmallIntegerField() slug_field = models.SlugField() small_int_field = models.SmallIntegerField() text_field = models.TextField() time_field = models.TimeField() url_field = models.URLField()
bsd-3-clause
vasyarv/edx-platform
common/lib/xmodule/xmodule/modulestore/split_mongo/definition_lazy_loader.py
213
1560
import copy

from opaque_keys.edx.locator import DefinitionLocator


class DefinitionLazyLoader(object):
    """
    A placeholder to put into an xblock in place of its definition which
    when accessed knows how to get its content. Only useful if the containing
    object doesn't force access during init but waits until client wants the
    definition. Only works if the modulestore is a split mongo store.
    """
    def __init__(self, modulestore, course_key, block_type, definition_id,
                 field_converter):
        """
        Simple placeholder for yet-to-be-fetched data.

        :param modulestore: the modulestore (split mongo) that owns the
            definition records and exposes ``get_definition``
        :param course_key: the course the definition belongs to
        :param block_type: the xblock type, combined with ``definition_id``
            into a :class:`DefinitionLocator`
        :param definition_id: the id of the definition record to fetch
        :param field_converter: converter applied to the fetched fields by
            callers (stored, not invoked here)
        """
        self.modulestore = modulestore
        self.course_key = course_key
        self.definition_locator = DefinitionLocator(block_type, definition_id)
        self.field_converter = field_converter

    def fetch(self):
        """
        Fetch the definition. Note, the caller should replace this lazy
        loader pointer with the result so as not to fetch more than once.
        """
        # get_definition may return a cached value, perhaps from another
        # course or code path, so we deep-copy the result here so that
        # updates don't cross-pollinate nor change the cached value in such
        # a way that we can't tell that the definition's been updated.
        definition = self.modulestore.get_definition(
            self.course_key, self.definition_locator.definition_id)
        return copy.deepcopy(definition)
agpl-3.0
lucashmorais/x-Bench
mozmill-env/linux/python-lib/requests/status_codes.py
71
3104
# -*- coding: utf-8 -*-

from .structures import LookupDict

# Map of HTTP status code -> tuple of attribute aliases. The first alias in
# each tuple is the canonical name; the rest are convenience synonyms.
_codes = {

    # Informational.
    100: ('continue',),
    101: ('switching_protocols',),
    102: ('processing',),
    103: ('checkpoint',),
    122: ('uri_too_long', 'request_uri_too_long'),
    200: ('ok', 'okay', 'all_ok', 'all_okay', 'all_good', '\\o/', '✓'),
    201: ('created',),
    202: ('accepted',),
    203: ('non_authoritative_info', 'non_authoritative_information'),
    204: ('no_content',),
    205: ('reset_content', 'reset'),
    206: ('partial_content', 'partial'),
    207: ('multi_status', 'multiple_status', 'multi_stati', 'multiple_stati'),
    208: ('im_used',),

    # Redirection.
    300: ('multiple_choices',),
    301: ('moved_permanently', 'moved', '\\o-'),
    302: ('found',),
    303: ('see_other', 'other'),
    304: ('not_modified',),
    305: ('use_proxy',),
    306: ('switch_proxy',),
    307: ('temporary_redirect', 'temporary_moved', 'temporary'),
    308: ('resume_incomplete', 'resume'),

    # Client Error.
    400: ('bad_request', 'bad'),
    401: ('unauthorized',),
    402: ('payment_required', 'payment'),
    403: ('forbidden',),
    404: ('not_found', '-o-'),
    405: ('method_not_allowed', 'not_allowed'),
    406: ('not_acceptable',),
    407: ('proxy_authentication_required', 'proxy_auth', 'proxy_authentication'),
    408: ('request_timeout', 'timeout'),
    409: ('conflict',),
    410: ('gone',),
    411: ('length_required',),
    # NOTE(review): 'precondition' is also an alias of 428 below; because the
    # aliases are applied in dict iteration order, the attribute set last wins.
    # Kept as-is for backwards compatibility with upstream requests.
    412: ('precondition_failed', 'precondition'),
    413: ('request_entity_too_large',),
    414: ('request_uri_too_large',),
    415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
    416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
    417: ('expectation_failed',),
    418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
    422: ('unprocessable_entity', 'unprocessable'),
    423: ('locked',),
    424: ('failed_dependency', 'dependency'),
    425: ('unordered_collection', 'unordered'),
    426: ('upgrade_required', 'upgrade'),
    428: ('precondition_required', 'precondition'),
    429: ('too_many_requests', 'too_many'),
    431: ('header_fields_too_large', 'fields_too_large'),
    444: ('no_response', 'none'),
    449: ('retry_with', 'retry'),
    450: ('blocked_by_windows_parental_controls', 'parental_controls'),
    451: ('unavailable_for_legal_reasons', 'legal_reasons'),
    499: ('client_closed_request',),

    # Server Error.
    500: ('internal_server_error', 'server_error', '/o\\', '✗'),
    501: ('not_implemented',),
    502: ('bad_gateway',),
    503: ('service_unavailable', 'unavailable'),
    504: ('gateway_timeout',),
    505: ('http_version_not_supported', 'http_version'),
    506: ('variant_also_negotiates',),
    507: ('insufficient_storage',),
    509: ('bandwidth_limit_exceeded', 'bandwidth'),
    510: ('not_extended',),
}

codes = LookupDict(name='status_codes')

# Expose every alias as an attribute on ``codes``, plus an upper-case variant
# for names that are valid identifiers (the '\\'-prefixed ASCII-art aliases
# such as '\\o/' are skipped for the upper-case form).
for (code, titles) in list(_codes.items()):
    for title in titles:
        setattr(codes, title, code)
        if not title.startswith('\\'):
            setattr(codes, title.upper(), code)
mit
teamCarel/EyeTracker
src/shared_modules/video_capture/__init__.py
2
1353
'''
(*)~---------------------------------------------------------------------------
Pupil - eye tracking platform
Copyright (C) 2012-2017  Pupil Labs

Distributed under the terms of the GNU
Lesser General Public License (LGPL v3.0).
See COPYING and COPYING.LESSER for license details.
---------------------------------------------------------------------------~(*)
'''

'''
Video Capture provides the interface to get frames from different backends.

Backends consist of a manager and at least one source class. The manager
is a Pupil plugin that provides an GUI that lists all available sources. The
source provides the stream of image frames.

These backends are available:
- UVC: Local USB sources
- NDSI: Remote Pupil Mobile sources
- Fake: Fallback, static random image
- File: Loads video from file
'''

from .base_backend import InitialisationError, StreamError
from .base_backend import Base_Source, Base_Manager
from .fake_backend import Fake_Source, Fake_Manager
from .file_backend import FileCaptureError, EndofVideoFileError, FileSeekError
from .file_backend import File_Source, File_Manager
from .ndsi_backend import NDSI_Source, NDSI_Manager
from .uvc_backend import UVC_Source, UVC_Manager

# Registration order determines the order in which backends are offered;
# Fake_Source is last as the fallback.
source_classes = [File_Source, NDSI_Source, UVC_Source, Fake_Source]
manager_classes = [File_Manager, NDSI_Manager, UVC_Manager, Fake_Manager]
lgpl-3.0
lochiiconnectivity/pdns
pdns/ed25519/base2.py
77
1231
# Generates the precomputed base-point multiples table for the ed25519
# implementation, printing C array initializers in radix-2^25.5 form.
# Ported to run under both Python 2 and Python 3: integer divisions use
# ``//`` (identical to Python 2's ``/`` on ints) and printing uses the
# print function (comma-separated args produce the same space-separated
# output as the old print statement).
from __future__ import print_function

b = 256
q = 2**255 - 19
l = 2**252 + 27742317777372353535851937790883648493


def expmod(b, e, m):
    """Return b**e mod m by recursive square-and-multiply."""
    if e == 0:
        return 1
    t = expmod(b, e // 2, m)**2 % m
    if e & 1:
        t = (t * b) % m
    return t


def inv(x):
    """Modular inverse of x mod q via Fermat's little theorem (q is prime)."""
    return expmod(x, q - 2, q)


# Curve constant d = -121665/121666 and sqrt(-1) mod q.
d = -121665 * inv(121666)
I = expmod(2, (q - 1) // 4, q)


def xrecover(y):
    """Recover the even x-coordinate on the curve for a given y."""
    xx = (y*y - 1) * inv(d*y*y + 1)
    x = expmod(xx, (q + 3) // 8, q)
    if (x*x - xx) % q != 0:
        x = (x * I) % q
    if x % 2 != 0:
        x = q - x
    return x


# Standard base point B: y = 4/5, x recovered as the even root.
By = 4 * inv(5)
Bx = xrecover(By)
B = [Bx % q, By % q]


def edwards(P, Q):
    """Edwards-curve point addition of P and Q (affine coordinates)."""
    x1 = P[0]
    y1 = P[1]
    x2 = Q[0]
    y2 = Q[1]
    x3 = (x1*y2 + x2*y1) * inv(1 + d*x1*x2*y1*y2)
    y3 = (y1*y2 + x1*x2) * inv(1 - d*x1*x2*y1*y2)
    return [x3 % q, y3 % q]


def radix255(x):
    """Encode x mod q as ten limbs of alternating 26/25 bits, comma-joined."""
    x = x % q
    # Use the centered representative in (-q/2, q/2].
    if x + x > q:
        x -= q
    x = [x, 0, 0, 0, 0, 0, 0, 0, 0, 0]
    bits = [26, 25, 26, 25, 26, 25, 26, 25, 26, 25]
    for i in range(9):
        # Round-to-nearest carry into the next limb.
        carry = (x[i] + 2**(bits[i] - 1)) // 2**bits[i]
        x[i] -= carry * 2**bits[i]
        x[i + 1] += carry
    result = ""
    for i in range(9):
        result = result + str(x[i]) + ","
    result = result + str(x[9])
    return result


# Emit the table for B, 3B, 9B, ... (Bi advances by 2B each iteration of
# edwards(B, edwards(B, Bi))), in the (y+x, y-x, 2dxy) representation.
# NOTE(review): the whitespace inside the printed strings was reconstructed
# from a collapsed source line; verify against the upstream ed25519 base2.py.
Bi = B
for i in range(8):
    print(" {")
    print(" {", radix255(Bi[1] + Bi[0]), "},")
    print(" {", radix255(Bi[1] - Bi[0]), "},")
    print(" {", radix255(2 * d * Bi[0] * Bi[1]), "},")
    print(" },")
    Bi = edwards(B, edwards(B, Bi))
gpl-2.0
fbagirov/scikit-learn
sklearn/linear_model/tests/test_theil_sen.py
234
9928
""" Testing for Theil-Sen module (sklearn.linear_model.theil_sen) """ # Author: Florian Wilhelm <florian.wilhelm@gmail.com> # License: BSD 3 clause from __future__ import division, print_function, absolute_import import os import sys from contextlib import contextmanager import numpy as np from numpy.testing import assert_array_equal, assert_array_less from numpy.testing import assert_array_almost_equal, assert_warns from scipy.linalg import norm from scipy.optimize import fmin_bfgs from nose.tools import raises, assert_almost_equal from sklearn.utils import ConvergenceWarning from sklearn.linear_model import LinearRegression, TheilSenRegressor from sklearn.linear_model.theil_sen import _spatial_median, _breakdown_point from sklearn.linear_model.theil_sen import _modified_weiszfeld_step from sklearn.utils.testing import assert_greater, assert_less @contextmanager def no_stdout_stderr(): old_stdout = sys.stdout old_stderr = sys.stderr sys.stdout = open(os.devnull, 'w') sys.stderr = open(os.devnull, 'w') yield sys.stdout.flush() sys.stderr.flush() sys.stdout = old_stdout sys.stderr = old_stderr def gen_toy_problem_1d(intercept=True): random_state = np.random.RandomState(0) # Linear model y = 3*x + N(2, 0.1**2) w = 3. if intercept: c = 2. n_samples = 50 else: c = 0.1 n_samples = 100 x = random_state.normal(size=n_samples) noise = 0.1 * random_state.normal(size=n_samples) y = w * x + c + noise # Add some outliers if intercept: x[42], y[42] = (-2, 4) x[43], y[43] = (-2.5, 8) x[33], y[33] = (2.5, 1) x[49], y[49] = (2.1, 2) else: x[42], y[42] = (-2, 4) x[43], y[43] = (-2.5, 8) x[53], y[53] = (2.5, 1) x[60], y[60] = (2.1, 2) x[72], y[72] = (1.8, -7) return x[:, np.newaxis], y, w, c def gen_toy_problem_2d(): random_state = np.random.RandomState(0) n_samples = 100 # Linear model y = 5*x_1 + 10*x_2 + N(1, 0.1**2) X = random_state.normal(size=(n_samples, 2)) w = np.array([5., 10.]) c = 1. 
noise = 0.1 * random_state.normal(size=n_samples) y = np.dot(X, w) + c + noise # Add some outliers n_outliers = n_samples // 10 ix = random_state.randint(0, n_samples, size=n_outliers) y[ix] = 50 * random_state.normal(size=n_outliers) return X, y, w, c def gen_toy_problem_4d(): random_state = np.random.RandomState(0) n_samples = 10000 # Linear model y = 5*x_1 + 10*x_2 + 42*x_3 + 7*x_4 + N(1, 0.1**2) X = random_state.normal(size=(n_samples, 4)) w = np.array([5., 10., 42., 7.]) c = 1. noise = 0.1 * random_state.normal(size=n_samples) y = np.dot(X, w) + c + noise # Add some outliers n_outliers = n_samples // 10 ix = random_state.randint(0, n_samples, size=n_outliers) y[ix] = 50 * random_state.normal(size=n_outliers) return X, y, w, c def test_modweiszfeld_step_1d(): X = np.array([1., 2., 3.]).reshape(3, 1) # Check startvalue is element of X and solution median = 2. new_y = _modified_weiszfeld_step(X, median) assert_array_almost_equal(new_y, median) # Check startvalue is not the solution y = 2.5 new_y = _modified_weiszfeld_step(X, y) assert_array_less(median, new_y) assert_array_less(new_y, y) # Check startvalue is not the solution but element of X y = 3. new_y = _modified_weiszfeld_step(X, y) assert_array_less(median, new_y) assert_array_less(new_y, y) # Check that a single vector is identity X = np.array([1., 2., 3.]).reshape(1, 3) y = X[0, ] new_y = _modified_weiszfeld_step(X, y) assert_array_equal(y, new_y) def test_modweiszfeld_step_2d(): X = np.array([0., 0., 1., 1., 0., 1.]).reshape(3, 2) y = np.array([0.5, 0.5]) # Check first two iterations new_y = _modified_weiszfeld_step(X, y) assert_array_almost_equal(new_y, np.array([1 / 3, 2 / 3])) new_y = _modified_weiszfeld_step(X, new_y) assert_array_almost_equal(new_y, np.array([0.2792408, 0.7207592])) # Check fix point y = np.array([0.21132505, 0.78867497]) new_y = _modified_weiszfeld_step(X, y) assert_array_almost_equal(new_y, y) def test_spatial_median_1d(): X = np.array([1., 2., 3.]).reshape(3, 1) true_median = 2. 
_, median = _spatial_median(X) assert_array_almost_equal(median, true_median) # Test larger problem and for exact solution in 1d case random_state = np.random.RandomState(0) X = random_state.randint(100, size=(1000, 1)) true_median = np.median(X.ravel()) _, median = _spatial_median(X) assert_array_equal(median, true_median) def test_spatial_median_2d(): X = np.array([0., 0., 1., 1., 0., 1.]).reshape(3, 2) _, median = _spatial_median(X, max_iter=100, tol=1.e-6) def cost_func(y): dists = np.array([norm(x - y) for x in X]) return np.sum(dists) # Check if median is solution of the Fermat-Weber location problem fermat_weber = fmin_bfgs(cost_func, median, disp=False) assert_array_almost_equal(median, fermat_weber) # Check when maximum iteration is exceeded a warning is emitted assert_warns(ConvergenceWarning, _spatial_median, X, max_iter=30, tol=0.) def test_theil_sen_1d(): X, y, w, c = gen_toy_problem_1d() # Check that Least Squares fails lstq = LinearRegression().fit(X, y) assert_greater(np.abs(lstq.coef_ - w), 0.9) # Check that Theil-Sen works theil_sen = TheilSenRegressor(random_state=0).fit(X, y) assert_array_almost_equal(theil_sen.coef_, w, 1) assert_array_almost_equal(theil_sen.intercept_, c, 1) def test_theil_sen_1d_no_intercept(): X, y, w, c = gen_toy_problem_1d(intercept=False) # Check that Least Squares fails lstq = LinearRegression(fit_intercept=False).fit(X, y) assert_greater(np.abs(lstq.coef_ - w - c), 0.5) # Check that Theil-Sen works theil_sen = TheilSenRegressor(fit_intercept=False, random_state=0).fit(X, y) assert_array_almost_equal(theil_sen.coef_, w + c, 1) assert_almost_equal(theil_sen.intercept_, 0.) 
def test_theil_sen_2d(): X, y, w, c = gen_toy_problem_2d() # Check that Least Squares fails lstq = LinearRegression().fit(X, y) assert_greater(norm(lstq.coef_ - w), 1.0) # Check that Theil-Sen works theil_sen = TheilSenRegressor(max_subpopulation=1e3, random_state=0).fit(X, y) assert_array_almost_equal(theil_sen.coef_, w, 1) assert_array_almost_equal(theil_sen.intercept_, c, 1) def test_calc_breakdown_point(): bp = _breakdown_point(1e10, 2) assert_less(np.abs(bp - 1 + 1/(np.sqrt(2))), 1.e-6) @raises(ValueError) def test_checksubparams_negative_subpopulation(): X, y, w, c = gen_toy_problem_1d() TheilSenRegressor(max_subpopulation=-1, random_state=0).fit(X, y) @raises(ValueError) def test_checksubparams_too_few_subsamples(): X, y, w, c = gen_toy_problem_1d() TheilSenRegressor(n_subsamples=1, random_state=0).fit(X, y) @raises(ValueError) def test_checksubparams_too_many_subsamples(): X, y, w, c = gen_toy_problem_1d() TheilSenRegressor(n_subsamples=101, random_state=0).fit(X, y) @raises(ValueError) def test_checksubparams_n_subsamples_if_less_samples_than_features(): random_state = np.random.RandomState(0) n_samples, n_features = 10, 20 X = random_state.normal(size=(n_samples, n_features)) y = random_state.normal(size=n_samples) TheilSenRegressor(n_subsamples=9, random_state=0).fit(X, y) def test_subpopulation(): X, y, w, c = gen_toy_problem_4d() theil_sen = TheilSenRegressor(max_subpopulation=250, random_state=0).fit(X, y) assert_array_almost_equal(theil_sen.coef_, w, 1) assert_array_almost_equal(theil_sen.intercept_, c, 1) def test_subsamples(): X, y, w, c = gen_toy_problem_4d() theil_sen = TheilSenRegressor(n_subsamples=X.shape[0], random_state=0).fit(X, y) lstq = LinearRegression().fit(X, y) # Check for exact the same results as Least Squares assert_array_almost_equal(theil_sen.coef_, lstq.coef_, 9) def test_verbosity(): X, y, w, c = gen_toy_problem_1d() # Check that Theil-Sen can be verbose with no_stdout_stderr(): TheilSenRegressor(verbose=True, 
random_state=0).fit(X, y) TheilSenRegressor(verbose=True, max_subpopulation=10, random_state=0).fit(X, y) def test_theil_sen_parallel(): X, y, w, c = gen_toy_problem_2d() # Check that Least Squares fails lstq = LinearRegression().fit(X, y) assert_greater(norm(lstq.coef_ - w), 1.0) # Check that Theil-Sen works theil_sen = TheilSenRegressor(n_jobs=-1, random_state=0, max_subpopulation=2e3).fit(X, y) assert_array_almost_equal(theil_sen.coef_, w, 1) assert_array_almost_equal(theil_sen.intercept_, c, 1) def test_less_samples_than_features(): random_state = np.random.RandomState(0) n_samples, n_features = 10, 20 X = random_state.normal(size=(n_samples, n_features)) y = random_state.normal(size=n_samples) # Check that Theil-Sen falls back to Least Squares if fit_intercept=False theil_sen = TheilSenRegressor(fit_intercept=False, random_state=0).fit(X, y) lstq = LinearRegression(fit_intercept=False).fit(X, y) assert_array_almost_equal(theil_sen.coef_, lstq.coef_, 12) # Check fit_intercept=True case. This will not be equal to the Least # Squares solution since the intercept is calculated differently. theil_sen = TheilSenRegressor(fit_intercept=True, random_state=0).fit(X, y) y_pred = theil_sen.predict(X) assert_array_almost_equal(y_pred, y, 12)
bsd-3-clause
schlos/eden
modules/templates/IRS/config.py
9
17978
# -*- coding: utf-8 -*- try: # Python 2.7 from collections import OrderedDict except: # Python 2.6 from gluon.contrib.simplejson.ordered_dict import OrderedDict from gluon import current from gluon.storage import Storage def config(settings): """ Template settings for an Incident Response System Initially targeting Sierra Leone's Ebola Response """ T = current.T settings.base.system_name = T("Sierra Leone Incident Response System") settings.base.system_name_short = T("SL IRS") # PrePopulate data settings.base.prepopulate = ("IRS", "default/users") # Theme (folder to use for views/layout.html) settings.base.theme = "IRS" # Authentication settings # Should users be allowed to register themselves? #settings.security.self_registration = False # Do new users need to verify their email address? settings.auth.registration_requires_verification = True # Do new users need to be approved by an administrator prior to being able to login? #settings.auth.registration_requires_approval = True #settings.auth.registration_requests_organisation = True # Approval emails get sent to all admins settings.mail.approver = "ADMIN" # Restrict the Location Selector to just certain countries # NB This can also be over-ridden for specific contexts later # e.g. Activities filtered to those of parent Project settings.gis.countries = ("SL",) # Uncomment to display the Map Legend as a floating DIV settings.gis.legend = "float" # Uncomment to Disable the Postcode selector in the LocationSelector settings.gis.postcode_selector = False # @ToDo: Vary by country (include in the gis_config!) 
# Uncomment to show the Print control: # http://eden.sahanafoundation.org/wiki/UserGuidelines/Admin/MapPrinting #settings.gis.print_button = True # L10n settings # Languages used in the deployment (used for Language Toolbar & GIS Locations) # http://www.loc.gov/standards/iso639-2/php/code_list.php settings.L10n.languages = OrderedDict([ # ("ar", "العربية"), # ("bs", "Bosanski"), ("en_gb", "English"), # ("fr", "Français"), # ("de", "Deutsch"), # ("el", "ελληνικά"), # ("es", "Español"), # ("it", "Italiano"), # ("ja", "日本語"), # ("km", "ភាសាខ្មែរ"), # ("ko", "한국어"), # ("ne", "नेपाली"), # Nepali # ("prs", "دری"), # Dari # ("ps", "پښتو"), # Pashto # ("pt", "Português"), # ("pt-br", "Português (Brasil)"), # ("ru", "русский"), # ("tet", "Tetum"), # ("tl", "Tagalog"), # ("ur", "اردو"), # ("vi", "Tiếng Việt"), # ("zh-cn", "中文 (简体)"), # ("zh-tw", "中文 (繁體)"), ]) # Default language for Language Toolbar (& GIS Locations in future) settings.L10n.default_language = "en_gb" # Uncomment to Hide the language toolbar settings.L10n.display_toolbar = False # Default timezone for users #settings.L10n.utc_offset = "+0100" # Number formats (defaults to ISO 31-0) # Decimal separator for numbers (defaults to ,) settings.L10n.decimal_separator = "." 
# Thousands separator for numbers (defaults to space) settings.L10n.thousands_separator = "," # Security Policy # http://eden.sahanafoundation.org/wiki/S3AAA#System-widePolicy # 1: Simple (default): Global as Reader, Authenticated as Editor # 2: Editor role required for Update/Delete, unless record owned by session # 3: Apply Controller ACLs # 4: Apply both Controller & Function ACLs # 5: Apply Controller, Function & Table ACLs # 6: Apply Controller, Function, Table ACLs and Entity Realm # 7: Apply Controller, Function, Table ACLs and Entity Realm + Hierarchy # 8: Apply Controller, Function, Table ACLs, Entity Realm + Hierarchy and Delegations # settings.security.policy = 7 # Organisation-ACLs # ============================================================================= # Project Settings # Uncomment this to use settings suitable for a global/regional organisation (e.g. DRR) settings.project.mode_3w = True # Uncomment this to use Codes for projects settings.project.codes = True # Uncomment this to enable Hazards in 3W projects #settings.project.hazards = True # Uncomment this to use multiple Budgets per project #settings.project.multiple_budgets = True # Uncomment this to use multiple Organisations per project settings.project.multiple_organisations = True # Uncomment this to enable Themes in 3W projects #settings.project.themes = True # Uncomment this to customise # Links to Filtered Components for Donors & Partners #settings.project.organisation_roles = { # 1: T("Lead Organization"), # 2: T("Partner Organization"), # 3: T("Donor"), # #4: T("Customer"), # T("Beneficiary")? # #5: T("Supplier"), # 9: T("Partner Organization"), # Needed for IFRC RMS interop ("Partner National Society") #} # ============================================================================= # Requests #settings.req.use_commit = False # Restrict the type of requests that can be made, valid values in the # list are ["Stock", "People", "Other"]. 
# If this is commented out then all types will be valid.
settings.req.req_type = ["Stock"]

# -----------------------------------------------------------------------------
def customise_hms_hospital_resource(r, tablename):
    """
    Customise the hms_hospital resource.

    Limits the facility type/status options to the subset used by this
    deployment and relabels them for context. For GeoJSON exports the
    status representation is suppressed to keep the payload small.

    Args:
        r: the S3Request
        tablename: "hms_hospital" (passed by the customise hook convention)
    """
    if r.representation == "geojson":
        # Don't represent the facility_status as numbers are smaller to xmit
        current.s3db.hms_status.facility_status.represent = None
        return

    # Limit options to just those used & relabel them for context
    hms_facility_type_opts = {
        1: T("Hospital"),
        #2: T("Field Hospital"),
        #3: T("Specialized Hospital"),
        #11: T("Health center"),
        #12: T("Health center with beds"),
        #13: T("Health center without beds"),
        #21: T("Dispensary"),
        #31: T("Long-term care"),
        #41: T("Emergency Treatment Centre"),
        41: T("ETC"),
        42: T("Triage"),
        43: T("Holding Center"),
        44: T("Transit Center"),
        #98: T("Other"),
        #99: T("Unknown"),
    }
    hms_facility_status_opts = {
        #1: T("Normal"),
        1: T("Functioning"),
        #2: T("Compromised"),
        #3: T("Evacuating"),
        4: T("Closed"),
        5: T("Pending"),
        #99: T("No Response")
    }
    from gluon import IS_EMPTY_OR, IS_IN_SET
    s3db = current.s3db
    NONE = current.messages["NONE"]
    field = s3db.hms_hospital.facility_type
    field.represent = lambda opt: hms_facility_type_opts.get(opt, NONE)
    field.requires = IS_EMPTY_OR(IS_IN_SET(hms_facility_type_opts))
    field = s3db.hms_status.facility_status
    field.represent = lambda opt: hms_facility_status_opts.get(opt, NONE)
    field.requires = IS_EMPTY_OR(IS_IN_SET(hms_facility_status_opts))

settings.customise_hms_hospital_resource = customise_hms_hospital_resource

# -----------------------------------------------------------------------------
def customise_disease_stats_data_resource(r, tablename):
    """
    Customise the disease_stats_data resource: add a TimePlot tab to the
    summary page and default the filters to the "Cases" parameter at
    admin level L2.
    """
    s3db = current.s3db
    # Load model & set defaults (the attribute access triggers the model load)
    table = s3db.disease_stats_data
    # Add a TimePlot tab to summary page
    summary = settings.get_ui_summary()
    settings.ui.summary = list(summary) + [{"name": "timeplot",
                                            "label": "Progression",
                                            "widgets": [{"method": "timeplot",
                                                         "ajax_init": True,
                                                         }],
                                            }]

    # Default parameter filter
    def default_parameter_filter(selector, tablename=None):
        # Resolve the parameter_id of the "Cases" statistic (None if absent)
        ptable = s3db.stats_parameter
        query = (ptable.deleted == False) & \
                (ptable.name == "Cases")
        row = current.db(query).select(ptable.parameter_id,
                                       limitby = (0, 1)).first()
        if row:
            return row.parameter_id
        else:
            return None

    # Set filter defaults
    resource = r.resource
    filter_widgets = resource.get_config("filter_widgets", [])
    for filter_widget in filter_widgets:
        if filter_widget.field == "parameter_id":
            filter_widget.opts.default = default_parameter_filter
        elif filter_widget.field == "location_id$level":
            filter_widget.opts.default = "L2"

settings.customise_disease_stats_data_resource = customise_disease_stats_data_resource

# -----------------------------------------------------------------------------
def customise_stats_demographic_data_resource(r, tablename):
    """
    Customise the stats_demographic_data resource: default the filters to
    the "Population Total" parameter, admin level L1 and year 2004.
    """
    s3db = current.s3db
    # Load model & set defaults (the attribute access triggers the model load)
    table = s3db.stats_demographic_data

    # Default parameter filter
    def default_parameter_filter(selector, tablename=None):
        # Resolve the parameter_id of "Population Total" (None if absent)
        ptable = s3db.stats_parameter
        query = (ptable.deleted == False) & \
                (ptable.name == "Population Total")
        row = current.db(query).select(ptable.parameter_id,
                                       limitby = (0, 1)).first()
        if row:
            return row.parameter_id
        else:
            return None

    # Set filter defaults
    resource = r.resource
    filter_widgets = resource.get_config("filter_widgets", [])
    for filter_widget in filter_widgets:
        if filter_widget.field == "parameter_id":
            filter_widget.opts.default = default_parameter_filter
        elif filter_widget.field == "location_id$level":
            filter_widget.opts.default = "L1"
        elif filter_widget.field == "year":
            filter_widget.opts.default = 2004

settings.customise_stats_demographic_data_resource = customise_stats_demographic_data_resource

# -----------------------------------------------------------------------------
# Comment/uncomment modules here to disable/enable them
# Modules menu is defined in modules/eden/menu.py
settings.modules = OrderedDict([
    # Core modules which shouldn't be disabled
    ("default", Storage(
# NOTE: interior of the settings.modules OrderedDict opened on the previous line
        name_nice = T("Home"),
        restricted = False,  # Use ACLs to control access to this module
        access = None,  # All Users (inc Anonymous) can see this module in the default menu & access the controller
        module_type = None  # This item is not shown in the menu
    )),
    ("admin", Storage(
        name_nice = T("Administration"),
        #description = "Site Administration",
        restricted = True,
        access = "|1|",  # Only Administrators can see this module in the default menu & access the controller
        module_type = None  # This item is handled separately for the menu
    )),
    ("appadmin", Storage(
        name_nice = T("Administration"),
        #description = "Site Administration",
        restricted = True,
        module_type = None  # No Menu
    )),
    ("errors", Storage(
        name_nice = T("Ticket Viewer"),
        #description = "Needed for Breadcrumbs",
        restricted = False,
        module_type = None  # No Menu
    )),
    #("sync", Storage(
    #    name_nice = T("Synchronization"),
    #    #description = "Synchronization",
    #    restricted = True,
    #    access = "|1|",  # Only Administrators can see this module in the default menu & access the controller
    #    module_type = None  # This item is handled separately for the menu
    #)),
    #("tour", Storage(
    #    name_nice = T("Guided Tour Functionality"),
    #    module_type = None,
    #)),
    #("translate", Storage(
    #    name_nice = T("Translation Functionality"),
    #    #description = "Selective translation of strings based on module.",
    #    module_type = None,
    #)),
    ("gis", Storage(
        name_nice = T("Map"),
        #description = "Situation Awareness & Geospatial Analysis",
        restricted = True,
        module_type = 1,  # 1st item in the menu
    )),
    ("pr", Storage(
        name_nice = T("Person Registry"),
        #description = "Central point to record details on People",
        restricted = True,
        access = "|1|",  # Only Administrators can see this module in the default menu (access to controller is possible to all still)
        module_type = 10
    )),
    ("org", Storage(
        name_nice = T("Organizations"),
        #description = 'Lists "who is doing what & where". Allows relief agencies to coordinate their activities',
        restricted = True,
        module_type = 10
    )),
    ("hrm", Storage(
        name_nice = T("Staff"),
        #description = "Human Resources Management",
        restricted = True,
        module_type = 3,
    )),
    #("vol", Storage(
    #    name_nice = T("Volunteers"),
    #    #description = "Human Resources Management",
    #    restricted = True,
    #    module_type = 2,
    #)),
    ("cms", Storage(
        name_nice = T("Content Management"),
        #description = "Content Management System",
        restricted = True,
        module_type = 10,
    )),
    ("doc", Storage(
        name_nice = T("Documents"),
        #description = "A library of digital resources, such as photos, documents and reports",
        restricted = True,
        module_type = 10,
    )),
    ("msg", Storage(
        name_nice = T("Messaging"),
        #description = "Sends & Receives Alerts via Email & SMS",
        restricted = True,
        # The user-visible functionality of this module isn't normally required. Rather it's main purpose is to be accessed from other modules.
        module_type = None,
    )),
    ("event", Storage(
        name_nice = T("Events"),
        #description = "Activate Events (e.g. from Scenario templates) for allocation of appropriate Resources (Human, Assets & Facilities).",
        restricted = True,
        module_type = 2,
    )),
    # Specific for Sierra Leone:
    ("disease", Storage(
        name_nice = T("Disease"),
        restricted = True,
        module_type = 10
    )),
    ("hms", Storage(
        name_nice = T("Hospitals"),
        #description = "Helps to monitor status of hospitals",
        restricted = True,
        module_type = 10
    )),
    ("dvi", Storage(
        name_nice = T("Burials"),
        restricted = True,
        module_type = 10
    )),
    ("supply", Storage(
        name_nice = T("Supply Chain Management"),
        #description = "Used within Inventory Management, Request Management and Asset Management",
        restricted = True,
        module_type = None,  # Not displayed
    )),
    ("asset", Storage(
        name_nice = T("Assets"),
        #description = "Recording and Assigning Assets",
        restricted = True,
        module_type = None,  # Just used for Vehicles
    )),
    # Vehicle depends on Assets
    ("vehicle", Storage(
        name_nice = T("Vehicles"),
        #description = "Manage Vehicles",
        restricted = True,
        module_type = 4,
    )),
    # Enable for org_resource?
    ("stats", Storage(
        name_nice = T("Statistics"),
        #description = "Manages statistics",
        restricted = True,
        module_type = 10,
    )),
    ("transport", Storage(
        name_nice = T("Transport"),
        restricted = True,
        module_type = 10,
    )),
    # Enabled as-requested by user
    ("inv", Storage(
        name_nice = T("Warehouses"),
        #description = "Receiving and Sending Items",
        restricted = True,
        module_type = 4
    )),
    ("req", Storage(
        name_nice = T("Requests"),
        #description = "Manage requests for supplies, assets, staff or other resources. Matches against Inventories where supplies are requested.",
        restricted = True,
        module_type = 10,
    )),
    ("project", Storage(
        name_nice = T("Projects"),
        #description = "Tracking of Projects, Activities and Tasks",
        restricted = True,
        module_type = 2
    )),
    #("cr", Storage(
    #    name_nice = T("Shelters"),
    #    #description = "Tracks the location, capacity and breakdown of victims in Shelters",
    #    restricted = True,
    #    module_type = 10
    #)),
    #("dvr", Storage(
    #    name_nice = T("Disaster Victim Registry"),
    #    #description = "Allow affected individuals & households to register to receive compensation and distributions",
    #    restricted = True,
    #    module_type = 10,
    #)),
])

# END =========================================================================
mit
shadowbq/dot.atom
home/.atom/packages/wakatime/lib/wakatime-master/wakatime/packages/pygments_py3/pygments/lexers/actionscript.py
72
11179
# -*- coding: utf-8 -*- """ pygments.lexers.actionscript ~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Lexers for ActionScript and MXML. :copyright: Copyright 2006-2014 by the Pygments team, see AUTHORS. :license: BSD, see LICENSE for details. """ import re from pygments.lexer import RegexLexer, bygroups, using, this, words, default from pygments.token import Text, Comment, Operator, Keyword, Name, String, \ Number, Punctuation __all__ = ['ActionScriptLexer', 'ActionScript3Lexer', 'MxmlLexer'] class ActionScriptLexer(RegexLexer): """ For ActionScript source code. .. versionadded:: 0.9 """ name = 'ActionScript' aliases = ['as', 'actionscript'] filenames = ['*.as'] mimetypes = ['application/x-actionscript', 'text/x-actionscript', 'text/actionscript'] flags = re.DOTALL tokens = { 'root': [ (r'\s+', Text), (r'//.*?\n', Comment.Single), (r'/\*.*?\*/', Comment.Multiline), (r'/(\\\\|\\/|[^/\n])*/[gim]*', String.Regex), (r'[~^*!%&<>|+=:;,/?\\-]+', Operator), (r'[{}\[\]();.]+', Punctuation), (words(( 'case', 'default', 'for', 'each', 'in', 'while', 'do', 'break', 'return', 'continue', 'if', 'else', 'throw', 'try', 'catch', 'var', 'with', 'new', 'typeof', 'arguments', 'instanceof', 'this', 'switch'), suffix=r'\b'), Keyword), (words(( 'class', 'public', 'final', 'internal', 'native', 'override', 'private', 'protected', 'static', 'import', 'extends', 'implements', 'interface', 'intrinsic', 'return', 'super', 'dynamic', 'function', 'const', 'get', 'namespace', 'package', 'set'), suffix=r'\b'), Keyword.Declaration), (r'(true|false|null|NaN|Infinity|-Infinity|undefined|Void)\b', Keyword.Constant), (words(( 'Accessibility', 'AccessibilityProperties', 'ActionScriptVersion', 'ActivityEvent', 'AntiAliasType', 'ApplicationDomain', 'AsBroadcaster', 'Array', 'AsyncErrorEvent', 'AVM1Movie', 'BevelFilter', 'Bitmap', 'BitmapData', 'BitmapDataChannel', 'BitmapFilter', 'BitmapFilterQuality', 'BitmapFilterType', 'BlendMode', 'BlurFilter', 'Boolean', 'ByteArray', 'Camera', 'Capabilities', 'CapsStyle', 'Class', 
'Color', 'ColorMatrixFilter', 'ColorTransform', 'ContextMenu', 'ContextMenuBuiltInItems', 'ContextMenuEvent', 'ContextMenuItem', 'ConvultionFilter', 'CSMSettings', 'DataEvent', 'Date', 'DefinitionError', 'DeleteObjectSample', 'Dictionary', 'DisplacmentMapFilter', 'DisplayObject', 'DisplacmentMapFilterMode', 'DisplayObjectContainer', 'DropShadowFilter', 'Endian', 'EOFError', 'Error', 'ErrorEvent', 'EvalError', 'Event', 'EventDispatcher', 'EventPhase', 'ExternalInterface', 'FileFilter', 'FileReference', 'FileReferenceList', 'FocusDirection', 'FocusEvent', 'Font', 'FontStyle', 'FontType', 'FrameLabel', 'FullScreenEvent', 'Function', 'GlowFilter', 'GradientBevelFilter', 'GradientGlowFilter', 'GradientType', 'Graphics', 'GridFitType', 'HTTPStatusEvent', 'IBitmapDrawable', 'ID3Info', 'IDataInput', 'IDataOutput', 'IDynamicPropertyOutput' 'IDynamicPropertyWriter', 'IEventDispatcher', 'IExternalizable', 'IllegalOperationError', 'IME', 'IMEConversionMode', 'IMEEvent', 'int', 'InteractiveObject', 'InterpolationMethod', 'InvalidSWFError', 'InvokeEvent', 'IOError', 'IOErrorEvent', 'JointStyle', 'Key', 'Keyboard', 'KeyboardEvent', 'KeyLocation', 'LineScaleMode', 'Loader', 'LoaderContext', 'LoaderInfo', 'LoadVars', 'LocalConnection', 'Locale', 'Math', 'Matrix', 'MemoryError', 'Microphone', 'MorphShape', 'Mouse', 'MouseEvent', 'MovieClip', 'MovieClipLoader', 'Namespace', 'NetConnection', 'NetStatusEvent', 'NetStream', 'NewObjectSample', 'Number', 'Object', 'ObjectEncoding', 'PixelSnapping', 'Point', 'PrintJob', 'PrintJobOptions', 'PrintJobOrientation', 'ProgressEvent', 'Proxy', 'QName', 'RangeError', 'Rectangle', 'ReferenceError', 'RegExp', 'Responder', 'Sample', 'Scene', 'ScriptTimeoutError', 'Security', 'SecurityDomain', 'SecurityError', 'SecurityErrorEvent', 'SecurityPanel', 'Selection', 'Shape', 'SharedObject', 'SharedObjectFlushStatus', 'SimpleButton', 'Socket', 'Sound', 'SoundChannel', 'SoundLoaderContext', 'SoundMixer', 'SoundTransform', 'SpreadMethod', 'Sprite', 
'StackFrame', 'StackOverflowError', 'Stage', 'StageAlign', 'StageDisplayState', 'StageQuality', 'StageScaleMode', 'StaticText', 'StatusEvent', 'String', 'StyleSheet', 'SWFVersion', 'SyncEvent', 'SyntaxError', 'System', 'TextColorType', 'TextField', 'TextFieldAutoSize', 'TextFieldType', 'TextFormat', 'TextFormatAlign', 'TextLineMetrics', 'TextRenderer', 'TextSnapshot', 'Timer', 'TimerEvent', 'Transform', 'TypeError', 'uint', 'URIError', 'URLLoader', 'URLLoaderDataFormat', 'URLRequest', 'URLRequestHeader', 'URLRequestMethod', 'URLStream', 'URLVariabeles', 'VerifyError', 'Video', 'XML', 'XMLDocument', 'XMLList', 'XMLNode', 'XMLNodeType', 'XMLSocket', 'XMLUI'), suffix=r'\b'), Name.Builtin), (words(( 'decodeURI', 'decodeURIComponent', 'encodeURI', 'escape', 'eval', 'isFinite', 'isNaN', 'isXMLName', 'clearInterval', 'fscommand', 'getTimer', 'getURL', 'getVersion', 'parseFloat', 'parseInt', 'setInterval', 'trace', 'updateAfterEvent', 'unescape'), suffix=r'\b'), Name.Function), (r'[$a-zA-Z_]\w*', Name.Other), (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-f]+', Number.Hex), (r'[0-9]+', Number.Integer), (r'"(\\\\|\\"|[^"])*"', String.Double), (r"'(\\\\|\\'|[^'])*'", String.Single), ] } class ActionScript3Lexer(RegexLexer): """ For ActionScript 3 source code. .. versionadded:: 0.11 """ name = 'ActionScript 3' aliases = ['as3', 'actionscript3'] filenames = ['*.as'] mimetypes = ['application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3'] identifier = r'[$a-zA-Z_]\w*' typeidentifier = identifier + '(?:\.<\w+>)?' 
flags = re.DOTALL | re.MULTILINE tokens = { 'root': [ (r'\s+', Text), (r'(function\s+)(' + identifier + r')(\s*)(\()', bygroups(Keyword.Declaration, Name.Function, Text, Operator), 'funcparams'), (r'(var|const)(\s+)(' + identifier + r')(\s*)(:)(\s*)(' + typeidentifier + r')', bygroups(Keyword.Declaration, Text, Name, Text, Punctuation, Text, Keyword.Type)), (r'(import|package)(\s+)((?:' + identifier + r'|\.)+)(\s*)', bygroups(Keyword, Text, Name.Namespace, Text)), (r'(new)(\s+)(' + typeidentifier + r')(\s*)(\()', bygroups(Keyword, Text, Keyword.Type, Text, Operator)), (r'//.*?\n', Comment.Single), (r'/\*.*?\*/', Comment.Multiline), (r'/(\\\\|\\/|[^\n])*/[gisx]*', String.Regex), (r'(\.)(' + identifier + r')', bygroups(Operator, Name.Attribute)), (r'(case|default|for|each|in|while|do|break|return|continue|if|else|' r'throw|try|catch|with|new|typeof|arguments|instanceof|this|' r'switch|import|include|as|is)\b', Keyword), (r'(class|public|final|internal|native|override|private|protected|' r'static|import|extends|implements|interface|intrinsic|return|super|' r'dynamic|function|const|get|namespace|package|set)\b', Keyword.Declaration), (r'(true|false|null|NaN|Infinity|-Infinity|undefined|void)\b', Keyword.Constant), (r'(decodeURI|decodeURIComponent|encodeURI|escape|eval|isFinite|isNaN|' r'isXMLName|clearInterval|fscommand|getTimer|getURL|getVersion|' r'isFinite|parseFloat|parseInt|setInterval|trace|updateAfterEvent|' r'unescape)\b', Name.Function), (identifier, Name), (r'[0-9][0-9]*\.[0-9]+([eE][0-9]+)?[fd]?', Number.Float), (r'0x[0-9a-f]+', Number.Hex), (r'[0-9]+', Number.Integer), (r'"(\\\\|\\"|[^"])*"', String.Double), (r"'(\\\\|\\'|[^'])*'", String.Single), (r'[~^*!%&<>|+=:;,/?\\{}\[\]().-]+', Operator), ], 'funcparams': [ (r'\s+', Text), (r'(\s*)(\.\.\.)?(' + identifier + r')(\s*)(:)(\s*)(' + typeidentifier + r'|\*)(\s*)', bygroups(Text, Punctuation, Name, Text, Operator, Text, Keyword.Type, Text), 'defval'), (r'\)', Operator, 'type') ], 'type': [ (r'(\s*)(:)(\s*)(' 
+ typeidentifier + r'|\*)', bygroups(Text, Operator, Text, Keyword.Type), '#pop:2'), (r'\s+', Text, '#pop:2'), default('#pop:2') ], 'defval': [ (r'(=)(\s*)([^(),]+)(\s*)(,?)', bygroups(Operator, Text, using(this), Text, Operator), '#pop'), (r',', Operator, '#pop'), default('#pop') ] } def analyse_text(text): if re.match(r'\w+\s*:\s*\w', text): return 0.3 return 0 class MxmlLexer(RegexLexer): """ For MXML markup. Nested AS3 in <script> tags is highlighted by the appropriate lexer. .. versionadded:: 1.1 """ flags = re.MULTILINE | re.DOTALL name = 'MXML' aliases = ['mxml'] filenames = ['*.mxml'] mimetimes = ['text/xml', 'application/xml'] tokens = { 'root': [ ('[^<&]+', Text), (r'&\S*?;', Name.Entity), (r'(\<\!\[CDATA\[)(.*?)(\]\]\>)', bygroups(String, using(ActionScript3Lexer), String)), ('<!--', Comment, 'comment'), (r'<\?.*?\?>', Comment.Preproc), ('<![^>]*>', Comment.Preproc), (r'<\s*[\w:.-]+', Name.Tag, 'tag'), (r'<\s*/\s*[\w:.-]+\s*>', Name.Tag), ], 'comment': [ ('[^-]+', Comment), ('-->', Comment, '#pop'), ('-', Comment), ], 'tag': [ (r'\s+', Text), (r'[\w.:-]+\s*=', Name.Attribute, 'attr'), (r'/?\s*>', Name.Tag, '#pop'), ], 'attr': [ ('\s+', Text), ('".*?"', String, '#pop'), ("'.*?'", String, '#pop'), (r'[^\s>]+', String, '#pop'), ], }
mit
kzvyahin/cfme_tests
sprout/appliances/models.py
1
51562
# -*- coding: utf-8 -*- import base64 import re import yaml try: import cPickle as pickle except ImportError: import pickle # NOQA import mgmtsystem from cached_property import cached_property from celery import chain from contextlib import contextmanager from datetime import timedelta, date from django.contrib.auth.models import User, Group as DjangoGroup from django.core.exceptions import ObjectDoesNotExist from django.db import models, transaction from django.db.models import Q from django.db.models.signals import pre_save from django.dispatch import receiver from django.utils import timezone from sprout import critical_section, redis from sprout.log import create_logger from utils.appliance import Appliance as CFMEAppliance, IPAppliance from utils.bz import Bugzilla from utils.conf import cfme_data from utils.providers import get_mgmt from utils.timeutil import nice_seconds from utils.version import Version # Monkey patch the User object in order to have nicer checks def has_quotas(self): try: self.quotas except ObjectDoesNotExist: return False else: return True def is_a_bot(self): return self.last_name.lower() == "bot" User.has_quotas = property(has_quotas) User.is_a_bot = property(is_a_bot) def apply_if_not_none(o, meth, *args, **kwargs): if o is None: return None return getattr(o, meth)(*args, **kwargs) class MetadataMixin(models.Model): class Meta: abstract = True object_meta_data = models.TextField(default=yaml.dump({})) def reload(self): new_self = type(self).objects.get(pk=self.pk) self.__dict__.update(new_self.__dict__) @property @contextmanager def metadata_lock(self): with critical_section("metadata-({})[{}]".format(type(self).__name__, str(self.pk))): yield @property def metadata(self): return yaml.load(self.object_meta_data) @metadata.setter def metadata(self, value): if not isinstance(value, dict): raise TypeError("You can store only dict in metadata!") self.object_meta_data = yaml.dump(value) @property @contextmanager def edit_metadata(self): with 
transaction.atomic(): with self.metadata_lock: o = type(self).objects.get(pk=self.pk) metadata = o.metadata yield metadata o.metadata = metadata o.save() self.reload() @property def logger(self): return create_logger(self) @classmethod def class_logger(cls, id=None): return create_logger(cls, id) class DelayedProvisionTask(MetadataMixin): pool = models.ForeignKey("AppliancePool", on_delete=models.CASCADE) lease_time = models.IntegerField(null=True, blank=True) provider_to_avoid = models.ForeignKey( "Provider", null=True, blank=True, on_delete=models.CASCADE) def __unicode__(self): return u"Task {}: Provision on {}, lease time {}, avoid provider {}".format( self.id, self.pool.id, self.lease_time, self.provider_to_avoid.id if self.provider_to_avoid is not None else "---") class Provider(MetadataMixin): id = models.CharField(max_length=32, primary_key=True, help_text="Provider's key in YAML.") working = models.BooleanField(default=False, help_text="Whether provider is available.") num_simultaneous_provisioning = models.IntegerField(default=5, help_text="How many simultaneous background provisioning tasks can run on this provider.") num_simultaneous_configuring = models.IntegerField(default=1, help_text="How many simultaneous template configuring tasks can run on this provider.") appliance_limit = models.IntegerField( null=True, help_text="Hard limit of how many appliances can run on this provider") disabled = models.BooleanField(default=False, help_text="We can disable providers if we want.") hidden = models.BooleanField( default=False, help_text='We can hide providers if that is required.') user_groups = models.ManyToManyField( DjangoGroup, blank=True, help_text='We can specify the providers that are tied to a specific user group.') allow_renaming = models.BooleanField( default=False, help_text="Whether this provider can rename appliances.") container_base_template = models.CharField( max_length=64, null=True, blank=True, help_text='Base tempalte for containerized 
ManageIQ deployment.') @property def is_working(self): return self.working and not self.disabled @property def existing_templates(self): return self.provider_templates.filter(exists=True) @property def api(self): return get_mgmt(self.id) @property def num_currently_provisioning(self): return len( Appliance.objects.filter( ready=False, marked_for_deletion=False, template__provider=self, ip_address=None)) @property def num_templates_preparing(self): return len(Template.objects.filter(provider=self, ready=False)) @property def remaining_configuring_slots(self): result = self.num_simultaneous_configuring - self.num_templates_preparing if result < 0: return 0 return result @property def remaining_appliance_slots(self): if self.appliance_limit is None: return 1 result = self.appliance_limit - self.num_currently_managing if result < 0: return 0 return result @property def num_currently_managing(self): return len(Appliance.objects.filter(template__provider=self)) @property def currently_managed_appliances(self): return Appliance.objects.filter(template__provider=self) @property def remaining_provisioning_slots(self): result = self.num_simultaneous_provisioning - self.num_currently_provisioning if result < 0: return 0 # Take the appliance limit into account if self.appliance_limit is None: return result else: free_appl_slots = self.appliance_limit - self.num_currently_managing if free_appl_slots < 0: free_appl_slots = 0 return min(free_appl_slots, result) @property def free(self): return self.remaining_provisioning_slots > 0 @property def provisioning_load(self): if self.num_simultaneous_provisioning == 0: return 1.0 # prevent division by zero return float(self.num_currently_provisioning) / float(self.num_simultaneous_provisioning) @property def appliance_load(self): if self.appliance_limit is None or self.appliance_limit == 0: return 0.0 return float(self.num_currently_managing) / float(self.appliance_limit) @property def load(self): """Load for sorting""" if 
self.appliance_limit is None: return self.provisioning_load else: return self.appliance_load @classmethod def get_available_provider_keys(cls): return cfme_data.get("management_systems", {}).keys() @property def provider_data(self): return cfme_data.get("management_systems", {}).get(self.id, {}) @property def ip_address(self): return self.provider_data.get("ipaddress") @property def templates(self): return self.metadata.get("templates", []) @templates.setter def templates(self, value): with self.edit_metadata as metadata: metadata["templates"] = value @property def template_name_length(self): return self.metadata.get("template_name_length", None) @template_name_length.setter def template_name_length(self, value): with self.edit_metadata as metadata: metadata["template_name_length"] = value @property def appliances_manage_this_provider(self): return self.metadata.get("appliances_manage_this_provider", []) @appliances_manage_this_provider.setter def appliances_manage_this_provider(self, value): with self.edit_metadata as metadata: metadata["appliances_manage_this_provider"] = value @property def g_appliances_manage_this_provider(self): for appl_id in self.appliances_manage_this_provider: try: yield Appliance.objects.get(id=appl_id) except ObjectDoesNotExist: continue @property def user_usage(self): per_user_usage = {} for appliance in Appliance.objects.filter(template__provider=self): if appliance.owner is None: continue owner = appliance.owner if owner not in per_user_usage: per_user_usage[owner] = 1 else: per_user_usage[owner] += 1 per_user_usage = per_user_usage.items() per_user_usage.sort(key=lambda item: item[1], reverse=True) return per_user_usage @property def free_shepherd_appliances(self): return Appliance.objects.filter( template__provider=self, appliance_pool=None, marked_for_deletion=False, ready=True) @classmethod def complete_user_usage(cls, user_perspective=None): result = {} if user_perspective is None or user_perspective.is_superuser or 
user_perspective.is_staff: perspective_filter = {} else: perspective_filter = {'user_groups__in': user_perspective.groups.all()} for provider in cls.objects.filter(hidden=False, **perspective_filter): for user, count in provider.user_usage: if user not in result: result[user] = 0 result[user] += count result = result.items() result.sort(key=lambda item: item[1], reverse=True) return result def cleanup(self): """Put any cleanup tasks that might help the application stability here""" self.logger.info("Running cleanup on provider {}".format(self.id)) if isinstance(self.api, mgmtsystem.openstack.OpenstackSystem): # Openstack cleanup # Clean up the floating IPs for floating_ip in self.api.api.floating_ips.findall(fixed_ip=None): self.logger.info( "Cleaning up the {} floating ip {}".format(self.id, floating_ip.ip)) try: floating_ip.delete() except Exception as e: self.logger.exception(e) def vnc_console_link_for(self, appliance): if appliance.uuid is None: return None if isinstance(self.api, mgmtsystem.openstack.OpenstackSystem): return "http://{}/dashboard/project/instances/{}/?tab=instance_details__console".format( self.ip_address, appliance.uuid ) else: return None def user_can_use(self, user): groups = self.user_groups.all() return any(user_group in groups for user_group in user.groups.all()) def user_can_see(self, user): return user.is_staff or user.is_superuser or self.user_can_use(user) def __unicode__(self): return "{} {}".format(type(self).__name__, self.id) @receiver(pre_save, sender=Provider) def disable_if_hidden(sender, instance, **kwargs): if instance.hidden: instance.disabled = True class Group(MetadataMixin): id = models.CharField(max_length=32, primary_key=True, help_text="Group name as trackerbot says. (eg. 
upstream, downstream-53z, ...)") template_obsolete_days = models.IntegerField( null=True, blank=True, help_text="Templates older than X days won't be loaded into sprout") template_obsolete_days_delete = models.BooleanField( default=False, help_text="If template_obsolete_days set, this will enable deletion of obsolete templates" " using that metric. WARNING! Use with care. Best use for upstream templates.") templates_url = models.TextField( blank=True, null=True, help_text='Location of templates. Currently used for containers.') @property def obsolete_templates(self): """Return a list of obsolete templates. Ignores the latest one even if it was obsolete by the means of days.""" if self.template_obsolete_days is None: return None # Preconfigured because we presume that if the preconfigured works, so does unconfigured one latest_working_template_date = Template.objects.filter( exists=True, usable=True, ready=True, preconfigured=True, template_group=self).order_by("-date")[0].date latest_working_template_ids = [ tpl.id for tpl in Template.objects.filter( exists=True, usable=True, ready=True, template_group=self, date=latest_working_template_date)] return Template.objects.filter( exists=True, date__lt=date.today() - timedelta(days=self.template_obsolete_days), template_group=self).exclude(id__in=latest_working_template_ids).order_by("date") @property def templates(self): return Template.objects.filter(template_group=self).order_by("-date", "provider__id") @property def existing_templates(self): return self.templates.filter(exists=True) @property def appliances(self): return Appliance.objects.filter(template__template_group=self) @property def zstreams_versions(self): """Returns a dict with structure ``{zstream: [version1, version2, ...]``""" zstreams = {} for version in Template.get_versions(template_group=self, exists=True): zstream = ".".join(version.split(".")[:3]) if zstream not in zstreams: zstreams[zstream] = [] zstreams[zstream].append(version) return zstreams 
def pick_versions_to_delete(self): to_delete = {} for zstream, versions in self.zstreams_versions.iteritems(): versions = sorted(versions, key=Version, reverse=True) versions_to_delete = versions[1:] if versions_to_delete: to_delete[zstream] = versions[1:] return to_delete def __unicode__(self): return "{} {}".format( type(self).__name__, self.id) class GroupShepherd(MetadataMixin): template_group = models.ForeignKey(Group, on_delete=models.CASCADE) user_group = models.ForeignKey(DjangoGroup, on_delete=models.CASCADE) template_pool_size = models.IntegerField(default=0, help_text="How many appliances to keep spinned for quick taking.") unconfigured_template_pool_size = models.IntegerField(default=0, help_text="How many appliances to keep spinned for quick taking - unconfigured ones.") @property def appliances(self): return Appliance.objects.filter( template__template_group=self.template_group, template__provider__user_groups=self.user_group) def get_fulfillment_percentage(self, preconfigured): """Return percentage of fulfillment of the group shepherd. Values between 0-100, can be over 100 if there are more than required. Args: preconfigured: Whether to check the pure ones or configured ones. 
""" appliances_in_shepherd = len( self.appliances.filter( template__preconfigured=preconfigured, appliance_pool=None, marked_for_deletion=False)) wanted_pool_size = ( self.template_pool_size if preconfigured else self.unconfigured_template_pool_size) if wanted_pool_size == 0: return 100 return int(round((float(appliances_in_shepherd) / float(wanted_pool_size)) * 100.0)) def shepherd_appliances(self, preconfigured=True): return self.appliances.filter( appliance_pool=None, ready=True, marked_for_deletion=False, template__preconfigured=preconfigured) @property def configured_shepherd_appliances(self): return self.shepherd_appliances(True) @property def unconfigured_shepherd_appliances(self): return self.shepherd_appliances(False) def __unicode__(self): return "{} {}/{} (pool size={}/{})".format( type(self).__name__, self.template_group.id, self.user_group.name, self.template_pool_size, self.unconfigured_template_pool_size) class Template(MetadataMixin): provider = models.ForeignKey( Provider, on_delete=models.CASCADE, help_text="Where does this template reside", related_name="provider_templates") template_group = models.ForeignKey( Group, on_delete=models.CASCADE, help_text="Which group the template belongs to.") version = models.CharField(max_length=32, null=True, help_text="Downstream version.") date = models.DateField(help_text="Template build date (original).") original_name = models.CharField(max_length=64, help_text="Template's original name.") name = models.CharField(max_length=64, help_text="Template's name as it resides on provider.") status = models.TextField(default="Template inserted into the system") status_changed = models.DateTimeField(auto_now_add=True) ready = models.BooleanField(default=False, help_text="Template is ready-to-be-used") exists = models.BooleanField(default=True, help_text="Template exists in the provider.") usable = models.BooleanField(default=False, help_text="Template is marked as usable") preconfigured = 
models.BooleanField(default=True, help_text="Is prepared for immediate use?") suggested_delete = models.BooleanField( default=False, help_text="Whether Sprout suggests deleting this template.") parent_template = models.ForeignKey( "self", blank=True, null=True, related_name="child_templates", help_text="What was source of this template?") container = models.CharField( max_length=32, null=True, blank=True, help_text=( 'Whether the appliance is located in a container in the VM. ' 'This then specifies the container name.')) ga_released = models.BooleanField(default=False) @property def provider_api(self): return self.provider.api @property def provider_name(self): return self.provider.id @property def exists_in_provider(self): return self.name in self.provider_api.list_template() @property def exists_and_ready(self): return self.exists and self.ready def user_can_use(self, user): return self.provider.user_can_use(user) def user_can_see(self, user): return self.provider.user_can_see(user) def set_status(self, status): with transaction.atomic(): template = Template.objects.get(id=self.id) template.status = status template.status_changed = timezone.now() template.save() self.logger.info("{}: {}".format(self.pk, status)) @property def cfme(self): return CFMEAppliance(self.provider_name, self.name, container=self.container) @property def can_be_deleted(self): return self.exists and len(self.appliances) == 0 and not self.ga_released @property def appliances(self): return Appliance.objects.filter(template=self) @property def temporary_name(self): return self.metadata.get("temporary_name", None) @temporary_name.setter def temporary_name(self, name): with self.edit_metadata as metadata: metadata["temporary_name"] = name @temporary_name.deleter def temporary_name(self): with self.edit_metadata as metadata: if "temporary_name" in metadata: del metadata["temporary_name"] @classmethod def get_versions(cls, *filters, **kwfilters): versions = [] for version in 
cls.objects.filter(*filters, **kwfilters).values('version').distinct(): v = version.values()[0] if v is not None: versions.append(v) versions.sort(key=Version, reverse=True) return versions @classmethod def get_dates(cls, *filters, **kwfilters): dates = map( lambda d: d.values()[0], cls.objects.filter(*filters, **kwfilters).values('date').distinct()) dates.sort(reverse=True) return dates @classmethod def ga_version(cls, version): return bool(cls.objects.filter(version=version, ga_released=True)) def __unicode__(self): return "{} {}:{} @ {}".format( type(self).__name__, self.version, self.name, self.provider.id) class Appliance(MetadataMixin): class Power(object): ON = "on" OFF = "off" SUSPENDED = "suspended" REBOOTING = "rebooting" LOCKED = "locked" UNKNOWN = "unknown" ORPHANED = "orphaned" CREATION_FAILED = 'creation_failed' CUSTOMIZATION_FAILED = 'customization_failed' ERROR = 'error' POWER_ICON_MAPPING = { Power.ON: 'play', Power.OFF: 'stop', Power.SUSPENDED: 'pause', Power.REBOOTING: 'repeat', Power.LOCKED: 'lock', Power.UNKNOWN: 'exclamation-sign', Power.ORPHANED: 'exclamation-sign', Power.CREATION_FAILED: 'remove', Power.CUSTOMIZATION_FAILED: 'remove', Power.ERROR: 'remove', } BAD_POWER_STATES = { Power.UNKNOWN, Power.ORPHANED, Power.CREATION_FAILED, Power.CUSTOMIZATION_FAILED, Power.ERROR} POWER_STATES_MAPPING = { # vSphere "poweredOn": Power.ON, "poweredOff": Power.OFF, "suspended": Power.SUSPENDED, # RHEV "up": Power.ON, "down": Power.OFF, "suspended": Power.SUSPENDED, "image_locked": Power.LOCKED, # Openstack "ACTIVE": Power.ON, "SHUTOFF": Power.OFF, "SUSPENDED": Power.SUSPENDED, "ERROR": Power.ERROR, # SCVMM "Running": Power.ON, "PoweredOff": Power.OFF, "Stopped": Power.OFF, "Paused": Power.SUSPENDED, "Saved State": Power.SUSPENDED, "Creation Failed": Power.CREATION_FAILED, "Customization Failed": Power.CUSTOMIZATION_FAILED, "Missing": Power.ORPHANED, # When SCVMM says it is missing ... 
# EC2 (for VM manager) "stopped": Power.OFF, "running": Power.ON, } RESET_SWAP_STATES = {Power.OFF, Power.REBOOTING, Power.ORPHANED} template = models.ForeignKey( Template, on_delete=models.CASCADE, help_text="Appliance's source template.") appliance_pool = models.ForeignKey("AppliancePool", null=True, on_delete=models.CASCADE, help_text="Which appliance pool this appliance belongs to.") name = models.CharField(max_length=64, help_text="Appliance's name as it is in the provider.") ip_address = models.CharField(max_length=45, null=True, help_text="Appliance's IP address") datetime_leased = models.DateTimeField(null=True, help_text="When the appliance was leased") leased_until = models.DateTimeField(null=True, help_text="When does the appliance lease expire") status = models.TextField(default="Appliance inserted into the system.") status_changed = models.DateTimeField(auto_now_add=True) power_state_changed = models.DateTimeField(default=timezone.now) marked_for_deletion = models.BooleanField(default=False, help_text="Appliance is already being deleted.") power_state = models.CharField(max_length=32, default="unknown", help_text="Appliance's power state") ready = models.BooleanField(default=False, help_text="Appliance has an IP address and web UI is online.") uuid = models.CharField(max_length=36, null=True, blank=True, help_text="UUID of the machine") description = models.TextField(blank=True) lun_disk_connected = models.BooleanField( default=False, help_text="Whether the Direct LUN disk is connected. 
(RHEV Only)") swap = models.IntegerField( help_text="How many MB is the appliance in swap.", null=True, blank=True) ssh_failed = models.BooleanField(default=False, help_text="If last swap check failed on SSH.") def synchronize_metadata(self): """If possible, uploads some metadata to the provider VM object to be able to recover.""" self._set_meta('id', self.id) self._set_meta('source_template_id', self.template.id) if self.appliance_pool is not None: self._set_meta('pool_id', self.appliance_pool.id) self._set_meta('pool_total_count', self.appliance_pool.total_count) self._set_meta('pool_group', self.appliance_pool.group.id) if self.appliance_pool.provider is not None: self._set_meta('pool_provider', self.appliance_pool.provider.id) self._set_meta('pool_version', self.appliance_pool.version) self._set_meta( 'pool_appliance_date', apply_if_not_none(self.appliance_pool.date, "isoformat")) self._set_meta('pool_owner_id', self.appliance_pool.owner.id) self._set_meta('pool_owner_username', self.appliance_pool.owner.username) self._set_meta('pool_preconfigured', self.appliance_pool.preconfigured) self._set_meta('pool_description', self.appliance_pool.description) self._set_meta('pool_not_needed_anymore', self.appliance_pool.not_needed_anymore) self._set_meta('pool_finished', self.appliance_pool.finished) self._set_meta('pool_yum_update', self.appliance_pool.yum_update) self._set_meta('datetime_leased', apply_if_not_none(self.datetime_leased, "isoformat")) self._set_meta('leased_until', apply_if_not_none(self.leased_until, "isoformat")) self._set_meta('status_changed', apply_if_not_none(self.status_changed, "isoformat")) self._set_meta('ready', self.ready) self._set_meta('description', self.description) self._set_meta('lun_disk_connected', self.lun_disk_connected) self._set_meta('swap', self.swap) self._set_meta('ssh_failed', self.ssh_failed) def _set_meta(self, key, value): if self.power_state == self.Power.ORPHANED: return try: self.provider_api.set_meta_value(self.name, 
'sprout_{}'.format(key), value) self.logger.info('Set metadata {}: {}'.format(key, repr(value))) except NotImplementedError: pass @property def serialized(self): return dict( id=self.id, ready=self.ready, name=self.name, ip_address=self.ip_address, status=self.status, power_state=self.power_state, status_changed=apply_if_not_none(self.status_changed, "isoformat"), datetime_leased=apply_if_not_none(self.datetime_leased, "isoformat"), leased_until=apply_if_not_none(self.leased_until, "isoformat"), template_name=self.template.original_name, template_id=self.template.id, provider=self.template.provider.id, marked_for_deletion=self.marked_for_deletion, uuid=self.uuid, template_version=self.template.version, template_build_date=self.template.date.isoformat(), template_group=self.template.template_group.id, template_sprout_name=self.template.name, preconfigured=self.preconfigured, lun_disk_connected=self.lun_disk_connected, container=self.template.container, ) @property @contextmanager def kill_lock(self): with critical_section("kill-({})[{}]".format(type(self).__name__, str(self.pk))): yield @property def provider_api(self): return self.template.provider_api @property def provider_name(self): return self.template.provider_name @property def provider(self): return self.template.provider @property def preconfigured(self): return self.template.preconfigured @property def cfme(self): return CFMEAppliance(self.provider_name, self.name, container=self.template.container) @property def ipapp(self): return IPAppliance(self.ip_address, container=self.template.container) def user_can_use(self, user): return self.provider.user_can_use(user) def user_can_see(self, user): return self.provider.user_can_see(user) @property def visible_in_groups(self): return self.provider.user_groups.all() def is_visible_only_in_group(self, group): return len(self.visible_in_groups) == 1 and self.visible_in_groups[0] == group @property def containerized(self): return self.template.container is not None 
def set_status(self, status): with transaction.atomic(): appliance = Appliance.objects.get(id=self.id) if status != appliance.status: appliance.status = status appliance.status_changed = timezone.now() appliance.save() self.logger.info("Status changed: {}".format(status)) def set_power_state(self, power_state): if power_state != self.power_state: self.logger.info("Changed power state to {}".format(power_state)) self.power_state = power_state self.power_state_changed = timezone.now() if power_state in self.RESET_SWAP_STATES: # Reset some values self.swap = 0 self.ssh_failed = False def __unicode__(self): return "{} {} @ {}".format(type(self).__name__, self.name, self.template.provider.id) @classmethod def unassigned(cls): return cls.objects.filter(appliance_pool=None, ready=True) @classmethod def give_to_pool(cls, pool, custom_limit=None): """Give appliances from shepherd to the pool where the maximum count is specified by pool or you can specify a custom limit """ from appliances.tasks import ( appliance_power_on, mark_appliance_ready, wait_appliance_ready, appliance_yum_update, appliance_reboot) limit = custom_limit if custom_limit is not None else pool.total_count appliances = [] if limit <= 0: # Nothing to do return 0 with transaction.atomic(): for template in pool.possible_templates: for appliance in cls.unassigned().filter( template=template).all()[:limit - len(appliances)]: with appliance.kill_lock: appliance.appliance_pool = pool appliance.save() appliance.set_status("Given to pool {}".format(pool.id)) tasks = [appliance_power_on.si(appliance.id)] if pool.yum_update: tasks.append(appliance_yum_update.si(appliance.id)) tasks.append( appliance_reboot.si(appliance.id, if_needs_restarting=True)) if appliance.preconfigured: tasks.append(wait_appliance_ready.si(appliance.id)) else: tasks.append(mark_appliance_ready.si(appliance.id)) chain(*tasks)() appliances.append(appliance) # We have the break twice, to be sure. For each for loop. 
if len(appliances) >= limit: break if len(appliances) >= limit: break return len(appliances) @classmethod def kill(cls, appliance_or_id): # Completely delete appliance from provider from appliances.tasks import kill_appliance if isinstance(appliance_or_id, cls): self = Appliance.objects.get(id=appliance_or_id.id) else: self = Appliance.objects.get(id=appliance_or_id) with self.kill_lock: with transaction.atomic(): self = type(self).objects.get(pk=self.pk) self.class_logger(self.pk).info("Killing") if not self.marked_for_deletion: self.marked_for_deletion = True self.leased_until = None self.save() return kill_appliance.delay(self.id) def delete(self, *args, **kwargs): # Intercept delete and lessen the number of appliances in the pool # Then if the appliance is still present in the management system, kill it self.logger.info("Deleting from database") pool = self.appliance_pool result = super(Appliance, self).delete(*args, **kwargs) do_not_touch = kwargs.pop("do_not_touch_ap", False) if pool is not None and not do_not_touch: if pool.current_count == 0: pool.delete() return result def prolong_lease(self, time=60): self.logger.info("Prolonging lease by {} minutes from now.".format(time)) with transaction.atomic(): appliance = Appliance.objects.get(id=self.id) appliance.leased_until = timezone.now() + timedelta(minutes=time) appliance.save() @property def owner(self): if self.appliance_pool is None: return None else: return self.appliance_pool.owner @property def expires_in(self): """Minutes""" if self.leased_until is None: return "never" seconds = (self.leased_until - timezone.now()).total_seconds() if seconds <= 0.0: return "Expired!" 
else: return nice_seconds(seconds) @property def can_launch(self): return self.power_state in {self.Power.OFF, self.Power.SUSPENDED} @property def can_reboot(self): return self.power_state in {self.Power.ON} @property def can_suspend(self): return self.power_state in {self.Power.ON} @property def can_stop(self): return self.power_state in {self.Power.ON} @property def has_uuid(self): return self.uuid is not None @property def has_uuid_angular(self): return "true" if self.has_uuid else "false" @property def version(self): if self.template.version is None: return "---" else: return self.template.version @property def managed_providers(self): return self.metadata.get("managed_providers", []) @managed_providers.setter def managed_providers(self, value): with self.edit_metadata as metadata: metadata["managed_providers"] = value @property def vnc_link(self): try: return self.provider.vnc_console_link_for(self) except KeyError: # provider does not exist any more return None class AppliancePool(MetadataMixin): total_count = models.IntegerField(help_text="How many appliances should be in this pool.") group = models.ForeignKey( Group, on_delete=models.CASCADE, help_text="Group which is used to provision appliances.") provider = models.ForeignKey( Provider, help_text="If requested, appliances can be on single provider.", null=True, blank=True, on_delete=models.CASCADE) version = models.CharField(max_length=32, null=True, help_text="Appliance version") date = models.DateField(null=True, help_text="Appliance date.") owner = models.ForeignKey( User, on_delete=models.CASCADE, help_text="User who owns the appliance pool") preconfigured = models.BooleanField( default=True, help_text="Whether to provision preconfigured appliances") description = models.TextField(blank=True) not_needed_anymore = models.BooleanField( default=False, help_text="Used for marking the appliance pool as being deleted") finished = models.BooleanField(default=False, help_text="Whether fulfillment has been 
met.") yum_update = models.BooleanField(default=False, help_text="Whether to update appliances.") is_container = models.BooleanField(default=False, help_text='Whether the pool uses containers.') @classmethod def create(cls, owner, group, version=None, date=None, provider=None, num_appliances=1, time_leased=60, preconfigured=True, yum_update=False, container=False): container_q = ~Q(container=None) if container else Q(container=None) if owner.has_quotas: user_pools_count = cls.objects.filter(owner=owner).count() user_vms_count = Appliance.objects.filter(appliance_pool__owner=owner).count() if owner.quotas.total_pool_quota is not None: if owner.quotas.total_pool_quota <= user_pools_count: raise ValueError( "User has too many pools ({} allowed, {} already existing)".format( owner.quotas.total_pool_quota, user_pools_count)) if owner.quotas.total_vm_quota is not None: if owner.quotas.total_vm_quota <= (user_vms_count + num_appliances): raise ValueError( "Requested {} appliances, limit is {} and currently user has {}".format( num_appliances, owner.quotas.total_vm_quota, user_vms_count)) if owner.quotas.per_pool_quota is not None: if num_appliances > owner.quotas.per_pool_quota: raise ValueError("You are limited to {} VMs per pool, requested {}".format( owner.quotas.per_pool_quota, num_appliances)) user_filter = {'provider__user_groups__in': owner.groups.all()} from appliances.tasks import request_appliance_pool # Retrieve latest possible if not version: versions = Template.get_versions(container_q, template_group=group, ready=True, usable=True, exists=True, preconfigured=preconfigured, provider__working=True, provider__disabled=False, **user_filter) if versions: version = versions[0] if not date: if version is not None: dates = Template.get_dates(container_q, template_group=group, version=version, ready=True, usable=True, exists=True, preconfigured=preconfigured, provider__working=True, provider__disabled=False, **user_filter) else: dates = 
Template.get_dates(container_q, template_group=group, ready=True, usable=True, exists=True, preconfigured=preconfigured, provider__working=True, provider__disabled=False, **user_filter) if dates: date = dates[0] if isinstance(group, basestring): group = Group.objects.get(id=group) if isinstance(provider, basestring): provider = Provider.objects.get(id=provider, working=True, disabled=False) if not (version or date): raise Exception( "Could not find proper combination of group, date, version and a working provider!") if provider and not provider.user_can_use(owner): raise Exception( 'The user does not have the right to use provider {}'.format(provider.id)) req_params = dict( group=group, version=version, date=date, total_count=num_appliances, owner=owner, provider=provider, preconfigured=preconfigured, yum_update=yum_update, is_container=container) req = cls(**req_params) if not req.possible_templates: raise Exception("No possible templates! (pool params: {})".format(str(req_params))) req.save() cls.class_logger(req.pk).info("Created") request_appliance_pool.delay(req.id, time_leased) return req def delete(self, *args, **kwargs): self.logger.info("Deleting") with transaction.atomic(): for task in DelayedProvisionTask.objects.filter(pool=self): task.delete() return super(AppliancePool, self).delete(*args, **kwargs) @property def container_q(self): return ~Q(container=None) if self.is_container else Q(container=None) @property def appliance_container_q(self): return ~Q(template__container=None) if self.is_container else Q(template__container=None) @property def filter_params(self): filter_params = { "template_group": self.group, "preconfigured": self.preconfigured, 'provider__user_groups__in': self.owner.groups.all(), } if self.version is not None: filter_params["version"] = self.version if self.date is not None: filter_params["date"] = self.date if self.provider is not None: filter_params["provider"] = self.provider return filter_params @property def 
appliance_filter_params(self): params = self.filter_params result = {} for key, value in params.iteritems(): result["template__{}".format(key)] = value return result @property def possible_templates(self): return Template.objects.filter( self.container_q, ready=True, exists=True, usable=True, **self.filter_params).all().distinct() @property def possible_provisioning_templates(self): return sorted( filter(lambda tpl: tpl.provider.free, self.possible_templates), # Sort by date and load to pick the best match (least loaded provider) key=lambda tpl: (tpl.date, 1.0 - tpl.provider.appliance_load), reverse=True) @property def possible_providers(self): """Which providers contain a template that could be used for provisioning?.""" return set(tpl.provider for tpl in self.possible_templates) @property def appliances(self): return Appliance.objects.filter(appliance_pool=self).order_by("id").all() @property def current_count(self): return len(self.appliances) @property def percent_finished(self): if self.total_count is None: return 0.0 total = 4 * self.total_count if total == 0: return 1.0 finished = 0 for appliance in self.appliances: if appliance.power_state not in {Appliance.Power.UNKNOWN, Appliance.Power.ORPHANED}: finished += 1 if appliance.power_state == Appliance.Power.ON: finished += 1 if appliance.ip_address is not None: finished += 1 if appliance.ready: finished += 1 return float(finished) / float(total) @property def appliance_ips(self): return [ap.ip_address for ap in filter(lambda a: a.ip_address is not None, self.appliances)] @property def fulfilled(self): try: return len(self.appliance_ips) == self.total_count\ and all(a.ready for a in self.appliances) except ObjectDoesNotExist: return False @property def queued_provision_tasks(self): return DelayedProvisionTask.objects.filter(pool=self).order_by("id") def prolong_lease(self, time=60): self.logger.info("Initiated lease prolonging by {} minutes".format(time)) for appliance in self.appliances: 
appliance.prolong_lease(time=time) def kill(self): with transaction.atomic(): p = type(self).objects.get(pk=self.pk) p.not_needed_anymore = True p.save() save_lives = not self.finished self.logger.info("Killing") if self.appliances: for appliance in self.appliances: kill = False with transaction.atomic(): with appliance.kill_lock: if ( save_lives and appliance.ready and appliance.leased_until is None and appliance.marked_for_deletion is False and not appliance.managed_providers and appliance.power_state not in appliance.BAD_POWER_STATES): appliance.appliance_pool = None appliance.datetime_leased = None appliance.save() self.total_count -= 1 if self.total_count < 0: self.total_count = 0 # Protection against stupidity self.save() appliance.set_status( "The appliance was taken out of dying pool {}".format(self.id)) else: kill = True if kill: # Because Appliance.kill uses kill_lock too Appliance.kill(appliance) if self.current_count == 0: # Pool is empty, no point of keeping it alive. # This is needed when deleting a pool that has appliances that can be salvaged. # They are not deleted. the .delete() method on appliances takes care that when the # last appliance in pool is deleted, it deletes the pool. But since we don't delete # in the case of salvaging them, we do have to do it manually here. 
self.delete() else: # No appliances, so just delete it self.delete() @property def possible_other_owners(self): """Returns a list of User objects that can own this pool instead of original owner""" if self.provider is not None: providers = {self.provider} else: providers = {appliance.template.provider for appliance in self.appliances} possible_groups = set() for provider in providers: for group in provider.user_groups.all(): possible_groups.add(group) common_groups = set() for group in possible_groups: if all(group in provider.user_groups.all() for provider in providers): common_groups.add(group) return User.objects\ .filter(groups__in=common_groups)\ .exclude(pk=self.owner.pk)\ .order_by("last_name", "first_name", 'username') @property def num_delayed_provisioning_tasks(self): return len(self.queued_provision_tasks) @property def num_provisioning_tasks_before(self): tasks = self.queued_provision_tasks if len(tasks) == 0: return 0 latest_id = tasks[0].id return len(DelayedProvisionTask.objects.filter(id__lt=latest_id)) @property def num_possible_provisioning_slots(self): providers = set([]) for template in self.possible_provisioning_templates: providers.add(template.provider) slots = 0 for provider in providers: slots += provider.remaining_provisioning_slots return slots @property def num_possible_appliance_slots(self): providers = set([]) for template in self.possible_templates: providers.add(template.provider) slots = 0 for provider in providers: slots += provider.remaining_appliance_slots return slots @property def num_shepherd_appliances(self): return len( Appliance.objects.filter( self.appliance_container_q, appliance_pool=None, **self.appliance_filter_params ).distinct()) def __repr__(self): return "<AppliancePool id: {}, group: {}, total_count: {}>".format( self.id, self.group.id, self.total_count) def __unicode__(self): return "AppliancePool id: {}, group: {}, total_count: {}".format( self.id, self.group.id, self.total_count) class 
MismatchVersionMailer(models.Model): provider = models.ForeignKey(Provider, on_delete=models.CASCADE) template_name = models.CharField(max_length=64) supposed_version = models.CharField(max_length=32) actual_version = models.CharField(max_length=32) sent = models.BooleanField(default=False) class UserApplianceQuota(models.Model): user = models.OneToOneField(User, related_name="quotas", on_delete=models.CASCADE) per_pool_quota = models.IntegerField(null=True, blank=True) total_pool_quota = models.IntegerField(null=True, blank=True) total_vm_quota = models.IntegerField(null=True, blank=True) class BugQuery(models.Model): EMAIL_PLACEHOLDER = re.compile(r'\{\{EMAIL\}\}') CACHE_TIMEOUT = 180 name = models.CharField(max_length=64) url = models.TextField() owner = models.ForeignKey(User, on_delete=models.CASCADE, null=True, blank=True) @property def is_global(self): return self.owner is None @property def is_parametrized(self): return self.EMAIL_PLACEHOLDER.search(self.url) is not None @cached_property def bugzilla(self): # Returns the original bugzilla object return Bugzilla.from_config().bugzilla def query_for_user(self, user): if self.is_parametrized: if not user.email: return None url = self.EMAIL_PLACEHOLDER.sub(user.email, self.url) else: url = self.url return self.bugzilla.url_to_query(url) def list_bugs(self, user): cache_id = 'bq-{}-{}'.format(self.id, user.id) cached = redis.get(cache_id) if cached is not None: return pickle.loads(base64.b64decode(cached)) query = self.query_for_user(user) if query is None: result = [] else: def process_bug(bug): return { 'id': bug.id, 'weburl': bug.weburl, 'summary': bug.summary, 'severity': bug.severity, 'status': bug.status, 'component': bug.component, 'version': bug.version, 'fixed_in': bug.fixed_in, 'whiteboard': bug.whiteboard, 'flags': ['{}{}'.format(flag['name'], flag['status']) for flag in bug.flags], } result = [process_bug(bug) for bug in self.bugzilla.query(query)] redis.set(cache_id, 
base64.b64encode(pickle.dumps(result)), ex=self.CACHE_TIMEOUT) return result @classmethod def visible_for_user(cls, user): return [ bq for bq in cls.objects.filter(Q(owner=None) | Q(owner=user)).order_by('owner', 'id') if not (bq.is_parametrized and not user.email)]
gpl-2.0
nuxeh/keystone
keystone/auth/controllers.py
3
27269
# Copyright 2013 OpenStack Foundation # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. import sys from keystoneclient.common import cms from oslo_config import cfg from oslo_log import log from oslo_serialization import jsonutils from oslo_utils import importutils from oslo_utils import timeutils import six from keystone.common import controller from keystone.common import dependency from keystone.common import wsgi from keystone import config from keystone.contrib import federation from keystone import exception from keystone.i18n import _, _LI, _LW from keystone.resource import controllers as resource_controllers LOG = log.getLogger(__name__) CONF = cfg.CONF # registry of authentication methods AUTH_METHODS = {} AUTH_PLUGINS_LOADED = False def load_auth_methods(): global AUTH_PLUGINS_LOADED if AUTH_PLUGINS_LOADED: # Only try and load methods a single time. return # config.setup_authentication should be idempotent, call it to ensure we # have setup all the appropriate configuration options we may need. config.setup_authentication() for plugin in CONF.auth.methods: if '.' in plugin: # NOTE(morganfainberg): if '.' is in the plugin name, it should be # imported rather than used as a plugin identifier. 
plugin_class = plugin driver = importutils.import_object(plugin) if not hasattr(driver, 'method'): raise ValueError(_('Cannot load an auth-plugin by class-name ' 'without a "method" attribute defined: %s'), plugin_class) LOG.info(_LI('Loading auth-plugins by class-name is deprecated.')) plugin_name = driver.method else: plugin_name = plugin plugin_class = CONF.auth.get(plugin) driver = importutils.import_object(plugin_class) if plugin_name in AUTH_METHODS: raise ValueError(_('Auth plugin %(plugin)s is requesting ' 'previously registered method %(method)s') % {'plugin': plugin_class, 'method': driver.method}) AUTH_METHODS[plugin_name] = driver AUTH_PLUGINS_LOADED = True def get_auth_method(method_name): global AUTH_METHODS if method_name not in AUTH_METHODS: raise exception.AuthMethodNotSupported() return AUTH_METHODS[method_name] class AuthContext(dict): """Retrofitting auth_context to reconcile identity attributes. The identity attributes must not have conflicting values among the auth plug-ins. The only exception is `expires_at`, which is set to its earliest value. """ # identity attributes need to be reconciled among the auth plugins IDENTITY_ATTRIBUTES = frozenset(['user_id', 'project_id', 'access_token_id', 'domain_id', 'expires_at']) def __setitem__(self, key, val): if key in self.IDENTITY_ATTRIBUTES and key in self: existing_val = self[key] if key == 'expires_at': # special treatment for 'expires_at', we are going to take # the earliest expiration instead. if existing_val != val: LOG.info(_LI('"expires_at" has conflicting values ' '%(existing)s and %(new)s. 
Will use the ' 'earliest value.'), {'existing': existing_val, 'new': val}) if existing_val is None or val is None: val = existing_val or val else: val = min(existing_val, val) elif existing_val != val: msg = _('Unable to reconcile identity attribute %(attribute)s ' 'as it has conflicting values %(new)s and %(old)s') % ( {'attribute': key, 'new': val, 'old': existing_val}) raise exception.Unauthorized(msg) return super(AuthContext, self).__setitem__(key, val) # TODO(blk-u): this class doesn't use identity_api directly, but makes it # available for consumers. Consumers should probably not be getting # identity_api from this since it's available in global registry, then # identity_api should be removed from this list. @dependency.requires('identity_api', 'resource_api', 'trust_api') class AuthInfo(object): """Encapsulation of "auth" request.""" @staticmethod def create(context, auth=None): auth_info = AuthInfo(context, auth=auth) auth_info._validate_and_normalize_auth_data() return auth_info def __init__(self, context, auth=None): self.context = context self.auth = auth self._scope_data = (None, None, None, None) # self._scope_data is (domain_id, project_id, trust_ref, unscoped) # project scope: (None, project_id, None, None) # domain scope: (domain_id, None, None, None) # trust scope: (None, None, trust_ref, None) # unscoped: (None, None, None, 'unscoped') def _assert_project_is_enabled(self, project_ref): # ensure the project is enabled try: self.resource_api.assert_project_enabled( project_id=project_ref['id'], project=project_ref) except AssertionError as e: LOG.warning(six.text_type(e)) six.reraise(exception.Unauthorized, exception.Unauthorized(e), sys.exc_info()[2]) def _assert_domain_is_enabled(self, domain_ref): try: self.resource_api.assert_domain_enabled( domain_id=domain_ref['id'], domain=domain_ref) except AssertionError as e: LOG.warning(six.text_type(e)) six.reraise(exception.Unauthorized, exception.Unauthorized(e), sys.exc_info()[2]) def 
_lookup_domain(self, domain_info): domain_id = domain_info.get('id') domain_name = domain_info.get('name') domain_ref = None if not domain_id and not domain_name: raise exception.ValidationError(attribute='id or name', target='domain') try: if domain_name: domain_ref = self.resource_api.get_domain_by_name( domain_name) else: domain_ref = self.resource_api.get_domain(domain_id) except exception.DomainNotFound as e: LOG.exception(six.text_type(e)) raise exception.Unauthorized(e) self._assert_domain_is_enabled(domain_ref) return domain_ref def _lookup_project(self, project_info): project_id = project_info.get('id') project_name = project_info.get('name') project_ref = None if not project_id and not project_name: raise exception.ValidationError(attribute='id or name', target='project') try: if project_name: if 'domain' not in project_info: raise exception.ValidationError(attribute='domain', target='project') domain_ref = self._lookup_domain(project_info['domain']) project_ref = self.resource_api.get_project_by_name( project_name, domain_ref['id']) else: project_ref = self.resource_api.get_project(project_id) # NOTE(morganfainberg): The _lookup_domain method will raise # exception.Unauthorized if the domain isn't found or is # disabled. 
self._lookup_domain({'id': project_ref['domain_id']}) except exception.ProjectNotFound as e: raise exception.Unauthorized(e) self._assert_project_is_enabled(project_ref) return project_ref def _lookup_trust(self, trust_info): trust_id = trust_info.get('id') if not trust_id: raise exception.ValidationError(attribute='trust_id', target='trust') trust = self.trust_api.get_trust(trust_id) if not trust: raise exception.TrustNotFound(trust_id=trust_id) return trust def _validate_and_normalize_scope_data(self): """Validate and normalize scope data.""" if 'scope' not in self.auth: return if sum(['project' in self.auth['scope'], 'domain' in self.auth['scope'], 'unscoped' in self.auth['scope'], 'OS-TRUST:trust' in self.auth['scope']]) != 1: raise exception.ValidationError( attribute='project, domain, OS-TRUST:trust or unscoped', target='scope') if 'unscoped' in self.auth['scope']: self._scope_data = (None, None, None, 'unscoped') return if 'project' in self.auth['scope']: project_ref = self._lookup_project(self.auth['scope']['project']) self._scope_data = (None, project_ref['id'], None, None) elif 'domain' in self.auth['scope']: domain_ref = self._lookup_domain(self.auth['scope']['domain']) self._scope_data = (domain_ref['id'], None, None, None) elif 'OS-TRUST:trust' in self.auth['scope']: if not CONF.trust.enabled: raise exception.Forbidden('Trusts are disabled.') trust_ref = self._lookup_trust( self.auth['scope']['OS-TRUST:trust']) # TODO(ayoung): when trusts support domains, fill in domain data if trust_ref.get('project_id') is not None: project_ref = self._lookup_project( {'id': trust_ref['project_id']}) self._scope_data = (None, project_ref['id'], trust_ref, None) else: self._scope_data = (None, None, trust_ref, None) def _validate_auth_methods(self): if 'identity' not in self.auth: raise exception.ValidationError(attribute='identity', target='auth') # make sure auth methods are provided if 'methods' not in self.auth['identity']: raise 
exception.ValidationError(attribute='methods', target='identity') # make sure all the method data/payload are provided for method_name in self.get_method_names(): if method_name not in self.auth['identity']: raise exception.ValidationError(attribute=method_name, target='identity') # make sure auth method is supported for method_name in self.get_method_names(): if method_name not in AUTH_METHODS: raise exception.AuthMethodNotSupported() def _validate_and_normalize_auth_data(self): """Make sure "auth" is valid.""" # make sure "auth" exist if not self.auth: raise exception.ValidationError(attribute='auth', target='request body') self._validate_auth_methods() self._validate_and_normalize_scope_data() def get_method_names(self): """Returns the identity method names. :returns: list of auth method names """ # Sanitizes methods received in request's body # Filters out duplicates, while keeping elements' order. method_names = [] for method in self.auth['identity']['methods']: if method not in method_names: method_names.append(method) return method_names def get_method_data(self, method): """Get the auth method payload. :returns: auth method payload """ if method not in self.auth['identity']['methods']: raise exception.ValidationError(attribute=method, target='identity') return self.auth['identity'][method] def get_scope(self): """Get scope information. Verify and return the scoping information. :returns: (domain_id, project_id, trust_ref, unscoped). If scope to a project, (None, project_id, None, None) will be returned. If scoped to a domain, (domain_id, None, None, None) will be returned. If scoped to a trust, (None, project_id, trust_ref, None), Will be returned, where the project_id comes from the trust definition. If unscoped, (None, None, None, 'unscoped') will be returned. 
""" return self._scope_data def set_scope(self, domain_id=None, project_id=None, trust=None, unscoped=None): """Set scope information.""" if domain_id and project_id: msg = _('Scoping to both domain and project is not allowed') raise ValueError(msg) if domain_id and trust: msg = _('Scoping to both domain and trust is not allowed') raise ValueError(msg) if project_id and trust: msg = _('Scoping to both project and trust is not allowed') raise ValueError(msg) self._scope_data = (domain_id, project_id, trust, unscoped) @dependency.requires('assignment_api', 'catalog_api', 'identity_api', 'resource_api', 'token_provider_api', 'trust_api') class Auth(controller.V3Controller): # Note(atiwari): From V3 auth controller code we are # calling protection() wrappers, so we need to setup # the member_name and collection_name attributes of # auth controller code. # In the absence of these attributes, default 'entity' # string will be used to represent the target which is # generic. Policy can be defined using 'entity' but it # would not reflect the exact entity that is in context. # We are defining collection_name = 'tokens' and # member_name = 'token' to facilitate policy decisions. 
collection_name = 'tokens' member_name = 'token' def __init__(self, *args, **kw): super(Auth, self).__init__(*args, **kw) config.setup_authentication() def authenticate_for_token(self, context, auth=None): """Authenticate user and issue a token.""" include_catalog = 'nocatalog' not in context['query_string'] try: auth_info = AuthInfo.create(context, auth=auth) auth_context = AuthContext(extras={}, method_names=[], bind={}) self.authenticate(context, auth_info, auth_context) if auth_context.get('access_token_id'): auth_info.set_scope(None, auth_context['project_id'], None) self._check_and_set_default_scoping(auth_info, auth_context) (domain_id, project_id, trust, unscoped) = auth_info.get_scope() method_names = auth_info.get_method_names() method_names += auth_context.get('method_names', []) # make sure the list is unique method_names = list(set(method_names)) expires_at = auth_context.get('expires_at') # NOTE(morganfainberg): define this here so it is clear what the # argument is during the issue_v3_token provider call. metadata_ref = None token_audit_id = auth_context.get('audit_id') (token_id, token_data) = self.token_provider_api.issue_v3_token( auth_context['user_id'], method_names, expires_at, project_id, domain_id, auth_context, trust, metadata_ref, include_catalog, parent_audit_id=token_audit_id) # NOTE(wanghong): We consume a trust use only when we are using # trusts and have successfully issued a token. 
if trust: self.trust_api.consume_use(trust['id']) return render_token_data_response(token_id, token_data, created=True) except exception.TrustNotFound as e: raise exception.Unauthorized(e) def _check_and_set_default_scoping(self, auth_info, auth_context): (domain_id, project_id, trust, unscoped) = auth_info.get_scope() if trust: project_id = trust['project_id'] if domain_id or project_id or trust: # scope is specified return # Skip scoping when unscoped federated token is being issued if federation.IDENTITY_PROVIDER in auth_context: return # Do not scope if request is for explicitly unscoped token if unscoped is not None: return # fill in default_project_id if it is available try: user_ref = self.identity_api.get_user(auth_context['user_id']) except exception.UserNotFound as e: LOG.exception(six.text_type(e)) raise exception.Unauthorized(e) default_project_id = user_ref.get('default_project_id') if not default_project_id: # User has no default project. He shall get an unscoped token. return # make sure user's default project is legit before scoping to it try: default_project_ref = self.resource_api.get_project( default_project_id) default_project_domain_ref = self.resource_api.get_domain( default_project_ref['domain_id']) if (default_project_ref.get('enabled', True) and default_project_domain_ref.get('enabled', True)): if self.assignment_api.get_roles_for_user_and_project( user_ref['id'], default_project_id): auth_info.set_scope(project_id=default_project_id) else: msg = _LW("User %(user_id)s doesn't have access to" " default project %(project_id)s. The token" " will be unscoped rather than scoped to the" " project.") LOG.warning(msg, {'user_id': user_ref['id'], 'project_id': default_project_id}) else: msg = _LW("User %(user_id)s's default project %(project_id)s" " is disabled. 
The token will be unscoped rather" " than scoped to the project.") LOG.warning(msg, {'user_id': user_ref['id'], 'project_id': default_project_id}) except (exception.ProjectNotFound, exception.DomainNotFound): # default project or default project domain doesn't exist, # will issue unscoped token instead msg = _LW("User %(user_id)s's default project %(project_id)s not" " found. The token will be unscoped rather than" " scoped to the project.") LOG.warning(msg, {'user_id': user_ref['id'], 'project_id': default_project_id}) def authenticate(self, context, auth_info, auth_context): """Authenticate user.""" # The 'external' method allows any 'REMOTE_USER' based authentication # In some cases the server can set REMOTE_USER as '' instead of # dropping it, so this must be filtered out if context['environment'].get('REMOTE_USER'): try: external = get_auth_method('external') external.authenticate(context, auth_info, auth_context) except exception.AuthMethodNotSupported: # This will happen there is no 'external' plugin registered # and the container is performing authentication. # The 'kerberos' and 'saml' methods will be used this way. # In those cases, it is correct to not register an # 'external' plugin; if there is both an 'external' and a # 'kerberos' plugin, it would run the check on identity twice. 
LOG.debug("No 'external' plugin is registered.") except exception.Unauthorized: # If external fails then continue and attempt to determine # user identity using remaining auth methods LOG.debug("Authorization failed for 'external' auth method.") # need to aggregate the results in case two or more methods # are specified auth_response = {'methods': []} for method_name in auth_info.get_method_names(): method = get_auth_method(method_name) resp = method.authenticate(context, auth_info.get_method_data(method_name), auth_context) if resp: auth_response['methods'].append(method_name) auth_response[method_name] = resp if auth_response["methods"]: # authentication continuation required raise exception.AdditionalAuthRequired(auth_response) if 'user_id' not in auth_context: msg = _('User not found') raise exception.Unauthorized(msg) @controller.protected() def check_token(self, context): token_id = context.get('subject_token_id') token_data = self.token_provider_api.validate_v3_token( token_id) # NOTE(morganfainberg): The code in # ``keystone.common.wsgi.render_response`` will remove the content # body. 
return render_token_data_response(token_id, token_data) @controller.protected() def revoke_token(self, context): token_id = context.get('subject_token_id') return self.token_provider_api.revoke_token(token_id) @controller.protected() def validate_token(self, context): token_id = context.get('subject_token_id') include_catalog = 'nocatalog' not in context['query_string'] token_data = self.token_provider_api.validate_v3_token( token_id) if not include_catalog and 'catalog' in token_data['token']: del token_data['token']['catalog'] return render_token_data_response(token_id, token_data) @controller.protected() def revocation_list(self, context, auth=None): if not CONF.token.revoke_by_id: raise exception.Gone() tokens = self.token_provider_api.list_revoked_tokens() for t in tokens: expires = t['expires'] if not (expires and isinstance(expires, six.text_type)): t['expires'] = timeutils.isotime(expires) data = {'revoked': tokens} json_data = jsonutils.dumps(data) signed_text = cms.cms_sign_text(json_data, CONF.signing.certfile, CONF.signing.keyfile) return {'signed': signed_text} def _combine_lists_uniquely(self, a, b): # it's most likely that only one of these will be filled so avoid # the combination if possible. 
if a and b: return {x['id']: x for x in a + b}.values() else: return a or b @controller.protected() def get_auth_projects(self, context): auth_context = self.get_auth_context(context) user_id = auth_context.get('user_id') user_refs = [] if user_id: try: user_refs = self.assignment_api.list_projects_for_user(user_id) except exception.UserNotFound: # federated users have an id but they don't link to anything pass group_ids = auth_context.get('group_ids') grp_refs = [] if group_ids: grp_refs = self.assignment_api.list_projects_for_groups(group_ids) refs = self._combine_lists_uniquely(user_refs, grp_refs) return resource_controllers.ProjectV3.wrap_collection(context, refs) @controller.protected() def get_auth_domains(self, context): auth_context = self.get_auth_context(context) user_id = auth_context.get('user_id') user_refs = [] if user_id: try: user_refs = self.assignment_api.list_domains_for_user(user_id) except exception.UserNotFound: # federated users have an id but they don't link to anything pass group_ids = auth_context.get('group_ids') grp_refs = [] if group_ids: grp_refs = self.assignment_api.list_domains_for_groups(group_ids) refs = self._combine_lists_uniquely(user_refs, grp_refs) return resource_controllers.DomainV3.wrap_collection(context, refs) @controller.protected() def get_auth_catalog(self, context): auth_context = self.get_auth_context(context) user_id = auth_context.get('user_id') project_id = auth_context.get('project_id') if not project_id: raise exception.Forbidden( _('A project-scoped token is required to produce a service ' 'catalog.')) # The V3Controller base methods mostly assume that you're returning # either a collection or a single element from a collection, neither of # which apply to the catalog. Because this is a special case, this # re-implements a tiny bit of work done by the base controller (such as # self-referential link building) to avoid overriding or refactoring # several private methods. 
return { 'catalog': self.catalog_api.get_v3_catalog(user_id, project_id), 'links': {'self': self.base_url(context, path='auth/catalog')} } # FIXME(gyee): not sure if it belongs here or keystone.common. Park it here # for now. def render_token_data_response(token_id, token_data, created=False): """Render token data HTTP response. Stash token ID into the X-Subject-Token header. """ headers = [('X-Subject-Token', token_id)] if created: status = (201, 'Created') else: status = (200, 'OK') return wsgi.render_response(body=token_data, status=status, headers=headers)
apache-2.0
MphasisWyde/eWamSublimeAdaptor
POC/v0_3_POC_with_project_aborted/third-party/simplejson/tests/test_scanstring.py
139
7311
import sys from unittest import TestCase import simplejson as json import simplejson.decoder from simplejson.compat import b, PY3 class TestScanString(TestCase): # The bytes type is intentionally not used in most of these tests # under Python 3 because the decoder immediately coerces to str before # calling scanstring. In Python 2 we are testing the code paths # for both unicode and str. # # The reason this is done is because Python 3 would require # entirely different code paths for parsing bytes and str. # def test_py_scanstring(self): self._test_scanstring(simplejson.decoder.py_scanstring) def test_c_scanstring(self): if not simplejson.decoder.c_scanstring: return self._test_scanstring(simplejson.decoder.c_scanstring) def _test_scanstring(self, scanstring): if sys.maxunicode == 65535: self.assertEqual( scanstring(u'"z\U0001d120x"', 1, None, True), (u'z\U0001d120x', 6)) else: self.assertEqual( scanstring(u'"z\U0001d120x"', 1, None, True), (u'z\U0001d120x', 5)) self.assertEqual( scanstring('"\\u007b"', 1, None, True), (u'{', 8)) self.assertEqual( scanstring('"A JSON payload should be an object or array, not a string."', 1, None, True), (u'A JSON payload should be an object or array, not a string.', 60)) self.assertEqual( scanstring('["Unclosed array"', 2, None, True), (u'Unclosed array', 17)) self.assertEqual( scanstring('["extra comma",]', 2, None, True), (u'extra comma', 14)) self.assertEqual( scanstring('["double extra comma",,]', 2, None, True), (u'double extra comma', 21)) self.assertEqual( scanstring('["Comma after the close"],', 2, None, True), (u'Comma after the close', 24)) self.assertEqual( scanstring('["Extra close"]]', 2, None, True), (u'Extra close', 14)) self.assertEqual( scanstring('{"Extra comma": true,}', 2, None, True), (u'Extra comma', 14)) self.assertEqual( scanstring('{"Extra value after close": true} "misplaced quoted value"', 2, None, True), (u'Extra value after close', 26)) self.assertEqual( scanstring('{"Illegal expression": 1 + 2}', 2, 
None, True), (u'Illegal expression', 21)) self.assertEqual( scanstring('{"Illegal invocation": alert()}', 2, None, True), (u'Illegal invocation', 21)) self.assertEqual( scanstring('{"Numbers cannot have leading zeroes": 013}', 2, None, True), (u'Numbers cannot have leading zeroes', 37)) self.assertEqual( scanstring('{"Numbers cannot be hex": 0x14}', 2, None, True), (u'Numbers cannot be hex', 24)) self.assertEqual( scanstring('[[[[[[[[[[[[[[[[[[[["Too deep"]]]]]]]]]]]]]]]]]]]]', 21, None, True), (u'Too deep', 30)) self.assertEqual( scanstring('{"Missing colon" null}', 2, None, True), (u'Missing colon', 16)) self.assertEqual( scanstring('{"Double colon":: null}', 2, None, True), (u'Double colon', 15)) self.assertEqual( scanstring('{"Comma instead of colon", null}', 2, None, True), (u'Comma instead of colon', 25)) self.assertEqual( scanstring('["Colon instead of comma": false]', 2, None, True), (u'Colon instead of comma', 25)) self.assertEqual( scanstring('["Bad value", truth]', 2, None, True), (u'Bad value', 12)) for c in map(chr, range(0x00, 0x1f)): self.assertEqual( scanstring(c + '"', 0, None, False), (c, 2)) self.assertRaises( ValueError, scanstring, c + '"', 0, None, True) self.assertRaises(ValueError, scanstring, '', 0, None, True) self.assertRaises(ValueError, scanstring, 'a', 0, None, True) self.assertRaises(ValueError, scanstring, '\\', 0, None, True) self.assertRaises(ValueError, scanstring, '\\u', 0, None, True) self.assertRaises(ValueError, scanstring, '\\u0', 0, None, True) self.assertRaises(ValueError, scanstring, '\\u01', 0, None, True) self.assertRaises(ValueError, scanstring, '\\u012', 0, None, True) self.assertRaises(ValueError, scanstring, '\\u0123', 0, None, True) if sys.maxunicode > 65535: self.assertRaises(ValueError, scanstring, '\\ud834\\u"', 0, None, True) self.assertRaises(ValueError, scanstring, '\\ud834\\x0123"', 0, None, True) def test_issue3623(self): self.assertRaises(ValueError, json.decoder.scanstring, "xxx", 1, "xxx") 
self.assertRaises(UnicodeDecodeError, json.encoder.encode_basestring_ascii, b("xx\xff")) def test_overflow(self): # Python 2.5 does not have maxsize, Python 3 does not have maxint maxsize = getattr(sys, 'maxsize', getattr(sys, 'maxint', None)) assert maxsize is not None self.assertRaises(OverflowError, json.decoder.scanstring, "xxx", maxsize + 1) def test_surrogates(self): scanstring = json.decoder.scanstring def assertScan(given, expect, test_utf8=True): givens = [given] if not PY3 and test_utf8: givens.append(given.encode('utf8')) for given in givens: (res, count) = scanstring(given, 1, None, True) self.assertEqual(len(given), count) self.assertEqual(res, expect) assertScan( u'"z\\ud834\\u0079x"', u'z\ud834yx') assertScan( u'"z\\ud834\\udd20x"', u'z\U0001d120x') assertScan( u'"z\\ud834\\ud834\\udd20x"', u'z\ud834\U0001d120x') assertScan( u'"z\\ud834x"', u'z\ud834x') assertScan( u'"z\\udd20x"', u'z\udd20x') assertScan( u'"z\ud834x"', u'z\ud834x') # It may look strange to join strings together, but Python is drunk. # https://gist.github.com/etrepum/5538443 assertScan( u'"z\\ud834\udd20x12345"', u''.join([u'z\ud834', u'\udd20x12345'])) assertScan( u'"z\ud834\\udd20x"', u''.join([u'z\ud834', u'\udd20x'])) # these have different behavior given UTF8 input, because the surrogate # pair may be joined (in maxunicode > 65535 builds) assertScan( u''.join([u'"z\ud834', u'\udd20x"']), u''.join([u'z\ud834', u'\udd20x']), test_utf8=False) self.assertRaises(ValueError, scanstring, u'"z\\ud83x"', 1, None, True) self.assertRaises(ValueError, scanstring, u'"z\\ud834\\udd2x"', 1, None, True)
mit
Jusedawg/SickRage
sickbeard/indexers/indexer_config.py
9
1349
# coding=utf-8 from tvdb_api.tvdb_api import Tvdb from sickbeard import helpers initConfig = { 'valid_languages': [ "da", "fi", "nl", "de", "it", "es", "fr", "pl", "hu", "el", "tr", "ru", "he", "ja", "pt", "zh", "cs", "sl", "hr", "ko", "en", "sv", "no" ], 'langabbv_to_id': { 'el': 20, 'en': 7, 'zh': 27, 'it': 15, 'cs': 28, 'es': 16, 'ru': 22, 'nl': 13, 'pt': 26, 'no': 9, 'tr': 21, 'pl': 18, 'fr': 17, 'hr': 31, 'de': 14, 'da': 10, 'fi': 11, 'hu': 19, 'ja': 25, 'he': 24, 'ko': 32, 'sv': 8, 'sl': 30 } } INDEXER_TVDB = 1 INDEXER_TVRAGE = 2 # Must keep indexerConfig = { INDEXER_TVDB: { 'id': INDEXER_TVDB, 'name': 'theTVDB', 'module': Tvdb, 'api_params': { 'apikey': 'F9C450E78D99172E', 'language': 'en', 'useZip': True, }, 'session': helpers.make_session(), 'trakt_id': 'tvdb_id', 'xem_origin': 'tvdb', 'icon': 'thetvdb16.png', 'scene_loc': 'http://sickrage.github.io/scene_exceptions/scene_exceptions.json', 'show_url': 'http://thetvdb.com/?tab=series&id=', 'base_url': 'http://thetvdb.com/api/%(apikey)s/series/' } } indexerConfig[INDEXER_TVDB]['base_url'] %= indexerConfig[INDEXER_TVDB]['api_params'] # insert API key into base url
gpl-3.0
appapantula/scikit-learn
sklearn/utils/tests/test_shortest_path.py
303
2841
from collections import defaultdict

import numpy as np
from numpy.testing import assert_array_almost_equal

from sklearn.utils.graph import (graph_shortest_path,
                                 single_source_shortest_path_length)


def floyd_warshall_slow(graph, directed=False):
    """Reference O(N^3) Floyd-Warshall used to validate the fast routines.

    Parameters
    ----------
    graph : ndarray of shape (N, N)
        Dense distance matrix; zero entries denote missing edges.
    directed : bool, default=False
        If False, the graph is symmetrized before relaxation.

    Returns
    -------
    ndarray of shape (N, N)
        All-pairs shortest path lengths, with 0 marking unreachable pairs
        (matching the convention of ``graph_shortest_path``).
    """
    # Work on a float copy: the previous version mutated the caller's array
    # in place, which forced every call site to remember to pass ``.copy()``.
    graph = graph.astype(float, copy=True)
    N = graph.shape[0]

    # Set nonzero (missing-edge) entries to infinity.
    graph[np.where(graph == 0)] = np.inf
    # Set diagonal to zero: zero self-distance.
    graph.flat[::N + 1] = 0
    if not directed:
        graph = np.minimum(graph, graph.T)

    # Classic triple-loop relaxation.
    for k in range(N):
        for i in range(N):
            for j in range(N):
                graph[i, j] = min(graph[i, j], graph[i, k] + graph[k, j])

    # Map unreachable pairs back to 0 for comparison with the fast routines.
    graph[np.where(np.isinf(graph))] = 0
    return graph


def generate_graph(N=20):
    """Build a reproducible sparse symmetric N x N distance matrix."""
    # Sparse grid of distances.
    rng = np.random.RandomState(0)
    dist_matrix = rng.random_sample((N, N))

    # Make symmetric: distances are not direction-dependent.
    dist_matrix = dist_matrix + dist_matrix.T

    # Make graph sparse by zeroing roughly half the entries.
    i = (rng.randint(N, size=N * N // 2), rng.randint(N, size=N * N // 2))
    dist_matrix[i] = 0

    # Set diagonal to zero.
    dist_matrix.flat[::N + 1] = 0

    return dist_matrix


def test_floyd_warshall():
    """Fast Floyd-Warshall must agree with the slow reference."""
    dist_matrix = generate_graph(20)

    for directed in (True, False):
        graph_FW = graph_shortest_path(dist_matrix, directed, 'FW')
        graph_py = floyd_warshall_slow(dist_matrix.copy(), directed)

        assert_array_almost_equal(graph_FW, graph_py)


def test_dijkstra():
    """Dijkstra must agree with the slow Floyd-Warshall reference."""
    dist_matrix = generate_graph(20)

    for directed in (True, False):
        graph_D = graph_shortest_path(dist_matrix, directed, 'D')
        graph_py = floyd_warshall_slow(dist_matrix.copy(), directed)

        assert_array_almost_equal(graph_D, graph_py)


def test_shortest_path():
    """single_source_shortest_path_length must match unweighted distances."""
    dist_matrix = generate_graph(20)
    # We compare path length and not costs (-> set distances to 0 or 1)
    dist_matrix[dist_matrix != 0] = 1

    for directed in (True, False):
        if not directed:
            dist_matrix = np.minimum(dist_matrix, dist_matrix.T)

        graph_py = floyd_warshall_slow(dist_matrix.copy(), directed)
        for i in range(dist_matrix.shape[0]):
            # Non-reachable nodes have distance 0 in graph_py
            dist_dict = defaultdict(int)
            dist_dict.update(single_source_shortest_path_length(dist_matrix,
                                                                i))

            for j in range(graph_py[i].shape[0]):
                assert_array_almost_equal(dist_dict[j], graph_py[i, j])


def test_dijkstra_bug_fix():
    # Regression test: asymmetric input handled as undirected must give the
    # same result for both methods.
    X = np.array([[0., 0., 4.],
                  [1., 0., 2.],
                  [0., 5., 0.]])
    dist_FW = graph_shortest_path(X, directed=False, method='FW')
    dist_D = graph_shortest_path(X, directed=False, method='D')
    assert_array_almost_equal(dist_D, dist_FW)
bsd-3-clause
appleseedhq/cortex
contrib/IECoreAlembic/test/IECoreAlembic/AlembicPerformanceTest.py
1
5388
########################################################################## # # Copyright (c) 2018, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # * Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in the # documentation and/or other materials provided with the distribution. # # * Neither the name of Image Engine Design nor the names of any # other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
# ##########################################################################

import os
import shutil
import unittest
import imath
import tempfile

import IECore
import IECoreScene
import IECoreAlembic


class Timer( object ) :
	# Context manager that wall-clock-times its body and prints the elapsed
	# seconds with a caller-supplied label on exit.

	def __init__( self, name) :
		self.name = name
		self.timer = IECore.Timer( False, IECore.Timer.WallClock )

	def __enter__( self ) :
		self.timer.start()
		return self

	def __exit__( self, type, value, traceback ) :
		t = self.timer.stop()
		print "{0} , time: {1}s".format(self.name, t)


class AlembicPerformanceTest( unittest.TestCase ) :
	# Performance comparison of scene-cache reads with and without attributes,
	# for both the Alembic (.abc) and SceneCache (.scc) backends. Only runs
	# when the IE_PERFORMANCE_TEST environment variable is set.

	def setUp(self):
		# Track every cache file we write so tearDown can remove it.
		self.filesCreated = []

	def tearDown(self):
		for fileName in self.filesCreated:
			if os.path.exists( fileName ):
				os.unlink( fileName )

	def writeCacheFile( self, suffix = ".abc", withAttributes = False ) :
		# Write a deep synthetic scene hierarchy to a fresh temp file and
		# return its name. When withAttributes is true, every leaf location
		# gets four sample attributes of differing types.
		# NOTE(review): the NamedTemporaryFile is only used to reserve a
		# unique name — it is deleted when the `with` block closes, and the
		# SceneInterface then recreates the file at that path.
		with tempfile.NamedTemporaryFile(suffix = suffix) as tf:
			fileName = tf.name
		with Timer("write file with attributes: '{0}', filename: '{1}'".format(withAttributes, fileName)) as t:
			root = IECoreScene.SceneInterface.create( fileName, IECore.IndexedIO.OpenMode.Write )
			# pow( 10, 4 ) = 100,000 locations
			maxDepth = 5
			numChildren = 10
			def createChildren( location, depth, withAttributes = False ) :
				if depth >= maxDepth :
					# scene interface forbids writing attributes on the root
					if withAttributes and depth != 0:
						location.writeAttribute( "testFloat", IECore.FloatData( 1.0 ), 0.0 )
						location.writeAttribute( "testInt", IECore.IntData( 2 ), 0.0 )
						location.writeAttribute( "testString", IECore.StringData( "don" ), 0.0 )
						location.writeAttribute( "testV3f", IECore.V3fData( imath.V3f(1,2,3), IECore.GeometricData.Interpretation.Vector ), 0.0 )
					return
				for i in range( numChildren ) :
					c = location.createChild( str( i ) )
					createChildren( c, depth + 1, withAttributes = withAttributes )
			createChildren( root, 0, withAttributes = withAttributes )
		self.filesCreated.append( fileName )
		return fileName

	@unittest.skipUnless( os.environ.get("IE_PERFORMANCE_TEST", False), "'IE_PERFORMANCE_TEST' env var not set" )
	def testCompareReadWithAttributes( self ) :
		# For each backend, time 10 parallelReadAll passes over a cache with
		# no attributes, then over one where every leaf has 4 attributes.
		extensions = ['.abc', '.scc']
		for extension in extensions:
			print "== {0} ==".format( extension )
			cacheFileName = self.writeCacheFile( suffix = extension )
			root = IECoreScene.SceneInterface.create( cacheFileName, IECore.IndexedIO.OpenMode.Read )
			def readAll( location ) :
				# read all attributes
				attributeNames = location.attributeNames()
				for attributeName in attributeNames:
					location.readAttribute( attributeName, 0.0 )
				numAttributes = len( attributeNames )
				# recurse into child locations
				for childName in location.childNames() :
					numAttributes += readAll( location.child( childName ) )
				return numAttributes
			times = []
			for testRun in range( 10 ):
				t = IECore.Timer( True , IECore.Timer.WallClock )
				results = IECoreScene.SceneAlgo.parallelReadAll(root, 0, 0, 24.0, IECoreScene.SceneAlgo.Transforms | IECoreScene.SceneAlgo.Attributes )
				times.append( t.stop() )
			# No attributes were written, so none should be read.
			self.assertEqual( results["attributes"], 0 )
			print times
			cacheFileName = self.writeCacheFile( suffix = extension, withAttributes = True )
			root = IECoreScene.SceneInterface.create( cacheFileName, IECore.IndexedIO.OpenMode.Read )
			times = []
			for testRun in range( 10 ):
				t = IECore.Timer( True , IECore.Timer.WallClock )
				results = IECoreScene.SceneAlgo.parallelReadAll(root, 0, 0, 24.0, IECoreScene.SceneAlgo.Transforms | IECoreScene.SceneAlgo.Attributes )
				times.append( t.stop() )
			# 100,000 leaf locations x 4 attributes each.
			self.assertEqual( results["attributes"] , 100000 * 4 )
			print times
bsd-3-clause
eduNEXT/edunext-platform
lms/djangoapps/course_home_api/dates/v1/tests/test_views.py
3
2087
""" Tests for Dates Tab API in the Course Home API """ import ddt from django.urls import reverse from course_modes.models import CourseMode from lms.djangoapps.course_home_api.tests.utils import BaseCourseHomeTests from student.models import CourseEnrollment @ddt.ddt class DatesTabTestViews(BaseCourseHomeTests): """ Tests for the Dates Tab API """ @classmethod def setUpClass(cls): BaseCourseHomeTests.setUpClass() cls.url = reverse('course-home-dates-tab', args=[cls.course.id]) @ddt.data(CourseMode.AUDIT, CourseMode.VERIFIED) def test_get_authenticated_enrolled_user(self, enrollment_mode): CourseEnrollment.enroll(self.user, self.course.id, enrollment_mode) response = self.client.get(self.url) self.assertEqual(response.status_code, 200) # Pulling out the date blocks to check learner has access. The Verification Deadline Date # should not be accessible to the audit learner, but accessible to the verified learner. date_blocks = response.data.get('course_date_blocks') if enrollment_mode == CourseMode.AUDIT: self.assertFalse(response.data.get('learner_is_verified')) self.assertTrue(any(block.get('learner_has_access') is False for block in date_blocks)) else: self.assertTrue(response.data.get('learner_is_verified')) self.assertTrue(all(block.get('learner_has_access') for block in date_blocks)) def test_get_authenticated_user_not_enrolled(self): response = self.client.get(self.url) self.assertEqual(response.status_code, 200) self.assertFalse(response.data.get('learner_is_verified')) def test_get_unauthenticated_user(self): self.client.logout() response = self.client.get(self.url) self.assertEqual(response.status_code, 403) def test_get_unknown_course(self): url = reverse('course-home-dates-tab', args=['course-v1:unknown+course+2T2020']) response = self.client.get(url) self.assertEqual(response.status_code, 404)
agpl-3.0
binary13/Python_Koans
python2/koans/about_exceptions.py
83
1732
#!/usr/bin/env python # -*- coding: utf-8 -*- from runner.koan import * class AboutExceptions(Koan): class MySpecialError(RuntimeError): pass def test_exceptions_inherit_from_exception(self): mro = self.MySpecialError.__mro__ self.assertEqual(__, mro[1].__name__) self.assertEqual(__, mro[2].__name__) self.assertEqual(__, mro[3].__name__) self.assertEqual(__, mro[4].__name__) def test_try_clause(self): result = None try: self.fail("Oops") except StandardError as ex: result = 'exception handled' self.assertEqual(__, result) self.assertEqual(____, isinstance(ex, StandardError)) self.assertEqual(____, isinstance(ex, RuntimeError)) self.assertTrue(issubclass(RuntimeError, StandardError), \ "RuntimeError is a subclass of StandardError") self.assertEqual(__, ex[0]) def test_raising_a_specific_error(self): result = None try: raise self.MySpecialError, "My Message" except self.MySpecialError as ex: result = 'exception handled' self.assertEqual(__, result) self.assertEqual(__, ex[0]) def test_else_clause(self): result = None try: pass except RuntimeError: result = 'it broke' pass else: result = 'no damage done' self.assertEqual(__, result) def test_finally_clause(self): result = None try: self.fail("Oops") except: # no code here pass finally: result = 'always run' self.assertEqual(__, result)
mit
mavenlin/tensorflow
tensorflow/contrib/timeseries/python/timeseries/model_utils.py
25
18383
# Copyright 2017 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Helper functions for training and constructing time series Models.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import re import numpy from tensorflow.contrib.framework.python.ops import variables from tensorflow.contrib.layers.python.layers import optimizers from tensorflow.contrib.timeseries.python.timeseries import feature_keys from tensorflow.python.estimator import estimator_lib from tensorflow.python.estimator.export import export_lib from tensorflow.python.framework import dtypes from tensorflow.python.framework import ops from tensorflow.python.ops import array_ops from tensorflow.python.ops import control_flow_ops from tensorflow.python.ops import init_ops from tensorflow.python.ops import math_ops from tensorflow.python.ops import nn_ops from tensorflow.python.ops import state_ops from tensorflow.python.ops import variable_scope from tensorflow.python.util import nest def _check_feature_shapes_compatible_with( features, compatible_with_name, compatible_with_value, ignore=None): """Checks all features are compatible with the given time-like feature.""" if ignore is None: ignore = set() for name, value in features.items(): if name in ignore: continue feature_shape = value.get_shape() if feature_shape.ndims is None: 
continue if feature_shape.ndims < 2: raise ValueError( ("Features must have shape (batch dimension, window size, ...) " "(got rank {} for feature '{}')").format( feature_shape.ndims, name)) if not feature_shape[:2].is_compatible_with( compatible_with_value.get_shape()): raise ValueError( ("Features must have shape (batch dimension, window size, ...) " "where batch dimension and window size match the " "'{times_feature}' feature (got shape {feature_shape} for " "feature '{feature_name}' but shape {times_shape} for feature " "'{times_feature}')").format( times_feature=compatible_with_name, feature_shape=feature_shape, feature_name=name, times_shape=compatible_with_value.get_shape())) def _check_predict_features(features): """Raises errors if features are not suitable for prediction.""" if feature_keys.PredictionFeatures.TIMES not in features: raise ValueError("Expected a '{}' feature for prediction.".format( feature_keys.PredictionFeatures.TIMES)) if feature_keys.PredictionFeatures.STATE_TUPLE not in features: raise ValueError("Expected a '{}' feature for prediction.".format( feature_keys.PredictionFeatures.STATE_TUPLE)) times_feature = features[feature_keys.PredictionFeatures.TIMES] if not times_feature.get_shape().is_compatible_with([None, None]): raise ValueError( ("Expected shape (batch dimension, window size) for feature '{}' " "(got shape {})").format(feature_keys.PredictionFeatures.TIMES, times_feature.get_shape())) _check_feature_shapes_compatible_with( features=features, compatible_with_name=feature_keys.PredictionFeatures.TIMES, compatible_with_value=times_feature, ignore=set([ feature_keys.PredictionFeatures.STATE_TUPLE # Model-dependent shapes ])) def _check_train_eval_features(features, model): """Raise errors if features are not suitable for training/evaluation.""" if feature_keys.TrainEvalFeatures.TIMES not in features: raise ValueError("Expected a '{}' feature for training/evaluation.".format( feature_keys.TrainEvalFeatures.TIMES)) if 
feature_keys.TrainEvalFeatures.VALUES not in features: raise ValueError("Expected a '{}' feature for training/evaluation.".format( feature_keys.TrainEvalFeatures.VALUES)) times_feature = features[feature_keys.TrainEvalFeatures.TIMES] if not times_feature.get_shape().is_compatible_with([None, None]): raise ValueError( ("Expected shape (batch dimension, window size) for feature '{}' " "(got shape {})").format(feature_keys.TrainEvalFeatures.TIMES, times_feature.get_shape())) values_feature = features[feature_keys.TrainEvalFeatures.VALUES] if not values_feature.get_shape().is_compatible_with( [None, None, model.num_features]): raise ValueError( ("Expected shape (batch dimension, window size, {num_features}) " "for feature '{feature_name}', since the model was configured " "with num_features={num_features} (got shape {got_shape})").format( num_features=model.num_features, feature_name=feature_keys.TrainEvalFeatures.VALUES, got_shape=times_feature.get_shape())) _check_feature_shapes_compatible_with( features=features, compatible_with_name=feature_keys.TrainEvalFeatures.TIMES, compatible_with_value=times_feature, ignore=set([ feature_keys.State.STATE_TUPLE # Model-dependent shapes ])) def _identity_metric_single(name, input_tensor): """A metric which takes on its last updated value. This keeps evaluation metrics in sync with one another, since update ops are run separately from their result Tensors. Simply returning (input_tensor, no_op) as a metric with a value but no update means that a metric will come from a different batch of data than metrics which cache values in a Variable (e.g. the default loss metric). Args: name: A name for the metric. input_tensor: Any Tensor. Returns: A tuple of (value, update_op). 
""" metric_variable = variable_scope.variable( name="{}_identity_metric".format(name), initial_value=array_ops.zeros([], dtype=input_tensor.dtype), collections=[ops.GraphKeys.LOCAL_VARIABLES], validate_shape=False) update_op = state_ops.assign(metric_variable, input_tensor, validate_shape=False) # This shape will be correct once the first update runs (but may be # incomplete, so is not helpful for initializing the variable). metric_variable.set_shape(input_tensor.get_shape()) return (metric_variable.value(), update_op) def _identity_metric_nested(name, input_tensors): """Create identity metrics for a nested tuple of Tensors.""" update_ops = [] value_tensors = [] for tensor_number, tensor in enumerate(nest.flatten(input_tensors)): value_tensor, update_op = _identity_metric_single( name="{}_{}".format(name, tensor_number), input_tensor=tensor) update_ops.append(update_op) value_tensors.append(value_tensor) return (nest.pack_sequence_as(input_tensors, value_tensors), control_flow_ops.group(*update_ops)) def state_to_dictionary(state_tuple): """Flatten model state into a dictionary with string keys.""" flattened = {} for state_number, state_value in enumerate(nest.flatten(state_tuple)): prefixed_state_name = "{}_{:02d}".format(feature_keys.State.STATE_PREFIX, state_number) flattened[prefixed_state_name] = state_value return flattened def make_model_fn( model, state_manager, optimizer, input_statistics_generator=None): """Returns a model function suitable for use with a tf.estimator. Args: model: The object (inheriting from Model) to create a function for. state_manager: A state manager to wrap the model with (or PassthroughStateManager if no state needs to be managed). optimizer: An instance of `tf.train.Optimizer` to use for training. input_statistics_generator: An InputStatisticsFromMiniBatch object from math_utils.py, used for collecting statistics about input data during training. Returns: The model function, suitable for passing to a tf.estimator.Estimator. 
""" def _convert_feature_to_tensor(name, value): """Casts features to the correct dtype based on their name.""" if name in [ feature_keys.TrainEvalFeatures.TIMES, feature_keys.PredictionFeatures.TIMES ]: return math_ops.cast(value, dtypes.int64) if name == feature_keys.TrainEvalFeatures.VALUES: return math_ops.cast(value, model.dtype) if name == feature_keys.PredictionFeatures.STATE_TUPLE: return value # Correct dtypes are model-dependent return ops.convert_to_tensor(value) def _gather_state(features): """Returns `features` with state packed, indicates if packing was done.""" prefixed_state_re = re.compile(r"^" + feature_keys.State.STATE_PREFIX + r"_(\d+)$") numbered_state = [] for key, tensor in features.items(): search_result = prefixed_state_re.search(key) if search_result: numbered_state.append((int(search_result.group(1)), key, tensor)) if not numbered_state: return features, False features = features.copy() for _, key, _ in numbered_state: del features[key] numbered_state.sort(key=lambda number, *_: number) features[feature_keys.State.STATE_TUPLE] = nest.pack_sequence_as( structure=model.get_start_state(), flat_sequence=[tensor for _, _, tensor in numbered_state]) return features, True def _train(features): """Add training ops to the graph.""" with variable_scope.variable_scope("model"): model_outputs = state_manager.define_loss(model, features, estimator_lib.ModeKeys.TRAIN) train_op = optimizers.optimize_loss( model_outputs.loss, global_step=variables.get_global_step(), optimizer=optimizer, # Learning rate is set in the Optimizer object learning_rate=None) return estimator_lib.EstimatorSpec( loss=model_outputs.loss, mode=estimator_lib.ModeKeys.TRAIN, train_op=train_op) def _evaluate(features): """Add ops for evaluation (aka filtering) to the graph.""" with variable_scope.variable_scope("model"): model_outputs = state_manager.define_loss(model, features, estimator_lib.ModeKeys.EVAL) metrics = {} # Just output in-sample predictions for the last chunk seen for 
prediction_key, prediction_value in model_outputs.predictions.items(): metrics[prediction_key] = _identity_metric_single(prediction_key, prediction_value) metrics[feature_keys.FilteringResults.TIMES] = _identity_metric_single( feature_keys.FilteringResults.TIMES, model_outputs.prediction_times) metrics[feature_keys.FilteringResults.STATE_TUPLE] = ( _identity_metric_nested(feature_keys.FilteringResults.STATE_TUPLE, model_outputs.end_state)) return estimator_lib.EstimatorSpec( loss=model_outputs.loss, mode=estimator_lib.ModeKeys.EVAL, eval_metric_ops=metrics, predictions={}) def _predict(features): """Add ops for prediction to the graph.""" with variable_scope.variable_scope("model"): prediction = model.predict(features=features) prediction[feature_keys.PredictionResults.TIMES] = features[ feature_keys.PredictionFeatures.TIMES] return estimator_lib.EstimatorSpec( predictions=prediction, mode=estimator_lib.ModeKeys.PREDICT) def _serving(features): with variable_scope.variable_scope("model"): prediction_outputs = model.predict(features=features) with variable_scope.variable_scope("model", reuse=True): filtering_outputs = state_manager.define_loss(model, features, estimator_lib.ModeKeys.EVAL) return estimator_lib.EstimatorSpec( mode=estimator_lib.ModeKeys.PREDICT, export_outputs={ feature_keys.SavedModelLabels.PREDICT: export_lib.PredictOutput(prediction_outputs), feature_keys.SavedModelLabels.FILTER: export_lib.PredictOutput( state_to_dictionary(filtering_outputs.end_state)) }, # Likely unused, but it is necessary to return `predictions` to satisfy # the Estimator's error checking. predictions={}) def _model_fn(features, labels, mode): """Given a time series in `features`, define a loss for `mode`. Args: features: A dictionary, the output of a chunker (typically with keys feature_keys.TrainEvalFeatures.TIMES and feature_keys.TrainEvalFeatures.VALUES). labels: Not used; included for compatibility with tf.learn. 
mode: The tf.estimator.ModeKeys mode to use (TRAIN, EVAL, INFER). Returns: A tuple of predictions, a loss Tensor, and a train op. Raises: ValueError: If the model makes predictions which do not have static shape information. """ if labels: raise ValueError("The model received a `labels` dictionary, which is not" " supported. Pass '{}' and '{}' as features.".format( feature_keys.TrainEvalFeatures.TIMES, feature_keys.TrainEvalFeatures.VALUES)) del labels features = {name: _convert_feature_to_tensor(name=name, value=value) for name, value in features.items()} if input_statistics_generator is not None: input_statistics = input_statistics_generator.initialize_graph( features, update_statistics=(mode == estimator_lib.ModeKeys.TRAIN)) else: input_statistics = None model.initialize_graph(input_statistics=input_statistics) # _gather_state requires the model to have its graph initialized (so it has # access to the structure of the model's state) features, passed_flat_state = _gather_state(features) if (mode == estimator_lib.ModeKeys.TRAIN or mode == estimator_lib.ModeKeys.EVAL): _check_train_eval_features(features, model) elif mode == estimator_lib.ModeKeys.PREDICT: _check_predict_features(features) else: raise ValueError("Unknown mode '{}' passed to model_fn.".format(mode)) state_manager.initialize_graph( model=model, input_statistics=input_statistics) if mode == estimator_lib.ModeKeys.TRAIN: return _train(features) elif mode == estimator_lib.ModeKeys.EVAL: return _evaluate(features) elif mode == estimator_lib.ModeKeys.PREDICT and not passed_flat_state: return _predict(features) elif mode == estimator_lib.ModeKeys.PREDICT and passed_flat_state: # The mode is PREDICT, but we're actually in export_savedmodel for # serving. We want to return two graphs: one for filtering (state + data # -> state) and one for predicting (state -> prediction). 
return _serving(features) return _model_fn # TODO(agarwal): Remove and replace with functionality from tf.slim def fully_connected(inp, inp_size, layer_size, name, activation=nn_ops.relu, dtype=dtypes.float32): """Helper method to create a fully connected hidden layer.""" wt = variable_scope.get_variable( name="{}_weight".format(name), shape=[inp_size, layer_size], dtype=dtype) bias = variable_scope.get_variable( name="{}_bias".format(name), shape=[layer_size], initializer=init_ops.zeros_initializer()) output = nn_ops.xw_plus_b(inp, wt, bias) if activation is not None: assert callable(activation) output = activation(output) return output def parameter_switch(parameter_overrides): """Create a function which chooses between overridden and model parameters. Args: parameter_overrides: A dictionary with explicit overrides of model parameters, mapping from Tensors to their overridden values. Returns: A function which takes a Tensor and returns the override if it is specified, or otherwise the evaluated value (given current Variable values). """ def get_passed_or_trained_value(parameter): return ops.convert_to_tensor( parameter_overrides.get(parameter, parameter)).eval() return get_passed_or_trained_value def canonicalize_times_or_steps_from_output(times, steps, previous_model_output): """Canonicalizes either relative or absolute times, with error checking.""" if steps is not None and times is not None: raise ValueError("Only one of `steps` and `times` may be specified.") if steps is None and times is None: raise ValueError("One of `steps` and `times` must be specified.") if times is not None: times = numpy.array(times) if len(times.shape) != 2: times = times[None, ...] 
if (previous_model_output[feature_keys.FilteringResults.TIMES].shape[0] != times.shape[0]): raise ValueError( ("`times` must have a batch dimension matching" " the previous model output (got a batch dimension of {} for `times`" " and {} for the previous model output).").format( times.shape[0], previous_model_output[ feature_keys.FilteringResults.TIMES].shape[0])) if not (previous_model_output[feature_keys.FilteringResults.TIMES][:, -1] < times[:, 0]).all(): raise ValueError("Prediction times must be after the corresponding " "previous model output.") if steps is not None: predict_times = ( previous_model_output[feature_keys.FilteringResults.TIMES][:, -1:] + 1 + numpy.arange(steps)[None, ...]) else: predict_times = times return predict_times
apache-2.0
SummerLW/Perf-Insight-Report
third_party/graphy/graphy/backends/google_chart_api/base_encoder_test.py
33
22575
#!/usr/bin/python2.4 # # Copyright 2008 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """Test for the base encoder. Also serves as a base class for the chart-type-specific tests.""" from graphy import common from graphy import graphy_test from graphy import formatters from graphy.backends.google_chart_api import encoders from graphy.backends.google_chart_api import util class TestEncoder(encoders.BaseChartEncoder): """Simple implementation of BaseChartEncoder for testing common behavior.""" def _GetType(self, chart): return {'chart_type': 'TEST_TYPE'} def _GetDependentAxis(self, chart): return chart.left class TestChart(common.BaseChart): """Simple implementation of BaseChart for testing common behavior.""" def __init__(self, points=None): super(TestChart, self).__init__() if points is not None: self.AddData(points) def AddData(self, points, color=None, label=None): style = common._BasicStyle(color) series = common.DataSeries(points, style=style, label=label) self.data.append(series) return series class BaseChartTest(graphy_test.GraphyTest): """Base class for all chart-specific tests""" def ExpectAxes(self, labels, positions): """Helper to test that the chart axis spec matches the expected values.""" self.assertEqual(self.Param('chxl'), labels) self.assertEqual(self.Param('chxp'), positions) def GetChart(self, *args, **kwargs): """Get a chart object. Other classes can override to change the type of chart being tested. 
""" chart = TestChart(*args, **kwargs) chart.display = TestEncoder(chart) return chart def AddToChart(self, chart, points, color=None, label=None): """Add data to the chart. Chart is assumed to be of the same type as returned by self.GetChart(). """ return chart.AddData(points, color=color, label=label) def setUp(self): self.chart = self.GetChart() def testImgAndUrlUseSameUrl(self): """Check that Img() and Url() return the same URL.""" self.assertIn(self.chart.display.Url(500, 100, use_html_entities=True), self.chart.display.Img(500, 100)) def testImgUsesHtmlEntitiesInUrl(self): img_tag = self.chart.display.Img(500, 100) self.assertNotIn('&ch', img_tag) self.assertIn('&amp;ch', img_tag) def testParamsAreStrings(self): """Test that params are all converted to strings.""" self.chart.display.extra_params['test'] = 32 self.assertEqual(self.Param('test'), '32') def testExtraParamsOverideDefaults(self): self.assertNotEqual(self.Param('cht'), 'test') # Sanity check. self.chart.display.extra_params['cht'] = 'test' self.assertEqual(self.Param('cht'), 'test') def testExtraParamsCanUseLongNames(self): self.chart.display.extra_params['color'] = 'XYZ' self.assertEqual(self.Param('chco'), 'XYZ') def testExtraParamsCanUseNewNames(self): """Make sure future Google Chart API features can be accessed immediately through extra_params. (Double-checks that the long-to-short name conversion doesn't mess up the ability to use new features). 
""" self.chart.display.extra_params['fancy_new_feature'] = 'shiny' self.assertEqual(self.Param('fancy_new_feature'), 'shiny') def testEmptyParamsDropped(self): """Check that empty parameters don't end up in the URL.""" self.assertEqual(self.Param('chxt'), '') self.assertNotIn('chxt', self.chart.display.Url(0, 0)) def testSizes(self): self.assertIn('89x102', self.chart.display.Url(89, 102)) img = self.chart.display.Img(89, 102) self.assertIn('chs=89x102', img) self.assertIn('width="89"', img) self.assertIn('height="102"', img) def testChartType(self): self.assertEqual(self.Param('cht'), 'TEST_TYPE') def testChartSizeConvertedToInt(self): url = self.chart.display.Url(100.1, 200.2) self.assertIn('100x200', url) def testUrlBase(self): def assertStartsWith(actual_text, expected_start): message = "[%s] didn't start with [%s]" % (actual_text, expected_start) self.assert_(actual_text.startswith(expected_start), message) assertStartsWith(self.chart.display.Url(0, 0), 'http://chart.apis.google.com/chart') url_base = 'http://example.com/charts' self.chart.display.url_base = url_base assertStartsWith(self.chart.display.Url(0, 0), url_base) def testEnhancedEncoder(self): self.chart.display.enhanced_encoding = True self.assertEqual(self.Param('chd'), 'e:') def testUrlsEscaped(self): self.AddToChart(self.chart, [1, 2, 3]) url = self.chart.display.Url(500, 100) self.assertNotIn('chd=s:', url) self.assertIn('chd=s%3A', url) def testUrls_DefaultIsWithoutHtmlEntities(self): self.AddToChart(self.chart, [1, 2, 3]) self.AddToChart(self.chart, [1, 2, 3], label='Ciao&"Mario>Luigi"') url_default = self.chart.display.Url(500, 100) url_forced = self.chart.display.Url(500, 100, use_html_entities=False) self.assertEqual(url_forced, url_default) def testUrls_HtmlEntities(self): self.AddToChart(self.chart, [1, 2, 3]) self.AddToChart(self.chart, [1, 2, 3], label='Ciao&"Mario>Luigi"') url = self.chart.display.Url(500, 100, use_html_entities=True) self.assertNotIn('&ch', url) 
self.assertIn('&amp;ch', url) self.assertIn('%7CCiao%26%22Mario%3ELuigi%22', url) def testUrls_NoEscapeWithHtmlEntities(self): self.AddToChart(self.chart, [1, 2, 3]) self.AddToChart(self.chart, [1, 2, 3], label='Ciao&"Mario>Luigi"') self.chart.display.escape_url = False url = self.chart.display.Url(500, 100, use_html_entities=True) self.assertNotIn('&ch', url) self.assertIn('&amp;ch', url) self.assertIn('Ciao&amp;&quot;Mario&gt;Luigi&quot;', url) def testUrls_NoHtmlEntities(self): self.AddToChart(self.chart, [1, 2, 3]) self.AddToChart(self.chart, [1, 2, 3], label='Ciao&"Mario>Luigi"') url = self.chart.display.Url(500, 100, use_html_entities=False) self.assertIn('&ch', url) self.assertNotIn('&amp;ch', url) self.assertIn('%7CCiao%26%22Mario%3ELuigi%22', url) def testCanRemoveDefaultFormatters(self): self.assertEqual(3, len(self.chart.formatters)) # I don't know why you'd want to remove the default formatters like this. # It is just a proof that we can manipulate the default formatters # through their aliases. self.chart.formatters.remove(self.chart.auto_color) self.chart.formatters.remove(self.chart.auto_legend) self.chart.formatters.remove(self.chart.auto_scale) self.assertEqual(0, len(self.chart.formatters)) def testFormattersWorkOnCopy(self): """Make sure formatters can't modify the user's chart.""" self.AddToChart(self.chart, [1]) # By making sure our point is at the upper boundry, we make sure that both # line, pie, & bar charts encode it as a '9' in the simple encoding. self.chart.left.max = 1 self.chart.left.min = 0 # Sanity checks before adding a formatter. 
self.assertEqual(self.Param('chd'), 's:9') self.assertEqual(len(self.chart.data), 1) def MaliciousFormatter(chart): chart.data.pop() # Modify a mutable chart attribute self.chart.AddFormatter(MaliciousFormatter) self.assertEqual(self.Param('chd'), 's:', "Formatter wasn't used.") self.assertEqual(len(self.chart.data), 1, "Formatter was able to modify original chart.") self.chart.formatters.remove(MaliciousFormatter) self.assertEqual(self.Param('chd'), 's:9', "Chart changed even after removing the formatter") class XYChartTest(BaseChartTest): """Base class for charts that display lines or points in 2d. Pretty much anything but the pie chart. """ def testImgAndUrlUseSameUrl(self): """Check that Img() and Url() return the same URL.""" super(XYChartTest, self).testImgAndUrlUseSameUrl() self.AddToChart(self.chart, range(0, 100)) self.assertIn(self.chart.display.Url(500, 100, use_html_entities=True), self.chart.display.Img(500, 100)) self.chart = self.GetChart([-1, 0, 1]) self.assertIn(self.chart.display.Url(500, 100, use_html_entities=True), self.chart.display.Img(500, 100)) # TODO: Once the deprecated AddSeries is removed, revisit # whether we need this test. def testAddSeries(self): self.chart.auto_scale.buffer = 0 # Buffer causes trouble for testing. self.assertEqual(self.Param('chd'), 's:') self.AddToChart(self.chart, (1, 2, 3)) self.assertEqual(self.Param('chd'), 's:Af9') self.AddToChart(self.chart, (4, 5, 6)) self.assertEqual(self.Param('chd'), 's:AMY,lx9') # TODO: Once the deprecated AddSeries is removed, revisit # whether we need this test. def testAddSeriesReturnsValue(self): points = (1, 2, 3) series = self.AddToChart(self.chart, points, '#000000') self.assertTrue(series is not None) self.assertEqual(series.data, points) self.assertEqual(series.style.color, '#000000') def testFlatSeries(self): """Make sure we handle scaling of a flat data series correctly (there are div by zero issues). 
""" self.AddToChart(self.chart, [5, 5, 5]) self.assertEqual(self.Param('chd'), 's:AAA') self.chart.left.min = 0 self.chart.left.max = 5 self.assertEqual(self.Param('chd'), 's:999') self.chart.left.min = 5 self.chart.left.max = 15 self.assertEqual(self.Param('chd'), 's:AAA') def testEmptyPointsStillCreatesSeries(self): """If we pass an empty list for points, we expect to get an empty data series, not nothing. This way we can add data points later.""" chart = self.GetChart() self.assertEqual(0, len(chart.data)) data = [] chart = self.GetChart(data) self.assertEqual(1, len(chart.data)) self.assertEqual(0, len(chart.data[0].data)) # This is the use case we are trying to serve: adding points later. data.append(0) self.assertEqual(1, len(chart.data[0].data)) def testEmptySeriesDroppedFromParams(self): """By the time we make parameters, we don't want empty series to be included because it will mess up the indexes of other things like colors and makers. They should be dropped instead.""" self.chart.auto_scale.buffer = 0 # Check just an empty series. self.AddToChart(self.chart, [], color='eeeeee') self.assertEqual(self.Param('chd'), 's:') # Now check when there are some real series in there too. self.AddToChart(self.chart, [1], color='111111') self.AddToChart(self.chart, [], color='FFFFFF') self.AddToChart(self.chart, [2], color='222222') self.assertEqual(self.Param('chd'), 's:A,9') self.assertEqual(self.Param('chco'), '111111,222222') def testDataSeriesCorrectlyConverted(self): # To avoid problems caused by floating-point errors, the input in this test # is carefully chosen to avoid 0.5 boundries (1.5, 2.5, 3.5, ...). chart = self.GetChart() chart.auto_scale.buffer = 0 # The buffer makes testing difficult. 
self.assertEqual(self.Param('chd', chart), 's:') chart = self.GetChart(range(0, 10)) chart.auto_scale.buffer = 0 self.assertEqual(self.Param('chd', chart), 's:AHOUbipv29') chart = self.GetChart(range(-10, 0)) chart.auto_scale.buffer = 0 self.assertEqual(self.Param('chd', chart), 's:AHOUbipv29') chart = self.GetChart((-1.1, 0.0, 1.1, 2.2)) chart.auto_scale.buffer = 0 self.assertEqual(self.Param('chd', chart), 's:AUp9') def testSeriesColors(self): self.AddToChart(self.chart, [1, 2, 3], '000000') self.AddToChart(self.chart, [4, 5, 6], 'FFFFFF') self.assertEqual(self.Param('chco'), '000000,FFFFFF') def testSeriesCaption_NoCaptions(self): self.AddToChart(self.chart, [1, 2, 3]) self.AddToChart(self.chart, [4, 5, 6]) self.assertRaises(KeyError, self.Param, 'chdl') def testSeriesCaption_SomeCaptions(self): self.AddToChart(self.chart, [1, 2, 3]) self.AddToChart(self.chart, [4, 5, 6], label='Label') self.AddToChart(self.chart, [7, 8, 9]) self.assertEqual(self.Param('chdl'), '|Label|') def testThatZeroIsPreservedInCaptions(self): """Test that a 0 caption becomes '0' and not ''. (This makes sure that the logic to rewrite a label of None to '' doesn't also accidentally rewrite 0 to ''). 
""" self.AddToChart(self.chart, [], label=0) self.AddToChart(self.chart, [], label=1) self.assertEqual(self.Param('chdl'), '0|1') def testSeriesCaption_AllCaptions(self): self.AddToChart(self.chart, [1, 2, 3], label='Its') self.AddToChart(self.chart, [4, 5, 6], label='Me') self.AddToChart(self.chart, [7, 8, 9], label='Mario') self.assertEqual(self.Param('chdl'), 'Its|Me|Mario') def testDefaultColorsApplied(self): self.AddToChart(self.chart, [1, 2, 3]) self.AddToChart(self.chart, [4, 5, 6]) self.assertEqual(self.Param('chco'), '0000ff,ff0000') def testShowingAxes(self): self.assertEqual(self.Param('chxt'), '') self.chart.left.min = 3 self.chart.left.max = 5 self.assertEqual(self.Param('chxt'), '') self.chart.left.labels = ['a'] self.assertEqual(self.Param('chxt'), 'y') self.chart.right.labels = ['a'] self.assertEqual(self.Param('chxt'), 'y,r') self.chart.left.labels = [] # Set back to the original state. self.assertEqual(self.Param('chxt'), 'r') def testAxisRanges(self): self.chart.left.labels = ['a'] self.chart.bottom.labels = ['a'] self.assertEqual(self.Param('chxr'), '') self.chart.left.min = -5 self.chart.left.max = 10 self.assertEqual(self.Param('chxr'), '0,-5,10') self.chart.bottom.min = 0.5 self.chart.bottom.max = 0.75 self.assertEqual(self.Param('chxr'), '0,-5,10|1,0.5,0.75') def testAxisLabels(self): self.ExpectAxes('', '') self.chart.left.labels = [10, 20, 30] self.ExpectAxes('0:|10|20|30', '') self.chart.left.label_positions = [0, 50, 100] self.ExpectAxes('0:|10|20|30', '0,0,50,100') self.chart.right.labels = ['cow', 'horse', 'monkey'] self.chart.right.label_positions = [3.7, 10, -22.9] self.ExpectAxes('0:|10|20|30|1:|cow|horse|monkey', '0,0,50,100|1,3.7,10,-22.9') def testGridBottomAxis(self): self.chart.bottom.min = 0 self.chart.bottom.max = 20 self.chart.bottom.grid_spacing = 10 self.assertEqual(self.Param('chg'), '50,0,1,0') self.chart.bottom.grid_spacing = 2 self.assertEqual(self.Param('chg'), '10,0,1,0') def testGridFloatingPoint(self): """Test that 
you can get decimal grid values in chg.""" self.chart.bottom.min = 0 self.chart.bottom.max = 8 self.chart.bottom.grid_spacing = 1 self.assertEqual(self.Param('chg'), '12.5,0,1,0') self.chart.bottom.max = 3 self.assertEqual(self.Param('chg'), '33.3,0,1,0') def testGridLeftAxis(self): self.chart.auto_scale.buffer = 0 self.AddToChart(self.chart, (0, 20)) self.chart.left.grid_spacing = 5 self.assertEqual(self.Param('chg'), '0,25,1,0') def testLabelGridBottomAxis(self): self.AddToChart(self.chart, [0, 20, 40]) self.chart.bottom.label_gridlines = True self.chart.bottom.labels = ['Apple', 'Banana', 'Coconut'] self.chart.bottom.label_positions = [1.5, 5, 8.5] self.chart.display._width = 320 self.chart.display._height = 240 self.assertEqual(self.Param('chxtc'), '0,-320') def testLabelGridLeftAxis(self): self.AddToChart(self.chart, [0, 20, 40]) self.chart.left.label_gridlines = True self.chart.left.labels = ['Few', 'Some', 'Lots'] self.chart.left.label_positions = [5, 20, 35] self.chart.display._width = 320 self.chart.display._height = 240 self.assertEqual(self.Param('chxtc'), '0,-320') def testLabelGridBothAxes(self): self.AddToChart(self.chart, [0, 20, 40]) self.chart.left.label_gridlines = True self.chart.left.labels = ['Few', 'Some', 'Lots'] self.chart.left.label_positions = [5, 20, 35] self.chart.bottom.label_gridlines = True self.chart.bottom.labels = ['Apple', 'Banana', 'Coconut'] self.chart.bottom.label_positions = [1.5, 5, 8.5] self.chart.display._width = 320 self.chart.display._height = 240 self.assertEqual(self.Param('chxtc'), '0,-320|1,-320') def testDefaultDataScalingNotPersistant(self): """The auto-scaling shouldn't permanantly set the scale.""" self.chart.auto_scale.buffer = 0 # Buffer just makes the math tricky here. # This data should scale to the simple encoding's min/middle/max values # (A, f, 9). 
self.AddToChart(self.chart, [1, 2, 3]) self.assertEqual(self.Param('chd'), 's:Af9') # Different data that maintains the same relative spacing *should* scale # to the same min/middle/max. self.chart.data[0].data = [10, 20, 30] self.assertEqual(self.Param('chd'), 's:Af9') def FakeScale(self, data, old_min, old_max, new_min, new_max): self.min = old_min self.max = old_max return data def testDefaultDataScaling(self): """If you don't set min/max, it should use the data's min/max.""" orig_scale = util.ScaleData util.ScaleData = self.FakeScale try: self.AddToChart(self.chart, [2, 3, 5, 7, 11]) self.chart.auto_scale.buffer = 0 # This causes scaling to happen & calls FakeScale. self.chart.display.Url(0, 0) self.assertEqual(2, self.min) self.assertEqual(11, self.max) finally: util.ScaleData = orig_scale def testDefaultDataScalingAvoidsCropping(self): """The default scaling should give a little buffer to avoid cropping.""" orig_scale = util.ScaleData util.ScaleData = self.FakeScale try: self.AddToChart(self.chart, [1, 6]) # This causes scaling to happen & calls FakeScale. self.chart.display.Url(0, 0) buffer = 5 * self.chart.auto_scale.buffer self.assertEqual(1 - buffer, self.min) self.assertEqual(6 + buffer, self.max) finally: util.ScaleData = orig_scale def testExplicitDataScaling(self): """If you set min/max, data should be scaled to this.""" orig_scale = util.ScaleData util.ScaleData = self.FakeScale try: self.AddToChart(self.chart, [2, 3, 5, 7, 11]) self.chart.left.min = -7 self.chart.left.max = 49 # This causes scaling to happen & calls FakeScale. self.chart.display.Url(0, 0) self.assertEqual(-7, self.min) self.assertEqual(49, self.max) finally: util.ScaleData = orig_scale def testImplicitMinValue(self): """min values should be filled in if they are not set explicitly.""" orig_scale = util.ScaleData util.ScaleData = self.FakeScale try: self.AddToChart(self.chart, [0, 10]) self.chart.auto_scale.buffer = 0 self.chart.display.Url(0, 0) # This causes a call to FakeScale. 
self.assertEqual(0, self.min) self.chart.left.min = -5 self.chart.display.Url(0, 0) # This causes a call to FakeScale. self.assertEqual(-5, self.min) finally: util.ScaleData = orig_scale def testImplicitMaxValue(self): """max values should be filled in if they are not set explicitly.""" orig_scale = util.ScaleData util.ScaleData = self.FakeScale try: self.AddToChart(self.chart, [0, 10]) self.chart.auto_scale.buffer = 0 self.chart.display.Url(0, 0) # This causes a call to FakeScale. self.assertEqual(10, self.max) self.chart.left.max = 15 self.chart.display.Url(0, 0) # This causes a call to FakeScale. self.assertEqual(15, self.max) finally: util.ScaleData = orig_scale def testNoneCanAppearInData(self): """None should be a valid value in a data series. (It means "no data at this point") """ # Buffer makes comparison difficult because min/max aren't A & 9 self.chart.auto_scale.buffer = 0 self.AddToChart(self.chart, [1, None, 3]) self.assertEqual(self.Param('chd'), 's:A_9') def testResolveLabelCollision(self): self.chart.auto_scale.buffer = 0 self.AddToChart(self.chart, [500, 1000]) self.AddToChart(self.chart, [100, 999]) self.AddToChart(self.chart, [200, 900]) self.AddToChart(self.chart, [200, -99]) self.AddToChart(self.chart, [100, -100]) self.chart.right.max = 1000 self.chart.right.min = -100 self.chart.right.labels = [1000, 999, 900, 0, -99, -100] self.chart.right.label_positions = self.chart.right.labels separation = formatters.LabelSeparator(right=40) self.chart.AddFormatter(separation) self.assertEqual(self.Param('chxp'), '0,1000,960,900,0,-60,-100') # Try to force a greater spacing than possible separation.right = 300 self.assertEqual(self.Param('chxp'), '0,1000,780,560,340,120,-100') # Cluster some values around the lower and upper threshold to verify # that order is preserved. 
self.chart.right.labels = [1000, 901, 900, 899, 10, 1, -50, -100] self.chart.right.label_positions = self.chart.right.labels separation.right = 100 self.assertEqual(self.Param('chxp'), '0,1000,900,800,700,200,100,0,-100') self.assertEqual(self.Param('chxl'), '0:|1000|901|900|899|10|1|-50|-100') # Try to adjust a single label self.chart.right.labels = [1000] self.chart.right.label_positions = self.chart.right.labels self.assertEqual(self.Param('chxp'), '0,1000') self.assertEqual(self.Param('chxl'), '0:|1000') def testAdjustSingleLabelDoesNothing(self): """Make sure adjusting doesn't bork the single-label case.""" self.AddToChart(self.chart, (5, 6, 7)) self.chart.left.labels = ['Cutoff'] self.chart.left.label_positions = [3] def CheckExpectations(): self.assertEqual(self.Param('chxl'), '0:|Cutoff') self.assertEqual(self.Param('chxp'), '0,3') CheckExpectations() # Check without adjustment self.chart.AddFormatter(formatters.LabelSeparator(right=15)) CheckExpectations() # Make sure adjustment hasn't changed anything if __name__ == '__main__': graphy_test.main()
bsd-3-clause
h3biomed/ansible
lib/ansible/modules/cloud/azure/azure_rm_sqlfirewallrule.py
14
9606
#!/usr/bin/python # # Copyright (c) 2017 Zim Kalinowski, <zikalino@microsoft.com> # # GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt) from __future__ import absolute_import, division, print_function __metaclass__ = type ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: azure_rm_sqlfirewallrule version_added: "2.7" short_description: Manage Firewall Rule instance. description: - Create, update and delete instance of Firewall Rule. options: resource_group: description: - The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal. required: True server_name: description: - The name of the server. required: True name: description: - The name of the firewall rule. required: True start_ip_address: description: - The start IP address of the firewall rule. Must be IPv4 format. Use value C(0.0.0.0) to represent all Azure-internal IP addresses. end_ip_address: description: - "The end IP address of the firewall rule. Must be IPv4 format. Must be greater than or equal to startIpAddress. Use value C(0.0.0.0) to represe nt all Azure-internal IP addresses." state: description: - Assert the state of the SQL Database. Use C(present) to create or update an SQL Database and C(absent) to delete it. default: present choices: - absent - present extends_documentation_fragment: - azure author: - "Zim Kalinowski (@zikalino)" ''' EXAMPLES = ''' - name: Create (or update) Firewall Rule azure_rm_sqlfirewallrule: resource_group: myResourceGroup server_name: firewallrulecrudtest-6285 name: firewallrulecrudtest-5370 start_ip_address: 172.28.10.136 end_ip_address: 172.28.10.138 ''' RETURN = ''' id: description: - Resource ID. 
returned: always type: str sample: "/subscriptions/xxxxxxxx-xxxx-xxxx-xxxx-xxxxxxxxxxxx/resourceGroups/myResourceGroup/providers/Microsoft.Sql/servers/firewallrulecrudtest-628 5/firewallRules/firewallrulecrudtest-5370" ''' import time from ansible.module_utils.azure_rm_common import AzureRMModuleBase try: from msrestazure.azure_exceptions import CloudError from msrest.polling import LROPoller from azure.mgmt.sql import SqlManagementClient from msrest.serialization import Model except ImportError: # This is handled in azure_rm_common pass class Actions: NoAction, Create, Update, Delete = range(4) class AzureRMSqlFirewallRule(AzureRMModuleBase): """Configuration class for an Azure RM Firewall Rule resource""" def __init__(self): self.module_arg_spec = dict( resource_group=dict( type='str', required=True ), server_name=dict( type='str', required=True ), name=dict( type='str', required=True ), start_ip_address=dict( type='str' ), end_ip_address=dict( type='str' ), state=dict( type='str', default='present', choices=['present', 'absent'] ) ) self.resource_group = None self.server_name = None self.name = None self.start_ip_address = None self.end_ip_address = None self.results = dict(changed=False) self.state = None self.to_do = Actions.NoAction super(AzureRMSqlFirewallRule, self).__init__(derived_arg_spec=self.module_arg_spec, supports_check_mode=True, supports_tags=False) def exec_module(self, **kwargs): """Main module execution method""" for key in list(self.module_arg_spec.keys()): if hasattr(self, key): setattr(self, key, kwargs[key]) old_response = self.get_firewallrule() response = None if not old_response: self.log("Firewall Rule instance doesn't exist") if self.state == 'absent': self.log("Old instance didn't exist") else: self.to_do = Actions.Create else: self.log("Firewall Rule instance already exists") if self.state == 'absent': self.to_do = Actions.Delete elif self.state == 'present': self.log("Need to check if Firewall Rule instance has to be deleted or may 
be updated") if (self.start_ip_address is not None) and (self.start_ip_address != old_response['start_ip_address']): self.to_do = Actions.Update if (self.end_ip_address is not None) and (self.end_ip_address != old_response['end_ip_address']): self.to_do = Actions.Update if (self.to_do == Actions.Create) or (self.to_do == Actions.Update): self.log("Need to Create / Update the Firewall Rule instance") if self.check_mode: self.results['changed'] = True return self.results response = self.create_update_firewallrule() if not old_response: self.results['changed'] = True else: self.results['changed'] = old_response.__ne__(response) self.log("Creation / Update done") elif self.to_do == Actions.Delete: self.log("Firewall Rule instance deleted") self.results['changed'] = True if self.check_mode: return self.results self.delete_firewallrule() # make sure instance is actually deleted, for some Azure resources, instance is hanging around # for some time after deletion -- this should be really fixed in Azure while self.get_firewallrule(): time.sleep(20) else: self.log("Firewall Rule instance unchanged") self.results['changed'] = False response = old_response if response: self.results["id"] = response["id"] return self.results def create_update_firewallrule(self): ''' Creates or updates Firewall Rule with the specified configuration. 
:return: deserialized Firewall Rule instance state dictionary ''' self.log("Creating / Updating the Firewall Rule instance {0}".format(self.name)) try: response = self.sql_client.firewall_rules.create_or_update(resource_group_name=self.resource_group, server_name=self.server_name, firewall_rule_name=self.name, start_ip_address=self.start_ip_address, end_ip_address=self.end_ip_address) if isinstance(response, LROPoller): response = self.get_poller_result(response) except CloudError as exc: self.log('Error attempting to create the Firewall Rule instance.') self.fail("Error creating the Firewall Rule instance: {0}".format(str(exc))) return response.as_dict() def delete_firewallrule(self): ''' Deletes specified Firewall Rule instance in the specified subscription and resource group. :return: True ''' self.log("Deleting the Firewall Rule instance {0}".format(self.name)) try: response = self.sql_client.firewall_rules.delete(resource_group_name=self.resource_group, server_name=self.server_name, firewall_rule_name=self.name) except CloudError as e: self.log('Error attempting to delete the Firewall Rule instance.') self.fail("Error deleting the Firewall Rule instance: {0}".format(str(e))) return True def get_firewallrule(self): ''' Gets the properties of the specified Firewall Rule. :return: deserialized Firewall Rule instance state dictionary ''' self.log("Checking if the Firewall Rule instance {0} is present".format(self.name)) found = False try: response = self.sql_client.firewall_rules.get(resource_group_name=self.resource_group, server_name=self.server_name, firewall_rule_name=self.name) found = True self.log("Response : {0}".format(response)) self.log("Firewall Rule instance : {0} found".format(response.name)) except CloudError as e: self.log('Did not find the Firewall Rule instance.') if found is True: return response.as_dict() return False def main(): """Main execution""" AzureRMSqlFirewallRule() if __name__ == '__main__': main()
gpl-3.0
jdavisp3/TigerShark
tigershark/tools/generate_all_parsers.py
2
2975
#!/usr/bin/python
"""
Generate all parsers from PyX12-1.5.0.zip

Example Usage:

python generate_all_parsers.py ../../Downloads/pyx12-1.5.0.zip
"""
import argparse
import fnmatch
import logging
import os
import sys
import zipfile

from tigershark.tools import convertPyX12


def convert_from_zip(zip_file_path, dest_path, structure):
    """Convert every 4010 X12 map XML file in a PyX12 zip to a Python parser.

    :param zip_file_path: path to the pyx12 distribution zip file
    :param dest_path: directory where the generated ``M*.py`` modules go
    :param structure: 'flat' or 'nested', forwarded to convertPyX12.writeFile
    """
    zipf = zipfile.ZipFile(zip_file_path)
    filenames = [f for f in zipf.namelist() if "map" in f and "xml" in f]
    # The archive's top-level directory encodes the pyx12 version.
    pyx12_version_str = filenames[0].split("/", 1)[0]
    for filename in filenames:
        if fnmatch.fnmatch(filename.rsplit('/', 1)[1],
                           "[0-9][0-9][0-9]*.4010.X*.xml"):
            try:
                data_ele_file = zipf.open("{pyx12}/pyx12/map/dataele.xml".format(
                    pyx12=pyx12_version_str))
                codes_file = zipf.open("{pyx12}/pyx12/map/codes.xml".format(
                    pyx12=pyx12_version_str))
                spec_file = zipf.open(filename)
                # e.g. "835.4010.X091.A1.xml" -> "M835_4010_X091_A1.py"
                dest_fname = os.path.join(dest_path, "M%s.py") % \
                    filename.rsplit('/', 1)[1].rsplit('.', 1)[0].\
                    replace(".", "_")
                parser_name = "parsed_%s" % \
                    filename.rsplit('/', 1)[1].split('.', 1)[0]
                logging.info("Converting {filename} to {dest}".format(
                    filename=filename, dest=dest_fname))
                convertPyX12.writeFile(dest_fname, parser_name,
                                       convertPyX12.convertFile(spec_file,
                                                                data_ele_file,
                                                                codes_file),
                                       structure=structure)
                # ZipFile file-like objects don't support seek, so we have to
                # close and re-open them every time :\
                spec_file.close()
                data_ele_file.close()
                codes_file.close()
            except Exception as e:
                # Log before re-raising; the original code raised first, which
                # made the logging call unreachable and lost the message.
                logging.error(e)
                raise


if __name__ == "__main__":
    logging.basicConfig(stream=sys.stdout, level=logging.INFO)
    parser = argparse.ArgumentParser(
        description="Convert all PyX12 XML files to python modules.")
    parser.add_argument('zip_file_path', help="The path to the zip file "
                        "containing the pyX12 X12 spec files to convert")
    parser.add_argument('-d', '--destination_path', default="parsers",
                        help="The destination path. Defaults to './parsers/'.")
    parser.add_argument('-s', '--structure', choices=['flat', 'nested'],
                        default="flat", help="The structure of the resulting python "
                        "class. Nested is easier to read, but may not compile "
                        "due to too many object instantiations in a single "
                        "call.")
    args = parser.parse_args()
    try:
        os.makedirs(args.destination_path)
    except OSError:
        # Directory already exists (or is not creatable); conversion will
        # surface any real problem when it tries to write output files.
        pass
    convert_from_zip(args.zip_file_path, args.destination_path, args.structure)
bsd-3-clause
kumarshivam675/Mobile10X-Hack
sidd/virtualenv-14.0.6/flask/lib/python2.7/site-packages/pip/_vendor/requests/packages/urllib3/contrib/pyopenssl.py
197
10094
'''SSL with SNI_-support for Python 2. Follow these instructions if you would
like to verify SSL certificates in Python 2. Note, the default libraries do
*not* do certificate checking; you need to do additional work to validate
certificates yourself.

This needs the following packages installed:

* pyOpenSSL (tested with 0.13)
* ndg-httpsclient (tested with 0.3.2)
* pyasn1 (tested with 0.1.6)

You can install them with the following command:

    pip install pyopenssl ndg-httpsclient pyasn1

To activate certificate checking, call
:func:`~urllib3.contrib.pyopenssl.inject_into_urllib3` from your Python code
before you begin making HTTP requests. This can be done in a ``sitecustomize``
module, or at any other time before your application begins using ``urllib3``,
like this::

    try:
        import urllib3.contrib.pyopenssl
        urllib3.contrib.pyopenssl.inject_into_urllib3()
    except ImportError:
        pass

Now you can use :mod:`urllib3` as you normally would, and it will support SNI
when the required modules are installed.

Activating this module also has the positive side effect of disabling SSL/TLS
compression in Python 2 (see `CRIME attack`_).

If you want to configure the default list of supported cipher suites, you can
set the ``urllib3.contrib.pyopenssl.DEFAULT_SSL_CIPHER_LIST`` variable.

Module Variables
----------------

:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.

.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)

'''
from __future__ import absolute_import

try:
    from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
    from ndg.httpsclient.subj_alt_name import SubjectAltName as BaseSubjectAltName
except SyntaxError as e:
    # ndg-httpsclient raises SyntaxError on unsupported interpreters; surface
    # it as the ImportError callers of this optional module expect.
    raise ImportError(e)

import OpenSSL.SSL
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.type import univ, constraint
from socket import _fileobject, timeout, error as SocketError
import ssl
import select

from .. import connection
from .. import util

__all__ = ['inject_into_urllib3', 'extract_from_urllib3']

# SNI only *really* works if we can read the subjectAltName of certificates.
HAS_SNI = SUBJ_ALT_NAME_SUPPORT

# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
    ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
    ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}

# TLS 1.1/1.2 constants exist only on new-enough ssl AND pyOpenSSL builds.
if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
    _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD

if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
    _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD

try:
    # ssl.PROTOCOL_SSLv3 may be absent when SSLv3 was compiled out.
    _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
except AttributeError:
    pass

_openssl_verify = {
    ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
    ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
    ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}

DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS

# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384

# Saved originals so extract_from_urllib3() can undo the monkey-patch.
orig_util_HAS_SNI = util.HAS_SNI
orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket


def inject_into_urllib3():
    'Monkey-patch urllib3 with PyOpenSSL-backed SSL-support.'
    connection.ssl_wrap_socket = ssl_wrap_socket
    util.HAS_SNI = HAS_SNI


def extract_from_urllib3():
    'Undo monkey-patching by :func:`inject_into_urllib3`.'
    connection.ssl_wrap_socket = orig_connection_ssl_wrap_socket
    util.HAS_SNI = orig_util_HAS_SNI


# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
class SubjectAltName(BaseSubjectAltName):
    '''ASN.1 implementation for subjectAltNames support'''

    # There is no limit to how many SAN certificates a certificate may have,
    # however this needs to have some limit so we'll set an arbitrarily high
    # limit.
    sizeSpec = univ.SequenceOf.sizeSpec + \
        constraint.ValueSizeConstraint(1, 1024)


# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
def get_subj_alt_name(peer_cert):
    """Return the list of dNSName subjectAltName entries of *peer_cert*.

    Returns an empty list when SAN support is unavailable or the certificate
    carries no subjectAltName extension.
    """
    # Search through extensions
    dns_name = []
    if not SUBJ_ALT_NAME_SUPPORT:
        return dns_name

    general_names = SubjectAltName()
    for i in range(peer_cert.get_extension_count()):
        ext = peer_cert.get_extension(i)
        ext_name = ext.get_short_name()
        if ext_name != 'subjectAltName':
            continue

        # PyOpenSSL returns extension data in ASN.1 encoded form
        ext_dat = ext.get_data()
        decoded_dat = der_decoder.decode(ext_dat, asn1Spec=general_names)

        for name in decoded_dat:
            if not isinstance(name, SubjectAltName):
                continue
            for entry in range(len(name)):
                component = name.getComponentByPosition(entry)
                if component.getName() != 'dNSName':
                    continue
                dns_name.append(str(component.getComponent()))

    return dns_name


class WrappedSocket(object):
    '''API-compatibility wrapper for Python OpenSSL's Connection-class.

    Note: _makefile_refs, _drop() and _reuse() are needed for the garbage
    collector of pypy.
    '''

    def __init__(self, connection, socket, suppress_ragged_eofs=True):
        self.connection = connection
        self.socket = socket
        self.suppress_ragged_eofs = suppress_ragged_eofs
        # Count of live makefile() handles; close() is a no-op while > 0.
        self._makefile_refs = 0

    def fileno(self):
        return self.socket.fileno()

    def makefile(self, mode, bufsize=-1):
        self._makefile_refs += 1
        return _fileobject(self, mode, bufsize, close=True)

    def recv(self, *args, **kwargs):
        try:
            data = self.connection.recv(*args, **kwargs)
        except OpenSSL.SSL.SysCallError as e:
            # (-1, 'Unexpected EOF') is a ragged close; treat as EOF if asked.
            if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
                return b''
            else:
                raise SocketError(e)
        except OpenSSL.SSL.ZeroReturnError as e:
            if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
                return b''
            else:
                raise
        except OpenSSL.SSL.WantReadError:
            # Not enough TLS data yet: wait for readability, then retry
            # (recursively) or time out like the stdlib ssl module would.
            rd, wd, ed = select.select(
                [self.socket], [], [], self.socket.gettimeout())
            if not rd:
                raise timeout('The read operation timed out')
            else:
                return self.recv(*args, **kwargs)
        else:
            return data

    def settimeout(self, timeout):
        return self.socket.settimeout(timeout)

    def _send_until_done(self, data):
        # Retry WantWriteError until OpenSSL accepts (part of) the data.
        while True:
            try:
                return self.connection.send(data)
            except OpenSSL.SSL.WantWriteError:
                _, wlist, _ = select.select([], [self.socket], [],
                                            self.socket.gettimeout())
                if not wlist:
                    raise timeout()
                continue

    def sendall(self, data):
        # OpenSSL caps single writes, so feed it SSL_WRITE_BLOCKSIZE chunks.
        total_sent = 0
        while total_sent < len(data):
            sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
            total_sent += sent

    def shutdown(self):
        # FIXME rethrow compatible exceptions should we ever use this
        self.connection.shutdown()

    def close(self):
        if self._makefile_refs < 1:
            try:
                return self.connection.close()
            except OpenSSL.SSL.Error:
                return
        else:
            self._makefile_refs -= 1

    def getpeercert(self, binary_form=False):
        """Mimic ssl.SSLSocket.getpeercert() on top of PyOpenSSL."""
        x509 = self.connection.get_peer_certificate()

        if not x509:
            return x509

        if binary_form:
            return OpenSSL.crypto.dump_certificate(
                OpenSSL.crypto.FILETYPE_ASN1,
                x509)

        return {
            'subject': (
                (('commonName', x509.get_subject().CN),),
            ),
            'subjectAltName': [
                ('DNS', value)
                for value in get_subj_alt_name(x509)
            ]
        }

    def _reuse(self):
        self._makefile_refs += 1

    def _drop(self):
        if self._makefile_refs < 1:
            self.close()
        else:
            self._makefile_refs -= 1


def _verify_callback(cnx, x509, err_no, err_depth, return_code):
    # Accept the certificate iff OpenSSL reported no verification error.
    return err_no == 0


def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
                    ca_certs=None, server_hostname=None,
                    ssl_version=None, ca_cert_dir=None):
    """Drop-in replacement for urllib3's ssl_wrap_socket backed by PyOpenSSL.

    Performs the TLS handshake (with SNI via *server_hostname*) and returns a
    :class:`WrappedSocket`.
    """
    ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
    if certfile:
        keyfile = keyfile or certfile  # Match behaviour of the normal python ssl library
        ctx.use_certificate_file(certfile)
    if keyfile:
        ctx.use_privatekey_file(keyfile)
    if cert_reqs != ssl.CERT_NONE:
        ctx.set_verify(_openssl_verify[cert_reqs], _verify_callback)
    if ca_certs or ca_cert_dir:
        try:
            ctx.load_verify_locations(ca_certs, ca_cert_dir)
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad ca_certs: %r' % ca_certs, e)
    else:
        ctx.set_default_verify_paths()

    # Disable TLS compression to migitate CRIME attack (issue #309)
    OP_NO_COMPRESSION = 0x20000
    ctx.set_options(OP_NO_COMPRESSION)

    # Set list of supported ciphersuites.
    ctx.set_cipher_list(DEFAULT_SSL_CIPHER_LIST)

    cnx = OpenSSL.SSL.Connection(ctx, sock)
    cnx.set_tlsext_host_name(server_hostname)
    cnx.set_connect_state()
    while True:
        try:
            cnx.do_handshake()
        except OpenSSL.SSL.WantReadError:
            rd, _, _ = select.select([sock], [], [], sock.gettimeout())
            if not rd:
                raise timeout('select timed out')
            continue
        except OpenSSL.SSL.Error as e:
            raise ssl.SSLError('bad handshake: %r' % e)
        break

    return WrappedSocket(cnx, sock)
gpl-3.0
philsong/p2pool
nattraverso/pynupnp/upnp.py
283
18985
""" This module is the heart of the upnp support. Device discover, ip discovery and port mappings are implemented here. @author: Raphael Slinckx @author: Anthony Baxter @copyright: Copyright 2005 @license: LGPL @contact: U{raphael@slinckx.net<mailto:raphael@slinckx.net>} @version: 0.1.0 """ __revision__ = "$id" import socket, random, urlparse, logging from twisted.internet import reactor, defer from twisted.web import client from twisted.internet.protocol import DatagramProtocol from twisted.internet.error import CannotListenError from twisted.python import failure from nattraverso.pynupnp.soap import SoapProxy from nattraverso.pynupnp.upnpxml import UPnPXml from nattraverso import ipdiscover, portmapper class UPnPError(Exception): """ A generic UPnP error, with a descriptive message as content. """ pass class UPnPMapper(portmapper.NATMapper): """ This is the UPnP port mapper implementing the L{NATMapper<portmapper.NATMapper>} interface. @see: L{NATMapper<portmapper.NATMapper>} """ def __init__(self, upnp): """ Creates the mapper, with the given L{UPnPDevice} instance. @param upnp: L{UPnPDevice} instance """ self._mapped = {} self._upnp = upnp def map(self, port): """ See interface """ self._check_valid_port(port) #Port is already mapped if port in self._mapped: return defer.succeed(self._mapped[port]) #Trigger a new mapping creation, first fetch local ip. 
result = ipdiscover.get_local_ip() self._mapped[port] = result return result.addCallback(self._map_got_local_ip, port) def info(self, port): """ See interface """ # If the mapping exists, everything's ok if port in self._mapped: return self._mapped[port] else: raise ValueError('Port %r is not currently mapped'%(port)) def unmap(self, port): """ See interface """ if port in self._mapped: existing = self._mapped[port] #Pending mapping, queue an unmap,return existing deferred if type(existing) is not tuple: existing.addCallback(lambda x: self.unmap(port)) return existing #Remove our local mapping del self._mapped[port] #Ask the UPnP to remove the mapping extaddr, extport = existing return self._upnp.remove_port_mapping(extport, port.getHost().type) else: raise ValueError('Port %r is not currently mapped'%(port)) def get_port_mappings(self): """ See interface """ return self._upnp.get_port_mappings() def _map_got_local_ip(self, ip_result, port): """ We got the local ip address, retreive the existing port mappings in the device. @param ip_result: result of L{ipdiscover.get_local_ip} @param port: a L{twisted.internet.interfaces.IListeningPort} we want to map """ local, ip = ip_result return self._upnp.get_port_mappings().addCallback( self._map_got_port_mappings, ip, port) def _map_got_port_mappings(self, mappings, ip, port): """ We got all the existing mappings in the device, find an unused one and assign it for the requested port. 
@param ip: The local ip of this host "x.x.x.x" @param port: a L{twisted.internet.interfaces.IListeningPort} we want to map @param mappings: result of L{UPnPDevice.get_port_mappings} """ #Get the requested mapping's info ptype = port.getHost().type intport = port.getHost().port for extport in [random.randrange(1025, 65536) for val in range(20)]: # Check if there is an existing mapping, if it does not exist, bingo if not (ptype, extport) in mappings: break if (ptype, extport) in mappings: existing = mappings[ptype, extport] local_ip, local_port = existing if local_ip == ip and local_port == intport: # Existing binding for this host/port/proto - replace it break # Triggers the creation of the mapping on the device result = self._upnp.add_port_mapping(ip, intport, extport, 'pynupnp', ptype) # We also need the external IP, so we queue first an # External IP Discovery, then we add the mapping. return result.addCallback( lambda x: self._upnp.get_external_ip()).addCallback( self._port_mapping_added, extport, port) def _port_mapping_added(self, extaddr, extport, port): """ The port mapping was added in the device, this means:: Internet NAT LAN | > IP:extaddr |> IP:local ip > Port:extport |> Port:port | @param extaddr: The exernal ip address @param extport: The external port as number @param port: The internal port as a L{twisted.internet.interfaces.IListeningPort} object, that has been mapped """ self._mapped[port] = (extaddr, extport) return (extaddr, extport) class UPnPDevice: """ Represents an UPnP device, with the associated infos, and remote methods. """ def __init__(self, soap_proxy, info): """ Build the device, with the given SOAP proxy, and the meta-infos. @param soap_proxy: an initialized L{SoapProxy} to the device @param info: a dictionnary of various infos concerning the device extracted with L{UPnPXml} """ self._soap_proxy = soap_proxy self._info = info def get_external_ip(self): """ Triggers an external ip discovery on the upnp device. 
Returns a deferred called with the external ip of this host. @return: A deferred called with the ip address, as "x.x.x.x" @rtype: L{twisted.internet.defer.Deferred} """ result = self._soap_proxy.call('GetExternalIPAddress') result.addCallback(self._on_external_ip) return result def get_port_mappings(self): """ Retreive the existing port mappings @see: L{portmapper.NATMapper.get_port_mappings} @return: A deferred called with the dictionnary as defined in the interface L{portmapper.NATMapper.get_port_mappings} @rtype: L{twisted.internet.defer.Deferred} """ return self._get_port_mapping() def add_port_mapping(self, local_ip, intport, extport, desc, proto, lease=0): """ Add a port mapping in the upnp device. Returns a deferred. @param local_ip: the LAN ip of this host as "x.x.x.x" @param intport: the internal port number @param extport: the external port number @param desc: the description of this mapping (string) @param proto: "UDP" or "TCP" @param lease: The duration of the lease in (mili)seconds(??) @return: A deferred called with None when the mapping is done @rtype: L{twisted.internet.defer.Deferred} """ result = self._soap_proxy.call('AddPortMapping', NewRemoteHost="", NewExternalPort=extport, NewProtocol=proto, NewInternalPort=intport, NewInternalClient=local_ip, NewEnabled=1, NewPortMappingDescription=desc, NewLeaseDuration=lease) return result.addCallbacks(self._on_port_mapping_added, self._on_no_port_mapping_added) def remove_port_mapping(self, extport, proto): """ Remove an existing port mapping on the device. 
Returns a deferred @param extport: the external port number associated to the mapping to be removed @param proto: either "UDP" or "TCP" @return: A deferred called with None when the mapping is done @rtype: L{twisted.internet.defer.Deferred} """ result = self._soap_proxy.call('DeletePortMapping', NewRemoteHost="", NewExternalPort=extport, NewProtocol=proto) return result.addCallbacks(self._on_port_mapping_removed, self._on_no_port_mapping_removed) # Private -------- def _on_external_ip(self, res): """ Called when we received the external ip address from the device. @param res: the SOAPpy structure of the result @return: the external ip string, as "x.x.x.x" """ logging.debug("Got external ip struct: %r", res) return res['NewExternalIPAddress'] def _get_port_mapping(self, mapping_id=0, mappings=None): """ Fetch the existing mappings starting at index "mapping_id" from the device. To retreive all the mappings call this without parameters. @param mapping_id: The index of the mapping to start fetching from @param mappings: the dictionnary of already fetched mappings @return: A deferred called with the existing mappings when all have been retreived, see L{get_port_mappings} @rtype: L{twisted.internet.defer.Deferred} """ if mappings == None: mappings = {} result = self._soap_proxy.call('GetGenericPortMappingEntry', NewPortMappingIndex=mapping_id) return result.addCallbacks( lambda x: self._on_port_mapping_received(x, mapping_id+1, mappings), lambda x: self._on_no_port_mapping_received( x, mappings)) def _on_port_mapping_received(self, response, mapping_id, mappings): """ Called we we receive a single mapping from the device. 
@param response: a SOAPpy structure, representing the device's answer @param mapping_id: The index of the next mapping in the device @param mappings: the already fetched mappings, see L{get_port_mappings} @return: A deferred called with the existing mappings when all have been retreived, see L{get_port_mappings} @rtype: L{twisted.internet.defer.Deferred} """ logging.debug("Got mapping struct: %r", response) mappings[ response['NewProtocol'], response['NewExternalPort'] ] = (response['NewInternalClient'], response['NewInternalPort']) return self._get_port_mapping(mapping_id, mappings) def _on_no_port_mapping_received(self, failure, mappings): """ Called when we have no more port mappings to retreive, or an error occured while retreiving them. Either we have a "SpecifiedArrayIndexInvalid" SOAP error, and that's ok, it just means we have finished. If it returns some other error, then we fail with an UPnPError. @param mappings: the already retreived mappings @param failure: the failure @return: The existing mappings as defined in L{get_port_mappings} @raise UPnPError: When we got any other error than "SpecifiedArrayIndexInvalid" """ logging.debug("_on_no_port_mapping_received: %s", failure) err = failure.value message = err.args[0]["UPnPError"]["errorDescription"] if "SpecifiedArrayIndexInvalid" == message: return mappings else: return failure def _on_port_mapping_added(self, response): """ The port mapping was successfully added, return None to the deferred. """ return None def _on_no_port_mapping_added(self, failure): """ Called when the port mapping could not be added. Immediately raise an UPnPError, with the SOAPpy structure inside. @raise UPnPError: When the port mapping could not be added """ return failure def _on_port_mapping_removed(self, response): """ The port mapping was successfully removed, return None to the deferred. """ return None def _on_no_port_mapping_removed(self, failure): """ Called when the port mapping could not be removed. 
Immediately raise an UPnPError, with the SOAPpy structure inside. @raise UPnPError: When the port mapping could not be deleted """ return failure # UPNP multicast address, port and request string _UPNP_MCAST = '239.255.255.250' _UPNP_PORT = 1900 _UPNP_SEARCH_REQUEST = """M-SEARCH * HTTP/1.1\r Host:%s:%s\r ST:urn:schemas-upnp-org:device:InternetGatewayDevice:1\r Man:"ssdp:discover"\r MX:3\r \r """ % (_UPNP_MCAST, _UPNP_PORT) class UPnPProtocol(DatagramProtocol, object): """ The UPnP Device discovery udp multicast twisted protocol. """ def __init__(self, *args, **kwargs): """ Init the protocol, no parameters needed. """ super(UPnPProtocol, self).__init__(*args, **kwargs) #Device discovery deferred self._discovery = None self._discovery_timeout = None self.mcast = None self._done = False # Public methods def search_device(self): """ Triggers a UPnP device discovery. The returned deferred will be called with the L{UPnPDevice} that has been found in the LAN. @return: A deferred called with the detected L{UPnPDevice} instance. @rtype: L{twisted.internet.defer.Deferred} """ if self._discovery is not None: raise ValueError('already used') self._discovery = defer.Deferred() self._discovery_timeout = reactor.callLater(6, self._on_discovery_timeout) attempt = 0 mcast = None while True: try: self.mcast = reactor.listenMulticast(1900+attempt, self) break except CannotListenError: attempt = random.randint(0, 500) # joined multicast group, starting upnp search self.mcast.joinGroup('239.255.255.250', socket.INADDR_ANY) self.transport.write(_UPNP_SEARCH_REQUEST, (_UPNP_MCAST, _UPNP_PORT)) self.transport.write(_UPNP_SEARCH_REQUEST, (_UPNP_MCAST, _UPNP_PORT)) self.transport.write(_UPNP_SEARCH_REQUEST, (_UPNP_MCAST, _UPNP_PORT)) return self._discovery #Private methods def datagramReceived(self, dgram, address): if self._done: return """ This is private, handle the multicast answer from the upnp device. 
""" logging.debug("Got UPNP multicast search answer:\n%s", dgram) #This is an HTTP response response, message = dgram.split('\r\n', 1) # Prepare status line version, status, textstatus = response.split(None, 2) if not version.startswith('HTTP'): return if status != "200": return # Launch the info fetching def parse_discovery_response(message): """Separate headers and body from the received http answer.""" hdict = {} body = '' remaining = message while remaining: line, remaining = remaining.split('\r\n', 1) line = line.strip() if not line: body = remaining break key, val = line.split(':', 1) key = key.lower() hdict.setdefault(key, []).append(val.strip()) return hdict, body headers, body = parse_discovery_response(message) if not 'location' in headers: self._on_discovery_failed( UPnPError( "No location header in response to M-SEARCH!: %r"%headers)) return loc = headers['location'][0] result = client.getPage(url=loc) result.addCallback(self._on_gateway_response, loc).addErrback(self._on_discovery_failed) def _on_gateway_response(self, body, loc): if self._done: return """ Called with the UPnP device XML description fetched via HTTP. If the device has suitable services for ip discovery and port mappings, the callback returned in L{search_device} is called with the discovered L{UPnPDevice}. @raise UPnPError: When no suitable service has been found in the description, or another error occurs. @param body: The xml description of the device. 
@param loc: the url used to retreive the xml description """ # Parse answer upnpinfo = UPnPXml(body) # Check if we have a base url, if not consider location as base url urlbase = upnpinfo.urlbase if urlbase == None: urlbase = loc # Check the control url, if None, then the device cannot do what we want controlurl = upnpinfo.controlurl if controlurl == None: self._on_discovery_failed(UPnPError("upnp response showed no WANConnections")) return control_url2 = urlparse.urljoin(urlbase, controlurl) soap_proxy = SoapProxy(control_url2, upnpinfo.wanservice) self._on_discovery_succeeded(UPnPDevice(soap_proxy, upnpinfo.deviceinfos)) def _on_discovery_succeeded(self, res): if self._done: return self._done = True self.mcast.stopListening() self._discovery_timeout.cancel() self._discovery.callback(res) def _on_discovery_failed(self, err): if self._done: return self._done = True self.mcast.stopListening() self._discovery_timeout.cancel() self._discovery.errback(err) def _on_discovery_timeout(self): if self._done: return self._done = True self.mcast.stopListening() self._discovery.errback(failure.Failure(defer.TimeoutError('in _on_discovery_timeout'))) def search_upnp_device (): """ Check the network for an UPnP device. Returns a deferred with the L{UPnPDevice} instance as result, if found. @return: A deferred called with the L{UPnPDevice} instance @rtype: L{twisted.internet.defer.Deferred} """ return defer.maybeDeferred(UPnPProtocol().search_device)
gpl-3.0
mozilla/socorro
webapp-django/crashstats/tokens/views.py
1
2626
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.

from django import http
from django.contrib.auth.models import Permission
from django.contrib.sites.requests import RequestSite
from django.db import transaction
from django.views.decorators.http import require_POST
from django.shortcuts import render, get_object_or_404, redirect

from crashstats.crashstats.decorators import login_required
from crashstats.tokens import forms
from crashstats.tokens import models


@login_required
@transaction.atomic
def home(request):
    """Render the API-token management page and handle token creation.

    GET renders the list of the user's tokens plus an empty creation form.
    POST validates the form, refuses permissions the user does not hold,
    creates the token, and redirects back to this page (POST/redirect/GET).
    """
    # Only permissions declared without a content-type model are candidates,
    # and of those only the ones the current user actually holds may be
    # attached to a token.
    grantable = [
        perm
        for perm in Permission.objects.filter(
            content_type__model=""
        ).order_by("name")
        if request.user.has_perm("crashstats." + perm.codename)
    ]

    if request.method == "POST":
        form = forms.GenerateTokenForm(
            request.POST, possible_permissions=grantable
        )
        if form.is_valid():
            requested = form.cleaned_data.get("permissions", [])
            # Defense in depth: re-check each requested permission against
            # the user even though the form was built from `grantable`.
            for perm in requested:
                if not request.user.has_perm("crashstats.%s" % perm.codename):
                    return http.HttpResponseForbidden(
                        "You do not have this permission"
                    )
            token = models.Token.objects.create(
                user=request.user, notes=form.cleaned_data["notes"]
            )
            for perm in requested:
                token.permissions.add(perm)
            return redirect("tokens:home")
        # Invalid form: fall through and re-render it with its errors.
    else:
        form = forms.GenerateTokenForm(possible_permissions=grantable)

    context = {
        "possible_permissions": grantable,
        "form": form,
        "your_tokens": models.Token.objects.filter(
            user=request.user
        ).order_by("-created"),
        # Scheme + domain so templates can build absolute URLs.
        "absolute_base_url": "%s://%s" % (
            "https" if request.is_secure() else "http",
            RequestSite(request).domain,
        ),
    }
    return render(request, "tokens/home.html", context)


@require_POST
@login_required
@transaction.atomic
def delete_token(request, pk):
    """Delete one of the current user's tokens and return to the list.

    The `user=request.user` filter makes it a 404 (not a permission error)
    to try to delete somebody else's token.
    """
    get_object_or_404(models.Token, pk=pk, user=request.user).delete()
    return redirect("tokens:home")
mpl-2.0
ClearCorp-dev/account-financial-tools
__unported__/async_move_line_importer/model/account.py
43
5323
# -*- coding: utf-8 -*-
##############################################################################
#
#    Author: Nicolas Bessi
#    Copyright 2013 Camptocamp SA
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import psycopg2
import logging

from openerp.osv import orm
from openerp.tools.float_utils import float_compare

_logger = logging.getLogger(__name__)


def _format_inserts_values(vals):
    """Build the column list and placeholder list of an INSERT statement.

    ``line_id`` is excluded because move lines are inserted separately.
    Placeholders use psycopg2's named style (``%(col)s``) so ``vals`` can
    be passed directly as the query parameter dict.

    :param vals: dict of values to insert (column name -> value)
    :returns: tuple ``(columns, placeholders)`` of comma-separated strings
    """
    cols = vals.keys()
    if 'line_id' in cols:
        cols.remove('line_id')
    return (', '.join(cols),
            ', '.join(['%%(%s)s' % i for i in cols]))


class account_move(orm.Model):
    """Redefine account move create to bypass orm.

    Async_bypass_create must be set to True in context.
    """

    _inherit = "account.move"

    def _prepare_line(self, cr, uid, move_id, line, vals, context=None):
        """Take incoming move vals and complete a move line dict
        with data missing from the line itself.

        :param move_id: parent move id
        :param line: dict of vals of move line (or an orm ``(0, 0, vals)``
                     command tuple, in which case the dict is extracted)
        :param vals: dict of vals of move
        :returns: dict of vals of move line, completed
        :raises ValueError: if the line has both a debit and a credit
        """
        if isinstance(line, tuple):
            line = line[2]
        line['journal_id'] = vals.get('journal_id')
        line['date'] = vals.get('date')
        line['period_id'] = vals.get('period_id')
        line['company_id'] = vals.get('company_id')
        line['state'] = vals['state']
        line['move_id'] = move_id
        if line['debit'] and line['credit']:
            raise ValueError('debit and credit set on same line')
        if not line.get('analytic_account_id'):
            line['analytic_account_id'] = None
        # SQL NULL, not boolean False, must reach the raw INSERT below.
        for key in line:
            if line[key] is False:
                line[key] = None
        return line

    def _check_balance(self, vals):
        """Check that the move's debit and credit totals are balanced.

        :param vals: dict of move vals including ``line_id`` commands
        :raises ValueError: if the totals differ (2 decimal digits)
        """
        # Accept both raw dicts and (0, 0, vals) command tuples, like
        # _prepare_line does.
        line_dicts = [y[2] if isinstance(y, tuple) else y
                      for y in vals['line_id']]
        debit = sum(x.get('debit') or 0.0 for x in line_dicts)
        credit = sum(x.get('credit') or 0.0 for x in line_dicts)
        if float_compare(debit, credit, precision_digits=2):
            # Bug fix: the original interpolated the ``credits`` builtin
            # (site module object) instead of the local ``credit`` total.
            raise ValueError('Move is not balanced %s %s' % (debit, credit))

    def _bypass_create(self, cr, uid, vals, context=None):
        """Create entries using the cursor directly, skipping the ORM.

        :returns: created id
        """
        mvl_obj = self.pool['account.move.line']
        vals['company_id'] = context.get('company_id', False)
        vals['state'] = 'draft'
        if not vals.get('name'):
            vals['name'] = "/"
        sql = u"Insert INTO account_move (%s) VALUES (%s) RETURNING id"
        sql = sql % _format_inserts_values(vals)
        try:
            cr.execute(sql, vals)
        except psycopg2.Error:
            _logger.exception('ORM by pass error for move')
            raise
        created_id = cr.fetchone()[0]
        if vals.get('line_id'):
            self._check_balance(vals)
            for line in vals['line_id']:
                l_vals = self._prepare_line(cr, uid, created_id, line, vals,
                                            context=context)
                mvl_obj.create(cr, uid, l_vals, context=context)
        return created_id

    def create(self, cr, uid, vals, context=None):
        """Please refer to orm.BaseModel.create documentation.

        When ``async_bypass_create`` is set in the context, the record is
        inserted with raw SQL instead of the ORM.
        """
        if context is None:
            context = {}
        if context.get('async_bypass_create'):
            return self._bypass_create(cr, uid, vals, context=context)
        return super(account_move, self).create(cr, uid, vals,
                                                context=context)


class account_move_line(orm.Model):
    """Redefine account move line create to bypass orm.

    Async_bypass_create must be set to True in context.
    """

    _inherit = "account.move.line"

    def create(self, cr, uid, vals, context=None):
        """Please refer to orm.BaseModel.create documentation.

        When ``async_bypass_create`` is set in the context, the record is
        inserted with raw SQL instead of the ORM.
        """
        if context is None:
            context = {}
        if context.get('async_bypass_create'):
            return self._bypass_create(cr, uid, vals, context=context)
        return super(account_move_line, self).create(cr, uid, vals,
                                                     context=context)

    def _bypass_create(self, cr, uid, vals, context=None):
        """Create entries using the cursor directly, skipping the ORM.

        :returns: created id
        """
        sql = u"Insert INTO account_move_line (%s) VALUES (%s) RETURNING id"
        sql = sql % _format_inserts_values(vals)
        try:
            cr.execute(sql, vals)
        except psycopg2.Error:
            _logger.exception('ORM by pass error for move line')
            raise
        return cr.fetchone()[0]
agpl-3.0
p990-slimrom/platform_external_chromium
testing/gtest/test/gtest_list_tests_unittest.py
1068
5415
#!/usr/bin/env python
#
# Copyright 2006, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
#     * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
#     * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

"""Unit test for Google Test's --gtest_list_tests flag.

A user can ask Google Test to list all tests by specifying the
--gtest_list_tests flag.  This script tests such functionality
by invoking gtest_list_tests_unittest_ (a program written with
Google Test) the command line flags.
"""

__author__ = 'phanna@google.com (Patrick Hanna)'

import gtest_test_utils

# Constants.

# The command line flag for enabling/disabling listing all tests.
LIST_TESTS_FLAG = 'gtest_list_tests'

# Path to the gtest_list_tests_unittest_ program.
EXE_PATH = gtest_test_utils.GetTestExecutablePath('gtest_list_tests_unittest_')

# The expected output when running gtest_list_tests_unittest_ with
# --gtest_list_tests
# NOTE(review): indentation of the test names below assumes gtest's standard
# two-space listing format; the original file's whitespace was not preserved
# by extraction -- confirm against upstream before relying on exact bytes.
EXPECTED_OUTPUT_NO_FILTER = """FooDeathTest.
  Test1
Foo.
  Bar1
  Bar2
  DISABLED_Bar3
Abc.
  Xyz
  Def
FooBar.
  Baz
FooTest.
  Test1
  DISABLED_Test2
  Test3
"""

# The expected output when running gtest_list_tests_unittest_ with
# --gtest_list_tests and --gtest_filter=Foo*.
EXPECTED_OUTPUT_FILTER_FOO = """FooDeathTest.
  Test1
Foo.
  Bar1
  Bar2
  DISABLED_Bar3
FooBar.
  Baz
FooTest.
  Test1
  DISABLED_Test2
  Test3
"""

# Utilities.


def Run(args):
  """Runs gtest_list_tests_unittest_ and returns the list of tests printed."""

  return gtest_test_utils.Subprocess([EXE_PATH] + args,
                                     capture_stderr=False).output


# The unit test.


class GTestListTestsUnitTest(gtest_test_utils.TestCase):
  """Tests using the --gtest_list_tests flag to list all tests."""

  def RunAndVerify(self, flag_value, expected_output, other_flag):
    """Runs gtest_list_tests_unittest_ and verifies that it prints
    the correct tests.

    Args:
      flag_value:       value of the --gtest_list_tests flag;
                        None if the flag should not be present.
      expected_output:  the expected output after running command;
                        None means the output should simply differ from
                        the unfiltered listing.
      other_flag:       a different flag to be passed to command
                        along with gtest_list_tests;
                        None if the flag should not be present.
    """

    if flag_value is None:
      flag = ''
      flag_expression = 'not set'
    elif flag_value == '0':
      flag = '--%s=0' % LIST_TESTS_FLAG
      flag_expression = '0'
    else:
      flag = '--%s' % LIST_TESTS_FLAG
      flag_expression = '1'

    args = [flag]

    if other_flag is not None:
      args += [other_flag]

    output = Run(args)

    msg = ('when %s is %s, the output of "%s" is "%s".' %
           (LIST_TESTS_FLAG, flag_expression, ' '.join(args), output))

    if expected_output is not None:
      self.assert_(output == expected_output, msg)
    else:
      # No exact expectation: listing must at least not be produced.
      self.assert_(output != EXPECTED_OUTPUT_NO_FILTER, msg)

  def testDefaultBehavior(self):
    """Tests the behavior of the default mode."""

    self.RunAndVerify(flag_value=None,
                      expected_output=None,
                      other_flag=None)

  def testFlag(self):
    """Tests using the --gtest_list_tests flag."""

    self.RunAndVerify(flag_value='0',
                      expected_output=None,
                      other_flag=None)
    self.RunAndVerify(flag_value='1',
                      expected_output=EXPECTED_OUTPUT_NO_FILTER,
                      other_flag=None)

  def testOverrideNonFilterFlags(self):
    """Tests that --gtest_list_tests overrides the non-filter flags."""

    self.RunAndVerify(flag_value='1',
                      expected_output=EXPECTED_OUTPUT_NO_FILTER,
                      other_flag='--gtest_break_on_failure')

  def testWithFilterFlags(self):
    """Tests that --gtest_list_tests takes into account the
    --gtest_filter flag."""

    self.RunAndVerify(flag_value='1',
                      expected_output=EXPECTED_OUTPUT_FILTER_FOO,
                      other_flag='--gtest_filter=Foo*')


if __name__ == '__main__':
  gtest_test_utils.Main()
bsd-3-clause
coldnew/tf700-kernel
tools/perf/scripts/python/netdev-times.py
11271
15048
# Display a process of packets and processed time.
# It helps us to investigate networking or network device.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: work with debug mode. It shows buffer status.
#
# NOTE(review): this is a Python 2 perf script (print statements, cmp());
# it runs inside perf's embedded interpreter, not standalone.

import os
import sys

sys.path.append(os.environ['PERF_EXEC_PATH'] + \
    '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')

from perf_trace_context import *
from Core import *
from Util import *

all_event_list = []; # insert all tracepoint event related with this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
              # which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value include time of NET_RX softirq-entry
                 # and a list which stacks receive
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
                  # skb_copy_datagram_iovec

buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
                       # tx_xmit_list
of_count_rx_skb_list = 0; # overflow count

tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count

tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count

tx_free_list = []; # list of packets which is freed

# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;

# indices of event_info tuple
EINFO_IDX_NAME = 0
EINFO_IDX_CONTEXT = 1
EINFO_IDX_CPU = 2
EINFO_IDX_TIME = 3
EINFO_IDX_PID = 4
EINFO_IDX_COMM = 5

# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
    return (dst - src) / 1000000.0

# Display a process of transmitting a packet: device, length, enqueue
# time, queue->xmit latency and xmit->free latency.
def print_transmit(hunk):
    # Honor the "dev=" filter if one was given.
    if dev != 0 and hunk['dev'].find(dev) < 0:
        return
    # NOTE(review): column spacing in this format string could not be
    # recovered exactly from the extracted source -- confirm alignment
    # against the upstream kernel script.
    print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
        (hunk['dev'], hunk['len'],
        nsecs_secs(hunk['queue_t']),
        nsecs_nsecs(hunk['queue_t'])/1000,
        diff_msec(hunk['queue_t'], hunk['xmit_t']),
        diff_msec(hunk['xmit_t'], hunk['free_t']))

# Format for displaying rx packet processing
# NOTE(review): the run-length of spaces inside these ASCII-art templates
# was collapsed by extraction; verify against upstream before relying on
# exact column alignment.
PF_IRQ_ENTRY = " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY = " softirq_entry(+%.3fmsec)"
PF_NAPI_POLL = " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT = " |"
PF_WJOINT = " | |"
PF_NET_RECV = " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX = " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM = " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB = " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB = " | consume_skb(+%.3fmsec)"

# Display a process of received packets and interrputs associated with
# a NET_RX softirq
def print_receive(hunk):
    show_hunk = 0
    irq_list = hunk['irq_list']
    cpu = irq_list[0]['cpu']
    base_t = irq_list[0]['irq_ent_t']
    # check if this hunk should be showed
    if dev != 0:
        for i in range(len(irq_list)):
            if irq_list[i]['name'].find(dev) >= 0:
                show_hunk = 1
                break
    else:
        show_hunk = 1
    if show_hunk == 0:
        return
    print "%d.%06dsec cpu=%d" % \
        (nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
    # One entry per hardware interrupt that fed this softirq.
    for i in range(len(irq_list)):
        print PF_IRQ_ENTRY % \
            (diff_msec(base_t, irq_list[i]['irq_ent_t']),
            irq_list[i]['irq'], irq_list[i]['name'])
        print PF_JOINT
        irq_event_list = irq_list[i]['event_list']
        for j in range(len(irq_event_list)):
            irq_event = irq_event_list[j]
            if irq_event['event'] == 'netif_rx':
                print PF_NET_RX % \
                    (diff_msec(base_t, irq_event['time']),
                    irq_event['skbaddr'])
                print PF_JOINT
    print PF_SOFT_ENTRY % \
        diff_msec(base_t, hunk['sirq_ent_t'])
    print PF_JOINT
    # Events observed while the NET_RX softirq was running.
    event_list = hunk['event_list']
    for i in range(len(event_list)):
        event = event_list[i]
        if event['event_name'] == 'napi_poll':
            print PF_NAPI_POLL % \
                (diff_msec(base_t, event['event_t']), event['dev'])
            if i == len(event_list) - 1:
                print ""
            else:
                print PF_JOINT
        else:
            print PF_NET_RECV % \
                (diff_msec(base_t, event['event_t']), event['skbaddr'],
                event['len'])
            # 'comm' means the skb was copied to a user process;
            # 'handle' means it was freed/consumed in the kernel.
            if 'comm' in event.keys():
                print PF_WJOINT
                print PF_CPY_DGRAM % \
                    (diff_msec(base_t, event['comm_t']),
                    event['pid'], event['comm'])
            elif 'handle' in event.keys():
                print PF_WJOINT
                if event['handle'] == "kfree_skb":
                    print PF_KFREE_SKB % \
                        (diff_msec(base_t, event['comm_t']),
                        event['location'])
                elif event['handle'] == "consume_skb":
                    print PF_CONS_SKB % \
                        diff_msec(base_t, event['comm_t'])
            print PF_JOINT

# Parse script options from sys.argv (tx / rx / dev= / debug); with no
# tx/rx option both charts are shown.
def trace_begin():
    global show_tx
    global show_rx
    global dev
    global debug

    for i in range(len(sys.argv)):
        if i == 0:
            continue
        arg = sys.argv[i]
        if arg == 'tx':
            show_tx = 1
        elif arg == 'rx':
            show_rx = 1
        elif arg.find('dev=', 0, 4) >= 0:
            dev = arg[4:]
        elif arg == 'debug':
            debug = 1
    if show_tx == 0 and show_rx == 0:
        show_tx = 1
        show_rx = 1

# Sort all buffered events by time, replay them through the handle_*
# state machines, then print the rx/tx charts.
def trace_end():
    # order all events in time
    all_event_list.sort(lambda a, b: cmp(a[EINFO_IDX_TIME],
                                         b[EINFO_IDX_TIME]))
    # process all events
    for i in range(len(all_event_list)):
        event_info = all_event_list[i]
        name = event_info[EINFO_IDX_NAME]
        if name == 'irq__softirq_exit':
            handle_irq_softirq_exit(event_info)
        elif name == 'irq__softirq_entry':
            handle_irq_softirq_entry(event_info)
        elif name == 'irq__softirq_raise':
            handle_irq_softirq_raise(event_info)
        elif name == 'irq__irq_handler_entry':
            handle_irq_handler_entry(event_info)
        elif name == 'irq__irq_handler_exit':
            handle_irq_handler_exit(event_info)
        elif name == 'napi__napi_poll':
            handle_napi_poll(event_info)
        elif name == 'net__netif_receive_skb':
            handle_netif_receive_skb(event_info)
        elif name == 'net__netif_rx':
            handle_netif_rx(event_info)
        elif name == 'skb__skb_copy_datagram_iovec':
            handle_skb_copy_datagram_iovec(event_info)
        elif name == 'net__net_dev_queue':
            handle_net_dev_queue(event_info)
        elif name == 'net__net_dev_xmit':
            handle_net_dev_xmit(event_info)
        elif name == 'skb__kfree_skb':
            handle_kfree_skb(event_info)
        elif name == 'skb__consume_skb':
            handle_consume_skb(event_info)
    # display receive hunks
    if show_rx:
        for i in range(len(receive_hunk_list)):
            print_receive(receive_hunk_list[i])
    # display transmit hunks
    if show_tx:
        # NOTE(review): header spacing was collapsed by extraction;
        # confirm column alignment against upstream.
        print " dev len Qdisc " \
            " netdevice free"
        for i in range(len(tx_free_list)):
            print_transmit(tx_free_list[i])
    if debug:
        print "debug buffer status"
        print "----------------------------"
        print "xmit Qdisc:remain:%d overflow:%d" % \
            (len(tx_queue_list), of_count_tx_queue_list)
        print "xmit netdevice:remain:%d overflow:%d" % \
            (len(tx_xmit_list), of_count_tx_xmit_list)
        print "receive:remain:%d overflow:%d" % \
            (len(rx_skb_list), of_count_rx_skb_list)

# called from perf, when it finds a correspoinding event

# Buffer NET_RX softirq entry events only.
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

# Buffer NET_RX softirq exit events only.
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

# Buffer NET_RX softirq raise events only.
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
    if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
        return
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
    all_event_list.append(event_info)

def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
                           irq, irq_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  irq, irq_name)
    all_event_list.append(event_info)

def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
    all_event_list.append(event_info)

def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  napi, dev_name)
    all_event_list.append(event_info)

def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                           skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
                  skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
                       skbaddr, skblen, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, dev_name)
    all_event_list.append(event_info)

def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
                      skbaddr, skblen, rc, dev_name):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen, rc, dev_name)
    all_event_list.append(event_info)

def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
                   skbaddr, protocol, location):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, protocol, location)
    all_event_list.append(event_info)

def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, skbaddr)
    all_event_list.append(event_info)

def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
                                 skbaddr, skblen):
    event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
                  skbaddr, skblen)
    all_event_list.append(event_info)

# Push a record for this hardware irq onto the per-cpu stack.
def handle_irq_handler_entry(event_info):
    (name, context, cpu, time, pid, comm, irq, irq_name) = event_info
    if cpu not in irq_dic.keys():
        irq_dic[cpu] = []
    irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
    irq_dic[cpu].append(irq_record)

# Close the matching irq record; keep it only if it raised NET_RX.
def handle_irq_handler_exit(event_info):
    (name, context, cpu, time, pid, comm, irq, ret) = event_info
    if cpu not in irq_dic.keys():
        return
    irq_record = irq_dic[cpu].pop()
    if irq != irq_record['irq']:
        return
    irq_record.update({'irq_ext_t':time})
    # if an irq doesn't include NET_RX softirq, drop.
    if 'event_list' in irq_record.keys():
        irq_dic[cpu].append(irq_record)

# Record a softirq raise inside the currently open irq record.
def handle_irq_softirq_raise(event_info):
    (name, context, cpu, time, pid, comm, vec) = event_info
    if cpu not in irq_dic.keys() \
    or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'sirq_raise'})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)

# Open a fresh per-cpu NET_RX record; events until softirq_exit attach here.
def handle_irq_softirq_entry(event_info):
    (name, context, cpu, time, pid, comm, vec) = event_info
    net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}

# Close the per-cpu NET_RX record and merge it with its irqs into a hunk.
def handle_irq_softirq_exit(event_info):
    (name, context, cpu, time, pid, comm, vec) = event_info
    irq_list = []
    event_list = 0
    if cpu in irq_dic.keys():
        irq_list = irq_dic[cpu]
        del irq_dic[cpu]
    if cpu in net_rx_dic.keys():
        sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
        event_list = net_rx_dic[cpu]['event_list']
        del net_rx_dic[cpu]
    if irq_list == [] or event_list == 0:
        return
    rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
                'irq_list':irq_list, 'event_list':event_list}
    # merge information related to a NET_RX softirq
    receive_hunk_list.append(rec_data)

def handle_napi_poll(event_info):
    (name, context, cpu, time, pid, comm, napi, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        event_list = net_rx_dic[cpu]['event_list']
        rec_data = {'event_name':'napi_poll',
                    'dev':dev_name, 'event_t':time}
        event_list.append(rec_data)

# Attach a netif_rx event to the currently open irq record.
def handle_netif_rx(event_info):
    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu not in irq_dic.keys() \
    or len(irq_dic[cpu]) == 0:
        return
    irq_record = irq_dic[cpu].pop()
    if 'event_list' in irq_record.keys():
        irq_event_list = irq_record['event_list']
    else:
        irq_event_list = []
    irq_event_list.append({'time':time, 'event':'netif_rx',
        'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
    irq_record.update({'event_list':irq_event_list})
    irq_dic[cpu].append(irq_record)

# Record skb reception; keep it in rx_skb_list for later matching with
# skb_copy_datagram_iovec / kfree_skb.
def handle_netif_receive_skb(event_info):
    global of_count_rx_skb_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    if cpu in net_rx_dic.keys():
        rec_data = {'event_name':'netif_receive_skb',
                    'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
        event_list = net_rx_dic[cpu]['event_list']
        event_list.append(rec_data)
        rx_skb_list.insert(0, rec_data)
        if len(rx_skb_list) > buffer_budget:
            rx_skb_list.pop()
            of_count_rx_skb_list += 1

# An skb entered the qdisc; start tracking it for the tx chart.
def handle_net_dev_queue(event_info):
    global of_count_tx_queue_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, dev_name) = event_info
    skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
    tx_queue_list.insert(0, skb)
    if len(tx_queue_list) > buffer_budget:
        tx_queue_list.pop()
        of_count_tx_queue_list += 1

# Move an skb from the qdisc list to the xmit list on successful transmit.
def handle_net_dev_xmit(event_info):
    global of_count_tx_xmit_list

    (name, context, cpu, time, pid, comm,
        skbaddr, skblen, rc, dev_name) = event_info
    if rc == 0: # NETDEV_TX_OK
        for i in range(len(tx_queue_list)):
            skb = tx_queue_list[i]
            if skb['skbaddr'] == skbaddr:
                skb['xmit_t'] = time
                tx_xmit_list.insert(0, skb)
                del tx_queue_list[i]
                if len(tx_xmit_list) > buffer_budget:
                    tx_xmit_list.pop()
                    of_count_tx_xmit_list += 1
                return

# kfree_skb can close out a queued tx skb (dropped), a transmitted tx skb
# (completed) or a received skb (freed in-kernel without a user copy).
def handle_kfree_skb(event_info):
    (name, context, cpu, time, pid, comm,
        skbaddr, protocol, location) = event_info
    for i in range(len(tx_queue_list)):
        skb = tx_queue_list[i]
        if skb['skbaddr'] == skbaddr:
            del tx_queue_list[i]
            return
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if rec_data['skbaddr'] == skbaddr:
            rec_data.update({'handle':"kfree_skb",
                'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return

# consume_skb marks a transmitted skb as freed after successful xmit.
def handle_consume_skb(event_info):
    (name, context, cpu, time, pid, comm, skbaddr) = event_info
    for i in range(len(tx_xmit_list)):
        skb = tx_xmit_list[i]
        if skb['skbaddr'] == skbaddr:
            skb['free_t'] = time
            tx_free_list.append(skb)
            del tx_xmit_list[i]
            return

# Match a user-space copy against a pending received skb.
def handle_skb_copy_datagram_iovec(event_info):
    (name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
    for i in range(len(rx_skb_list)):
        rec_data = rx_skb_list[i]
        if skbaddr == rec_data['skbaddr']:
            rec_data.update({'handle':"skb_copy_datagram_iovec",
                'comm':comm, 'pid':pid, 'comm_t':time})
            del rx_skb_list[i]
            return
gpl-2.0
bittorrent/btc
btc/btc_remove.py
2
1205
import argparse
import sys
import time

from .btc import encoder, decoder, error, list_to_dict, dict_to_list, client

_description = 'remove torrent'

def main():
    """Remove the torrents piped in from another btc command, then block
    until the client no longer lists any of them."""
    parser = argparse.ArgumentParser()
    parser.add_argument('-d', '--drop-data', default=False, action="store_true")
    parser.add_argument('-k', '--keep-torrent', default=False, action="store_true")
    args = parser.parse_args()

    # This command only makes sense at the end of a pipeline.
    if sys.stdin.isatty():
        parser.error('no input, pipe another btc command output into this command')

    raw = sys.stdin.read()
    if not raw.strip():
        exit(1)

    try:
        entries = decoder.decode(raw)
    except ValueError:
        error('unexpected input: %s' % raw)

    hashes = [entry['hash'] for entry in entries]
    for torrent_hash in hashes:
        client.remove_torrent(torrent_hash,
                              keep_data=not args.drop_data,
                              keep_torrent=args.keep_torrent)

    # Poll once a second until none of the removed hashes are listed.
    while any(t['hash'] in hashes for t in client.list_torrents()):
        time.sleep(1)

if __name__ == '__main__':
    main()
mit
mitodl/odl-video-service
ui/signals.py
1
4043
""" ui model signals """ # pylint: disable=unused-argument from django.conf import settings from django.db.models.signals import pre_delete, post_save from django.dispatch import receiver from django.contrib.auth.signals import user_logged_out from cloudsync.tasks import remove_youtube_video, remove_youtube_caption from ui.constants import StreamSource, YouTubeStatus from ui.models import ( VideoFile, VideoThumbnail, VideoSubtitle, Video, YouTubeVideo, Collection, ) from ui import tasks as ovs_tasks from ui.utils import delete_moira_cache @receiver(pre_delete, sender=VideoFile) @receiver(pre_delete, sender=VideoThumbnail) @receiver(pre_delete, sender=VideoSubtitle) def delete_s3_files(sender, **kwargs): """ Make sure S3 files are deleted along with associated video file/thumbnail object """ kwargs["instance"].delete_from_s3() @receiver(pre_delete, sender=VideoSubtitle) def update_video_permissions(sender, **kwargs): """ Remove public video permissions if the subtitle is about to be deleted and no other subtitles exist. Otherwise just delete the subtitle from Youtube. 
""" video = kwargs["instance"].video if video.is_public: if ( video.techtvvideo_set.first() is None and len(video.videosubtitle_set.all()) <= 1 ): video.is_public = False video.save() elif YouTubeVideo.objects.filter(video=video).first() is not None: remove_youtube_caption.delay(video.id, kwargs["instance"].language) @receiver(pre_delete, sender=YouTubeVideo) def delete_youtube_video(sender, **kwargs): """ Call the YouTube API to delete a video """ youtube_id = kwargs["instance"].id if youtube_id is not None: remove_youtube_video.delay(youtube_id) @receiver(post_save, sender=Collection) def update_collection_youtube(sender, **kwargs): """ If a collection's stream source is changed, sync YoutubeVideo objects for public Videos """ for video in kwargs["instance"].videos.filter(is_public=True): sync_youtube(video) @receiver(post_save, sender=Collection) def update_collection_retranscodes(sender, **kwargs): """ Sync schedule_retranscode value for all videos in the collection """ if settings.FEATURES.get("RETRANSCODE_ENABLED", False): collection = kwargs["instance"] Video.objects.filter(collection=collection).update( schedule_retranscode=collection.schedule_retranscode ) @receiver(post_save, sender=Video) def update_video_youtube(sender, **kwargs): """ If a video's is_public field is changed, sync associated YoutubeVideo object """ sync_youtube(kwargs["instance"]) @receiver(post_save, sender=VideoFile) def add_hls_to_edx(sender, instance, created, **kwargs): """ If an HLS VideoFile was created and is of the right type, kick off a task to add this video to edX via API. """ if created and instance.can_add_to_edx: ovs_tasks.post_hls_to_edx.delay(instance.id) def sync_youtube(video): """ Delete from youtube if it exists and permissions are not public or collection stream_source == cloudfront. Args: video(ui.models.Video): The video that should be uploaded or deleted from Youtube. 
""" yt_video = video.youtubevideo if hasattr(video, "youtubevideo") else None if yt_video is not None: if ( video.is_public is False or video.collection.stream_source == StreamSource.CLOUDFRONT or yt_video.status in (YouTubeStatus.FAILED, YouTubeStatus.REJECTED) ): YouTubeVideo.objects.get(id=yt_video.id).delete() def reset_moira(sender, user, request, **kwargs): """ Clear out the user's cached moira lists Args: sender(Object): The sender of the signal user(User): The user logging out request(WSGIRequest): The request to log out """ delete_moira_cache(user) user_logged_out.connect(reset_moira)
bsd-3-clause
snyderr/robotframework
utest/output/test_listeners.py
6
5732
from __future__ import print_function

import unittest

from robot.output.listeners import Listeners, LibraryListeners
from robot.output import LOGGER
from robot.utils.asserts import *
from robot.utils import JYTHON
from robot.running.outputcapture import OutputCapturer

# Silence robot's console logging so stdout only contains listener output.
LOGGER.unregister_console_logger()


class Mock(object):
    # Returns '' for any attribute so robot can probe arbitrary names.

    def __getattr__(self, name):
        return ''


class SuiteMock(Mock):
    # Minimal stand-in for a running test suite.

    def __init__(self):
        self.name = 'suitemock'
        self.doc = 'somedoc'
        self.status = 'PASS'
        self.tests = self.suites = []

    stat_message = 'stat message'
    full_message = 'full message'


class TestMock(Mock):
    # Minimal stand-in for a failing test case.

    def __init__(self):
        self.name = 'testmock'
        self.doc = 'cod'
        self.tags = ['foo', 'bar']
        self.message = 'Expected failure'
        self.status = 'FAIL'


class KwMock(Mock):
    # Minimal stand-in for a keyword call.

    def __init__(self):
        self.name = 'kwmock'
        self.args = ['a1', 'a2']
        self.status = 'PASS'
        self.type = 'kw'


class ListenOutputs(object):
    # Listener mixin: echoes every result-file notification to stdout.

    def output_file(self, path):
        self._out_file('Output', path)

    def report_file(self, path):
        self._out_file('Report', path)

    def log_file(self, path):
        self._out_file('Log', path)

    def debug_file(self, path):
        self._out_file('Debug', path)

    def xunit_file(self, path):
        self._out_file('XUnit', path)

    def _out_file(self, name, path):
        print('%s: %s' % (name, path))


class ListenAll(ListenOutputs):
    # Listener API v2 implementation that prints every event it receives;
    # the tests below assert against this printed output.
    ROBOT_LISTENER_API_VERSION = '2'

    def start_suite(self, name, attrs):
        print("SUITE START: %s '%s'" % (name, attrs['doc']))

    def start_test(self, name, attrs):
        print("TEST START: %s '%s' %s" % (name, attrs['doc'],
                                          ', '.join(attrs['tags'])))

    def start_keyword(self, name, attrs):
        args = [str(arg) for arg in attrs['args']]
        print("KW START: %s %s" % (name, args))

    def end_keyword(self, name, attrs):
        print("KW END: %s" % attrs['status'])

    def end_test(self, name, attrs):
        if attrs['status'] == 'PASS':
            print('TEST END: PASS')
        else:
            print("TEST END: %s %s" % (attrs['status'], attrs['message']))

    def end_suite(self, name, attrs):
        print('SUITE END: %s %s' % (attrs['status'], attrs['statistics']))

    def close(self):
        print('Closing...')


class TestListeners(unittest.TestCase):
    # Drives Listeners with the mocks above and checks the text ListenAll
    # prints; stdout/stderr are captured via OutputCapturer.
    listener_name = 'test_listeners.ListenAll'
    stat_message = 'stat message'

    def setUp(self):
        self.listeners = Listeners([self.listener_name])
        self.capturer = OutputCapturer()

    def test_start_suite(self):
        self.listeners.start_suite(SuiteMock())
        self._assert_output("SUITE START: suitemock 'somedoc'")

    def test_start_test(self):
        self.listeners.start_test(TestMock())
        self._assert_output("TEST START: testmock 'cod' foo, bar")

    def test_start_keyword(self):
        self.listeners.start_keyword(KwMock())
        self._assert_output("KW START: kwmock ['a1', 'a2']")

    def test_end_keyword(self):
        self.listeners.end_keyword(KwMock())
        self._assert_output("KW END: PASS")

    def test_end_test(self):
        self.listeners.end_test(TestMock())
        self._assert_output('TEST END: FAIL Expected failure')

    def test_end_suite(self):
        self.listeners.end_suite(SuiteMock())
        self._assert_output('SUITE END: PASS ' + self.stat_message)

    def test_output_file(self):
        self.listeners.output_file('output', 'path/to/output')
        self._assert_output('Output: path/to/output')

    def test_log_file(self):
        self.listeners.output_file('log', 'path/to/log')
        self._assert_output('Log: path/to/log')

    def test_report_file(self):
        self.listeners.output_file('report', 'path/to/report')
        self._assert_output('Report: path/to/report')

    def test_debug_file(self):
        self.listeners.output_file('debug', 'path/to/debug')
        self._assert_output('Debug: path/to/debug')

    def test_xunit_file(self):
        self.listeners.output_file('XUnit', 'path/to/xunit')
        self._assert_output('XUnit: path/to/xunit')

    def test_close(self):
        self.listeners.close()
        self._assert_output('Closing...')

    def _assert_output(self, expected):
        # Release the capture started in setUp and compare what was printed.
        stdout, stderr = self.capturer._release()
        assert_equal(stderr, '')
        assert_equal(stdout.rstrip(), expected)


if JYTHON:

    class TestJavaListeners(TestListeners):
        # Same assertions, but exercised through a Java listener class.
        listener_name = 'NewStyleJavaListener'
        stat_message = 'stat message'


class TestAttributesAreNotAccessedUnnecessarily(unittest.TestCase):
    # Listener dispatch must not touch model attributes when no registered
    # listener implements the corresponding method -- passing None would
    # otherwise raise AttributeError.

    def test_start_and_end_methods(self):
        for listeners in [Listeners([]), LibraryListeners()]:
            for name in dir(listeners):
                if name.startswith(('start_', 'end_')):
                    method = getattr(listeners, name)
                    method(None)

    def test_message_methods(self):
        class Message(object):
            level = 'INFO'
        for listeners in [Listeners([]), LibraryListeners()]:
            listeners.log_message(Message)
            listeners.message(Message)

    def test_some_methods_implemented(self):
        class MyListener(object):
            ROBOT_LISTENER_API_VERSION = 2

            def end_suite(self, suite):
                pass
        libs = LibraryListeners()
        libs.new_suite_scope()
        libs.register([MyListener()], None)
        for listeners in [Listeners([MyListener()]), libs]:
            # start_suite is not implemented, so None is never inspected;
            # end_suite is implemented, so attribute access on None raises.
            listeners.start_suite(None)
            assert_raises(AttributeError, listeners.end_suite, None)


if __name__ == '__main__':
    unittest.main()
apache-2.0
REXUS-PIOneERS/Python-PIOneERS
python/Pi_2/RPi_SPI.py
2
6953
import RPi.GPIO as GPIO
import time
import sys

GPIO.setmode(GPIO.BOARD)


class SPI_Master():
    """Bit-banged SPI master driven over Raspberry Pi GPIO pins."""

    def __init__(self, CLK, MISO, MOSI, CS, freq=1000):
        # Pin role comments were swapped in the original; the GPIO
        # directions below are the ground truth (MOSI out, MISO in).
        self.CLK = CLK    # Clock
        self.MISO = MISO  # Master in, slave out (our input)
        self.MOSI = MOSI  # Master out, slave in (our output)
        self.CS = CS      # Chip Select
        GPIO.setup(self.CLK, GPIO.OUT)
        GPIO.setup(self.MOSI, GPIO.OUT)
        GPIO.setup(self.MISO, GPIO.IN)
        GPIO.setup(self.CS, GPIO.OUT)
        self.freq = freq  # clock frequency in Hz (drives half-period sleeps)
        # Activate the SPI line
        self.activate_spi_line()

    def activate_spi_line(self):
        '''Pull all lines high'''
        GPIO.output(self.CLK, GPIO.HIGH)
        GPIO.output(self.MOSI, GPIO.HIGH)
        GPIO.output(self.CS, GPIO.HIGH)

    def send_data(self, channel, data, num_bits):
        '''
        Send data to the slave device
        Structure of command
        Bit 8: 1 = receive, 0 = send (MSB)
        Bit 5-7: Channel of send/receive
        Bit 1-4: Number of bits to read/write (LSB)
        Command, Channel, Bits
        '''
        command = (0b1 << 7 | channel << 4 | num_bits)
        print("sending command:", bin(command))
        # Pull CS Low to prepare for receiving command
        GPIO.output(self.CS, GPIO.HIGH)
        GPIO.output(self.CS, GPIO.LOW)
        # Wait for acknowledge from Slave
        time.sleep(0.01)
        self._sendBitsFromMaster(command, 8)
        # Sleep for a bit to give the slave time to prepare
        # time.sleep(0.1)
        # Send the data
        self._sendBitsFromMaster(data, num_bits)
        # Pull CS High to signal end of communication
        GPIO.output(self.CS, GPIO.HIGH)

    def request_data(self, channel, num_bits):
        '''
        Request data from the slave device
        Structure of command
        Bit 8: 1 = receive, 0 = send (MSB)
        Bit 5-7: Channel of send/receive
        Bit 1-4: Number of bits to read/write (LSB)
        Command, Channel, Bits
        '''
        command = (0b0 << 7 | channel << 4 | num_bits)
        # Pull CS Low to prepare for receiving command
        GPIO.output(self.CS, GPIO.HIGH)
        GPIO.output(self.CS, GPIO.LOW)
        time.sleep(0.01)
        self._sendBitsFromMaster(command, 8)
        # Sleep to give slave time to respond
        # time.sleep(0.1)
        # Receive the data
        GPIO.output(self.CS, GPIO.HIGH)
        return self._recvBitsFromSlave(num_bits)

    def _sendBitsFromMaster(self, data, num_bits):
        '''Send bits to the slave device, MSB first.'''
        print("Sending bits...", bin(data))
        for bit in range(num_bits, 0, -1):
            bit -= 1
            dec_value = 2 ** bit
            if (data / dec_value >= 1):
                print("sent bit: 1")
                GPIO.output(self.MOSI, GPIO.HIGH)
                data -= dec_value
            else:
                print("sent bit: 0")
                GPIO.output(self.MOSI, GPIO.LOW)
            # Pulse the clock pin to push data through
            time.sleep(0.5 / self.freq)
            GPIO.output(self.CLK, GPIO.LOW)
            GPIO.output(self.CLK, GPIO.HIGH)
            time.sleep(0.5 / self.freq)

    def _recvBitsFromSlave(self, num_bits):
        '''Get data from the slave, MSB first.'''
        data = 0
        for bit in range(num_bits):
            # Pulse the clock to start receiving data
            time.sleep(0.5 / self.freq)
            GPIO.output(self.CLK, GPIO.LOW)
            GPIO.output(self.CLK, GPIO.HIGH)
            time.sleep(0.5 / self.freq)  # Give the slave time to respond
            if GPIO.input(self.MISO):
                data |= 0b1
            data <<= 1
        # Undo the final shift (one shift too many after the last bit).
        data >>= 1
        return data


class SPI_Slave():
    """Bit-banged SPI slave with eight data channels (0-7)."""

    def __init__(self, CLK, MISO, MOSI, CS):
        # Setup Pins
        self.CLK = CLK
        self.MISO = MISO
        self.MOSI = MOSI
        self.CS = CS
        GPIO.setup(self.CLK, GPIO.IN, pull_up_down=GPIO.PUD_UP)
        GPIO.setup(self.MOSI, GPIO.IN)
        GPIO.setup(self.MISO, GPIO.OUT)
        GPIO.setup(self.CS, GPIO.IN, pull_up_down=GPIO.PUD_UP)
        # Setup data registers. BUGFIX: the command encodes the channel in
        # three bits (0-7) and the validity checks accept channel 7, so we
        # need 8 slots -- range(7) gave only 7 and channel 7 raised
        # IndexError.
        self.channels = [0] * 8
        # Activate spi connection
        self.activate_spi_line()

    def activate_spi_line(self):
        '''Pull output pins high, setup detect for CS'''
        GPIO.output(self.MISO, GPIO.HIGH)
        GPIO.add_event_detect(self.CS, GPIO.FALLING,
                              callback=self.recieve_command)
        print("Event detection added")

    def recieve_command(self, channel):
        '''Reads data from the master to recieve the command (8-bit)'''
        GPIO.output(self.MISO, GPIO.LOW)
        command = self._readBitsFromMaster(8)
        print("Command recieved:", bin(command))
        '''
        Structure of command
        Bit 8: 1 = receive, 0 = send (MSB)
        Bit 5-7: Channel of send/receive
        Bit 1-4: Number of bits to read/write (LSB)
        '''
        recieve = (command & 0b10000000) >> 7
        channel = (command & 0b01110000) >> 4
        num_bits = command & 0b00001111
        print(recieve, channel, num_bits)
        if recieve:
            self._recieve_data(channel, num_bits)
        else:
            self._send_data(channel, num_bits)

    def _readBitsFromMaster(self, num_bits):
        '''Reads bits from the master, MSB first.'''
        print("Waiting for bits")
        data = 0
        for bit in range(num_bits):
            # Wait for the clock to pulse
            GPIO.wait_for_edge(self.CLK, GPIO.FALLING)
            if GPIO.input(self.MOSI):
                print("Recieved bit:", 1)
                data |= 0b1
            else:
                print("Recieved bit:", 0)
            data <<= 1
        # Undo the final shift (one shift too many after the last bit).
        data >>= 1
        return data

    def _recieve_data(self, channel, num_bits):
        '''Recieve the data and store in the channel'''
        if 0 > channel or channel > 7:
            print('Recieve: Invalid SPI channel number, must be in range 0-7',
                  channel)
            return
        # Read the data
        self.channels[channel] = self._readBitsFromMaster(num_bits)

    def _send_data(self, channel, num_bits):
        '''Send the data from the channel'''
        if 0 > channel or channel > 7:
            print('Send: Invalid SPI channel number, must be in range 0-7',
                  channel)
            # BUGFIX: bail out on an invalid channel instead of falling
            # through and indexing self.channels anyway (matches
            # _recieve_data).
            return
        # Send the data
        self._sendBitsFromSlave(self.channels[channel], num_bits)

    def _sendBitsFromSlave(self, data, num_bits):
        '''Sends the data to the Master'''
        # Send data starting with the MSB
        for bit in range(num_bits, 0, -1):
            bit -= 1
            dec_value = 2 ** bit
            # Wait for the clock to pulse
            GPIO.wait_for_edge(self.CLK, GPIO.FALLING)
            if data / dec_value >= 1:
                GPIO.output(self.MISO, GPIO.HIGH)
                data -= dec_value
            else:
                GPIO.output(self.MISO, GPIO.LOW)
mit
ceari/edacc_web
edacc/tests/edacc_tests.py
2
8680
# -*- coding: utf-8 -*-
"""
    EDACC Web Frontend Tests
    ------------------------

    Unit tests of web frontend functions.

    :copyright: (c) 2010 by Daniel Diepold.
    :license: MIT, see LICENSE for details.
"""
import unittest
import struct

from edacc.tests import fixtures
from edacc import config

# All tests run against a dedicated throw-away database on localhost.
TEST_DATABASE = "EDACCUnitTests"
config.DATABASE_HOST = "localhost"


def clean_database(db):
    # Wipe all fixture-created rows; deletion order respects FK dependencies.
    db.session.query(db.ExperimentResult).delete()
    db.session.query(db.Experiment).delete()
    db.session.query(db.Instance).delete()
    db.session.query(db.Solver).delete()
    db.session.query(db.InstanceClass).delete()
    db.session.query(db.ResultCodes).delete()
    db.session.query(db.StatusCodes).delete()
    db.session.commit()


def float_eq(x, y, eps=1e-10):
    # Tolerance-based float comparison used instead of exact equality.
    return y - eps <= x <= y + eps


class DatabaseConnectionTestCase(unittest.TestCase):
    # Smoke tests for connecting to the test database.

    def setUp(self):
        from edacc import models
        self.db = models.add_database("edacc", "edaccteam",
                                      TEST_DATABASE, TEST_DATABASE)

    def test_db_connection(self):
        assert self.db is not None

    def test_competition_configuration(self):
        assert self.db.session.query(self.db.DBConfiguration).count() == 1
        assert self.db.session.query(self.db.DBConfiguration).get(0) is not None


class RankingTestCase(unittest.TestCase):
    # Exercises the solver ranking code against a deterministic fixture
    # (10 solvers x 10 instances x 10 runs).

    def setUp(self):
        from edacc import models
        self.db = db = models.add_database("edacc", "edaccteam",
                                           TEST_DATABASE, TEST_DATABASE)
        clean_database(db)
        fixtures.setup_ranking_fixture(db)

    def test_fixture(self):
        db = self.db
        assert db.session.query(db.Experiment).count() == 1
        assert db.session.query(db.ExperimentResult).count() == 10*10*10

    def test_number_of_solved_instances_ranking(self):
        from edacc import ranking
        db = self.db
        experiment = db.session.query(db.Experiment).first()
        instances = experiment.instances
        ranked_solver_configs = ranking.number_of_solved_instances_ranking(
            db, experiment, instances)
        assert len(ranked_solver_configs) == 10
        assert ranked_solver_configs[0].name == u"TestSolver0Configuration"
        assert ranked_solver_configs[9].name == u"TestSolver9Configuration"
        ranking_data = ranking.get_ranking_data(db, experiment,
                                                ranked_solver_configs,
                                                instances, True, True)
        # Row 0 is the synthetic VBS entry.
        assert ranking_data[0][0] == u"Virtual Best Solver (VBS)"
        assert ranking_data[0][1] == 10*10  # 10 instances, 10 runs each
        assert ranking_data[0][2] == 1.0
        assert ranking_data[0][4] == 10*10*1.0  # 10 instances, 10 runs each, best time should be 1.0 on each
        best = ranking_data[1]
        assert best[0].name == u"TestSolver0Configuration"
        assert best[1] == 10*10
        assert best[2] == 1.0
        assert best[3] == 1.0
        assert best[4] == 10*10*1.0  # 10 instances, 10 runs each, best time should be 1.0 on each
        assert best[5] == 1.0  # avg cpu time per run
        assert best[6] == 0.0  # avg stddev per instance
        assert best[7] == 1.0  # par10
        second = ranking_data[2]
        assert second[0].name == u"TestSolver1Configuration"
        assert second[1] == 10*10
        assert second[2] == 1.0
        assert second[3] == 1.0
        assert second[4] == 10*10*2.0
        assert second[5] == 2.0
        assert second[6] == 0.0
        assert second[7] == 2.0

    def tearDown(self):
        clean_database(self.db)
        self.db.session.remove()


class StatisticsTestCase(unittest.TestCase):
    # Checks the statistical helpers against hand-verified values
    # (R reference results for the hypothesis tests).

    def test_probabilistic_domination(self):
        from edacc.statistics import prob_domination
        v1 = [1, 2, 3, 4, 5]
        v2 = [1, 2, 3, 4, 5]
        assert prob_domination(v1, v2) == 0
        assert prob_domination(v2, v1) == 0
        assert prob_domination(v1, v1) == 0

        v1 = [1, 2, 3, 4, 4.5]
        v2 = [1, 2, 3, 4, 5]
        assert prob_domination(v1, v2) == 1
        assert prob_domination(v2, v1) == -1

        v1 = [1, 2, 3]
        v2 = [1, 2, 2, 1, 1, 1.5]
        assert prob_domination(v1, v2) == -1
        assert prob_domination(v2, v1) == 1

        v1 = [1, 1, 1, 1]
        v2 = [1, 1, 1, 1, 1, 1, 5]
        assert prob_domination(v1, v2) == 1
        assert prob_domination(v2, v1) == -1

    def test_spearman_correlation(self):
        from edacc.statistics import spearman_correlation
        rho, p = spearman_correlation([1.0, 2.0, 3.0, 4.0, 5.0, 6.0, 7.0],
                                      [2.0, 3.0, 4.0, 5.0, 6.0, 7.0, 8.0])
        assert rho > 0.99 and p <= 0.001

    def test_pearson_correlation(self):
        from edacc.statistics import pearson_correlation
        rho, p = pearson_correlation([1.0, 2.0, 3.0, 4.0],
                                     [1.0, 2.0, 3.0, 4.0])
        assert rho == 1.0 and p <= 1e-10

    def test_kolmogorow_smirnow_2sample_test(self):
        from edacc.statistics import kolmogorow_smirnow_2sample_test
        D, p = kolmogorow_smirnow_2sample_test([1.0, 2.0, 3.0, 4.0],
                                               [5.0, 6.0, 7.0, 8.0])
        assert float_eq(D, 1.0) and float_eq(p, 0.02857, eps=1e-5)
        D, p = kolmogorow_smirnow_2sample_test([1.0, 2.0, 3.0, 4.0, 10.0],
                                               [5.0, 6.0, 7.0, 8.0])
        assert float_eq(D, 0.8) and float_eq(p, 0.07937, eps=1e-5)

    def test_wilcox_test(self):
        from edacc.statistics import wilcox_test
        W, p = wilcox_test([1.0, 2.0, 3.0, 3.5, 3.8], [5.0, 6.0, 7.0, 8.0])
        assert W == 0 and float_eq(p, 0.01587, eps=1e-5)
        W, p = wilcox_test([1.0, 2.0, 3.0, 4.0, 10.0], [5.0, 6.0, 7.0, 8.0])
        assert W == 4 and float_eq(p, 0.1905, eps=1e-4)


class UtilsTestCase(unittest.TestCase):
    # Tests for the misc helpers in edacc.utils.

    def test_lzma_compression(self):
        from edacc import utils
        uncompressed_data = "TestData with \x12\x65 weird bytes \n and everything"
        compressed_data = utils.lzma_compress(uncompressed_data)
        decompressed_data = utils.lzma_decompress(compressed_data)
        assert decompressed_data == uncompressed_data
        assert len(compressed_data) > 13  # there should always be a 13 bytes header
        # Byte 5 onwards of the header holds the uncompressed length (LE u64).
        len_bytes = struct.unpack('<Q', compressed_data[5:13])[0]
        assert len_bytes == len(uncompressed_data)

    def test_format_output_file(self):
        from edacc.utils import formatOutputFile
        assert formatOutputFile("") == ""
        assert formatOutputFile(None) == "No output"
        # Output up to 4096 chars is passed through; longer output keeps
        # 2048 chars from each end around a truncation marker.
        assert formatOutputFile("a" * 4096) == "a" * 4096
        assert formatOutputFile("a" * 4097) == "a" * 2048 + "\n\n... [truncated 0 kB]\n\n" + "a" * 2048

    def test_newline_split_string(self):
        from edacc.utils import newline_split_string
        assert newline_split_string("test", 0) == "test"
        assert newline_split_string("test", 4) == "test"
        assert newline_split_string("test", 5) == "test"
        assert newline_split_string("test", 1) == "t\ne\ns\nt"
        assert newline_split_string("test", 2) == "te\nst"
        assert newline_split_string("test", 3) == "tes\nt"

    def test_download_size(self):
        from edacc.utils import download_size
        assert download_size(0) == "0 Bytes"
        assert download_size(1) == "1 Bytes"
        assert download_size(1024) == "1.0 kB"
        assert download_size(1024*1024) == "1.0 MB"

    def test_parse_parameters(self):
        from edacc.utils import parse_parameters
        params = parse_parameters("-p1 5 -p2 -p3 4.0 -p5 abc -p6 2.0")
        assert ("-p1", "-p1", "5", False, 0) in params
        assert ("-p2", "-p2", "", True, 2) in params
        assert ("-p3", "-p3", "4.0", False, 3) in params
        assert ("-p5", "-p5", "abc", False, 5) in params
        assert ("-p6", "-p6", "2.0", False, 7) in params
        params = parse_parameters("-p1 1.0 -i INSTANCE SEED")
        assert ("-p1", "-p1", "1.0", False, 0) in params
        assert ("instance", "-i", "", False, 2) in params
        assert ("seed", "", "", False, 4) in params


class AnalysisTestCase(unittest.TestCase):
    # End-to-end test through the Flask test client against the ranking view.

    def setUp(self):
        from edacc import models, config
        config.DEFAULT_DATABASES = [("edacc", "edaccteam", TEST_DATABASE,
                                     TEST_DATABASE, True)]
        from edacc.web import app
        self.app = app.test_client()
        self.db = db = models.add_database("edacc", "edaccteam",
                                           TEST_DATABASE, TEST_DATABASE)
        clean_database(db)
        fixtures.setup_ranking_fixture(db)

    def test_solver_ranking(self):
        exp = self.db.session.query(self.db.Experiment).first()
        assert "The virtual best solver" in self.app.get('/'+TEST_DATABASE+"/experiment/" + \
            str(exp.idExperiment) + "/ranking/").data

    def tearDown(self):
        clean_database(self.db)
        self.db.session.remove()


if __name__ == '__main__':
    unittest.main()
mit
NOMORECOFFEE/thrift
lib/py/src/server/THttpServer.py
53
3147
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#

import BaseHTTPServer

from thrift.server import TServer
from thrift.transport import TTransport


class ResponseException(Exception):
  """Allows handlers to override the HTTP response

  Normally, THttpServer always sends a 200 response.  If a handler wants
  to override this behavior (e.g., to simulate a misconfigured or
  overloaded web server during testing), it can raise a ResponseException.
  The function passed to the constructor will be called with the
  RequestHandler as its only argument.
  """
  def __init__(self, handler):
    self.handler = handler


class THttpServer(TServer.TServer):
  """A simple HTTP-based Thrift server

  This class is not very performant, but it is useful (for example) for
  acting as a mock version of an Apache-based PHP Thrift endpoint.
  """
  def __init__(self,
               processor,
               server_address,
               inputProtocolFactory,
               outputProtocolFactory=None,
               server_class=BaseHTTPServer.HTTPServer):
    """Set up protocol factories and HTTP server.

    See BaseHTTPServer for server_address.
    See TServer for protocol factories.
    """
    if outputProtocolFactory is None:
      outputProtocolFactory = inputProtocolFactory

    TServer.TServer.__init__(self, processor, None, None, None,
                             inputProtocolFactory, outputProtocolFactory)

    thttpserver = self

    # Fixed the misspelled local class name (RequestHander) and removed a
    # dead assignment: otrans was bound to a TFileObjectTransport around
    # self.wfile and immediately overwritten with a TMemoryBuffer.
    class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
      def do_POST(self):
        # Don't care about the request path.
        # Buffer the request body (sized by Content-Length) for reading.
        itrans = TTransport.TFileObjectTransport(self.rfile)
        itrans = TTransport.TBufferedTransport(
          itrans, int(self.headers['Content-Length']))
        # Collect the response in memory so it can be written out in one
        # go after a successful process() call.
        otrans = TTransport.TMemoryBuffer()
        iprot = thttpserver.inputProtocolFactory.getProtocol(itrans)
        oprot = thttpserver.outputProtocolFactory.getProtocol(otrans)
        try:
          thttpserver.processor.process(iprot, oprot)
        except ResponseException as exn:
          # Handler asked to control the HTTP response itself.
          exn.handler(self)
        else:
          self.send_response(200)
          self.send_header("content-type", "application/x-thrift")
          self.end_headers()
          self.wfile.write(otrans.getvalue())

    self.httpd = server_class(server_address, RequestHandler)

  def serve(self):
    self.httpd.serve_forever()
apache-2.0
brianmay/python-tldap-debian
tldap/filter.py
1
1859
""" filters.py - misc stuff for handling LDAP filter strings (see RFC2254) See http://www.python-ldap.org/ for details. \$Id: filter.py,v 1.9 2011/07/22 07:20:53 stroeder Exp $ Compability: - Tested with Python 2.0+ """ import six def escape_filter_chars(assertion_value, escape_mode=0): """ Replace all special characters found in assertion_value by quoted notation. escape_mode If 0 only special chars mentioned in RFC 4515 are escaped. If 1 all NON-ASCII chars are escaped. If 2 all chars are escaped. """ if isinstance(assertion_value, six.text_type): assertion_value = assertion_value.encode("utf_8") s = [] for c in assertion_value: do_escape = False if str != bytes: # Python 3 pass else: # Python 2 c = ord(c) if escape_mode == 0: if c == ord('\\') or c == ord('*') \ or c == ord('(') or c == ord(')') \ or c == ord('\x00'): do_escape = True elif escape_mode == 1: if c < '0' or c > 'z' or c in "\\*()": do_escape = True elif escape_mode == 2: do_escape = True else: raise ValueError('escape_mode must be 0, 1 or 2.') if do_escape: s.append("\\%02x" % c) else: s.append(chr(c)) return ''.join(s) def filter_format(filter_template, assertion_values): """ filter_template String containing %s as placeholder for assertion values. assertion_values List or tuple of assertion values. Length must match count of %s in filter_template. """ assert isinstance(filter_template, six.string_types) return filter_template % ( tuple(map(escape_filter_chars, assertion_values)))
gpl-3.0
rvalyi/geraldo
site/newsite/django_1_0/tests/regressiontests/forms/localflavor/pl.py
12
22474
# -*- coding: utf-8 -*- # Tests for the contrib/localflavor/ PL form fields. tests = r""" # PLVoivodeshipSelect ########################################################## >>> from django.contrib.localflavor.pl.forms import PLVoivodeshipSelect >>> f = PLVoivodeshipSelect() >>> f.render('voivodeships','pomerania') u'<select name="voivodeships">\n<option value="lower_silesia">Lower Silesia</option>\n<option value="kuyavia-pomerania">Kuyavia-Pomerania</option>\n<option value="lublin">Lublin</option>\n<option value="lubusz">Lubusz</option>\n<option value="lodz">Lodz</option>\n<option value="lesser_poland">Lesser Poland</option>\n<option value="masovia">Masovia</option>\n<option value="opole">Opole</option>\n<option value="subcarpatia">Subcarpatia</option>\n<option value="podlasie">Podlasie</option>\n<option value="pomerania" selected="selected">Pomerania</option>\n<option value="silesia">Silesia</option>\n<option value="swietokrzyskie">Swietokrzyskie</option>\n<option value="warmia-masuria">Warmia-Masuria</option>\n<option value="greater_poland">Greater Poland</option>\n<option value="west_pomerania">West Pomerania</option>\n</select>' # PLAdministrativeUnitSelect ########################################################## >>> from django.contrib.localflavor.pl.forms import PLAdministrativeUnitSelect >>> f = PLAdministrativeUnitSelect() >>> f.render('administrativeunit','katowice') u'<select name="administrativeunit">\n<option value="wroclaw">Wroc\u0142aw</option>\n<option value="jeleniagora">Jelenia G\xf3ra</option>\n<option value="legnica">Legnica</option>\n<option value="boleslawiecki">boles\u0142awiecki</option>\n<option value="dzierzoniowski">dzier\u017coniowski</option>\n<option value="glogowski">g\u0142ogowski</option>\n<option value="gorowski">g\xf3rowski</option>\n<option value="jaworski">jaworski</option>\n<option value="jeleniogorski">jeleniog\xf3rski</option>\n<option value="kamiennogorski">kamiennog\xf3rski</option>\n<option 
value="klodzki">k\u0142odzki</option>\n<option value="legnicki">legnicki</option>\n<option value="lubanski">luba\u0144ski</option>\n<option value="lubinski">lubi\u0144ski</option>\n<option value="lwowecki">lw\xf3wecki</option>\n<option value="milicki">milicki</option>\n<option value="olesnicki">ole\u015bnicki</option>\n<option value="olawski">o\u0142awski</option>\n<option value="polkowicki">polkowicki</option>\n<option value="strzelinski">strzeli\u0144ski</option>\n<option value="sredzki">\u015bredzki</option>\n<option value="swidnicki">\u015bwidnicki</option>\n<option value="trzebnicki">trzebnicki</option>\n<option value="walbrzyski">wa\u0142brzyski</option>\n<option value="wolowski">wo\u0142owski</option>\n<option value="wroclawski">wroc\u0142awski</option>\n<option value="zabkowicki">z\u0105bkowicki</option>\n<option value="zgorzelecki">zgorzelecki</option>\n<option value="zlotoryjski">z\u0142otoryjski</option>\n<option value="bydgoszcz">Bydgoszcz</option>\n<option value="torun">Toru\u0144</option>\n<option value="wloclawek">W\u0142oc\u0142awek</option>\n<option value="grudziadz">Grudzi\u0105dz</option>\n<option value="aleksandrowski">aleksandrowski</option>\n<option value="brodnicki">brodnicki</option>\n<option value="bydgoski">bydgoski</option>\n<option value="chelminski">che\u0142mi\u0144ski</option>\n<option value="golubsko-dobrzynski">golubsko-dobrzy\u0144ski</option>\n<option value="grudziadzki">grudzi\u0105dzki</option>\n<option value="inowroclawski">inowroc\u0142awski</option>\n<option value="lipnowski">lipnowski</option>\n<option value="mogilenski">mogile\u0144ski</option>\n<option value="nakielski">nakielski</option>\n<option value="radziejowski">radziejowski</option>\n<option value="rypinski">rypi\u0144ski</option>\n<option value="sepolenski">s\u0119pole\u0144ski</option>\n<option value="swiecki">\u015bwiecki</option>\n<option value="torunski">toru\u0144ski</option>\n<option value="tucholski">tucholski</option>\n<option 
value="wabrzeski">w\u0105brzeski</option>\n<option value="wloclawski">wroc\u0142awski</option>\n<option value="zninski">\u017ani\u0144ski</option>\n<option value="lublin">Lublin</option>\n<option value="biala-podlaska">Bia\u0142a Podlaska</option>\n<option value="chelm">Che\u0142m</option>\n<option value="zamosc">Zamo\u015b\u0107</option>\n<option value="bialski">bialski</option>\n<option value="bilgorajski">bi\u0142gorajski</option>\n<option value="chelmski">che\u0142mski</option>\n<option value="hrubieszowski">hrubieszowski</option>\n<option value="janowski">janowski</option>\n<option value="krasnostawski">krasnostawski</option>\n<option value="krasnicki">kra\u015bnicki</option>\n<option value="lubartowski">lubartowski</option>\n<option value="lubelski">lubelski</option>\n<option value="leczynski">\u0142\u0119czy\u0144ski</option>\n<option value="lukowski">\u0142ukowski</option>\n<option value="opolski">opolski</option>\n<option value="parczewski">parczewski</option>\n<option value="pulawski">pu\u0142awski</option>\n<option value="radzynski">radzy\u0144ski</option>\n<option value="rycki">rycki</option>\n<option value="swidnicki">\u015bwidnicki</option>\n<option value="tomaszowski">tomaszowski</option>\n<option value="wlodawski">w\u0142odawski</option>\n<option value="zamojski">zamojski</option>\n<option value="gorzow-wielkopolski">Gorz\xf3w Wielkopolski</option>\n<option value="zielona-gora">Zielona G\xf3ra</option>\n<option value="gorzowski">gorzowski</option>\n<option value="krosnienski">kro\u015bnie\u0144ski</option>\n<option value="miedzyrzecki">mi\u0119dzyrzecki</option>\n<option value="nowosolski">nowosolski</option>\n<option value="slubicki">s\u0142ubicki</option>\n<option value="strzelecko-drezdenecki">strzelecko-drezdenecki</option>\n<option value="sulecinski">sule\u0144ci\u0144ski</option>\n<option value="swiebodzinski">\u015bwiebodzi\u0144ski</option>\n<option value="wschowski">wschowski</option>\n<option 
value="zielonogorski">zielonog\xf3rski</option>\n<option value="zaganski">\u017caga\u0144ski</option>\n<option value="zarski">\u017carski</option>\n<option value="lodz">\u0141\xf3d\u017a</option>\n<option value="piotrkow-trybunalski">Piotrk\xf3w Trybunalski</option>\n<option value="skierniewice">Skierniewice</option>\n<option value="belchatowski">be\u0142chatowski</option>\n<option value="brzezinski">brzezi\u0144ski</option>\n<option value="kutnowski">kutnowski</option>\n<option value="laski">\u0142aski</option>\n<option value="leczycki">\u0142\u0119czycki</option>\n<option value="lowicki">\u0142owicki</option>\n<option value="lodzki wschodni">\u0142\xf3dzki wschodni</option>\n<option value="opoczynski">opoczy\u0144ski</option>\n<option value="pabianicki">pabianicki</option>\n<option value="pajeczanski">paj\u0119cza\u0144ski</option>\n<option value="piotrkowski">piotrkowski</option>\n<option value="poddebicki">podd\u0119bicki</option>\n<option value="radomszczanski">radomszcza\u0144ski</option>\n<option value="rawski">rawski</option>\n<option value="sieradzki">sieradzki</option>\n<option value="skierniewicki">skierniewicki</option>\n<option value="tomaszowski">tomaszowski</option>\n<option value="wielunski">wielu\u0144ski</option>\n<option value="wieruszowski">wieruszowski</option>\n<option value="zdunskowolski">zdu\u0144skowolski</option>\n<option value="zgierski">zgierski</option>\n<option value="krakow">Krak\xf3w</option>\n<option value="tarnow">Tarn\xf3w</option>\n<option value="nowy-sacz">Nowy S\u0105cz</option>\n<option value="bochenski">boche\u0144ski</option>\n<option value="brzeski">brzeski</option>\n<option value="chrzanowski">chrzanowski</option>\n<option value="dabrowski">d\u0105browski</option>\n<option value="gorlicki">gorlicki</option>\n<option value="krakowski">krakowski</option>\n<option value="limanowski">limanowski</option>\n<option value="miechowski">miechowski</option>\n<option value="myslenicki">my\u015blenicki</option>\n<option 
value="nowosadecki">nowos\u0105decki</option>\n<option value="nowotarski">nowotarski</option>\n<option value="olkuski">olkuski</option>\n<option value="oswiecimski">o\u015bwi\u0119cimski</option>\n<option value="proszowicki">proszowicki</option>\n<option value="suski">suski</option>\n<option value="tarnowski">tarnowski</option>\n<option value="tatrzanski">tatrza\u0144ski</option>\n<option value="wadowicki">wadowicki</option>\n<option value="wielicki">wielicki</option>\n<option value="warszawa">Warszawa</option>\n<option value="ostroleka">Ostro\u0142\u0119ka</option>\n<option value="plock">P\u0142ock</option>\n<option value="radom">Radom</option>\n<option value="siedlce">Siedlce</option>\n<option value="bialobrzeski">bia\u0142obrzeski</option>\n<option value="ciechanowski">ciechanowski</option>\n<option value="garwolinski">garwoli\u0144ski</option>\n<option value="gostyninski">gostyni\u0144ski</option>\n<option value="grodziski">grodziski</option>\n<option value="grojecki">gr\xf3jecki</option>\n<option value="kozienicki">kozenicki</option>\n<option value="legionowski">legionowski</option>\n<option value="lipski">lipski</option>\n<option value="losicki">\u0142osicki</option>\n<option value="makowski">makowski</option>\n<option value="minski">mi\u0144ski</option>\n<option value="mlawski">m\u0142awski</option>\n<option value="nowodworski">nowodworski</option>\n<option value="ostrolecki">ostro\u0142\u0119cki</option>\n<option value="ostrowski">ostrowski</option>\n<option value="otwocki">otwocki</option>\n<option value="piaseczynski">piaseczy\u0144ski</option>\n<option value="plocki">p\u0142ocki</option>\n<option value="plonski">p\u0142o\u0144ski</option>\n<option value="pruszkowski">pruszkowski</option>\n<option value="przasnyski">przasnyski</option>\n<option value="przysuski">przysuski</option>\n<option value="pultuski">pu\u0142tuski</option>\n<option value="radomski">radomski</option>\n<option value="siedlecki">siedlecki</option>\n<option 
value="sierpecki">sierpecki</option>\n<option value="sochaczewski">sochaczewski</option>\n<option value="sokolowski">soko\u0142owski</option>\n<option value="szydlowiecki">szyd\u0142owiecki</option>\n<option value="warszawski-zachodni">warszawski zachodni</option>\n<option value="wegrowski">w\u0119growski</option>\n<option value="wolominski">wo\u0142omi\u0144ski</option>\n<option value="wyszkowski">wyszkowski</option>\n<option value="zwolenski">zwole\u0144ski</option>\n<option value="zurominski">\u017curomi\u0144ski</option>\n<option value="zyrardowski">\u017cyrardowski</option>\n<option value="opole">Opole</option>\n<option value="brzeski">brzeski</option>\n<option value="glubczycki">g\u0142ubczyski</option>\n<option value="kedzierzynsko-kozielski">k\u0119dzierzy\u0144ski-kozielski</option>\n<option value="kluczborski">kluczborski</option>\n<option value="krapkowicki">krapkowicki</option>\n<option value="namyslowski">namys\u0142owski</option>\n<option value="nyski">nyski</option>\n<option value="oleski">oleski</option>\n<option value="opolski">opolski</option>\n<option value="prudnicki">prudnicki</option>\n<option value="strzelecki">strzelecki</option>\n<option value="rzeszow">Rzesz\xf3w</option>\n<option value="krosno">Krosno</option>\n<option value="przemysl">Przemy\u015bl</option>\n<option value="tarnobrzeg">Tarnobrzeg</option>\n<option value="bieszczadzki">bieszczadzki</option>\n<option value="brzozowski">brzozowski</option>\n<option value="debicki">d\u0119bicki</option>\n<option value="jaroslawski">jaros\u0142awski</option>\n<option value="jasielski">jasielski</option>\n<option value="kolbuszowski">kolbuszowski</option>\n<option value="krosnienski">kro\u015bnie\u0144ski</option>\n<option value="leski">leski</option>\n<option value="lezajski">le\u017cajski</option>\n<option value="lubaczowski">lubaczowski</option>\n<option value="lancucki">\u0142a\u0144cucki</option>\n<option value="mielecki">mielecki</option>\n<option 
value="nizanski">ni\u017ca\u0144ski</option>\n<option value="przemyski">przemyski</option>\n<option value="przeworski">przeworski</option>\n<option value="ropczycko-sedziszowski">ropczycko-s\u0119dziszowski</option>\n<option value="rzeszowski">rzeszowski</option>\n<option value="sanocki">sanocki</option>\n<option value="stalowowolski">stalowowolski</option>\n<option value="strzyzowski">strzy\u017cowski</option>\n<option value="tarnobrzeski">tarnobrzeski</option>\n<option value="bialystok">Bia\u0142ystok</option>\n<option value="lomza">\u0141om\u017ca</option>\n<option value="suwalki">Suwa\u0142ki</option>\n<option value="augustowski">augustowski</option>\n<option value="bialostocki">bia\u0142ostocki</option>\n<option value="bielski">bielski</option>\n<option value="grajewski">grajewski</option>\n<option value="hajnowski">hajnowski</option>\n<option value="kolnenski">kolne\u0144ski</option>\n<option value="\u0142omzynski">\u0142om\u017cy\u0144ski</option>\n<option value="moniecki">moniecki</option>\n<option value="sejnenski">sejne\u0144ski</option>\n<option value="siemiatycki">siematycki</option>\n<option value="sokolski">sok\xf3lski</option>\n<option value="suwalski">suwalski</option>\n<option value="wysokomazowiecki">wysokomazowiecki</option>\n<option value="zambrowski">zambrowski</option>\n<option value="gdansk">Gda\u0144sk</option>\n<option value="gdynia">Gdynia</option>\n<option value="slupsk">S\u0142upsk</option>\n<option value="sopot">Sopot</option>\n<option value="bytowski">bytowski</option>\n<option value="chojnicki">chojnicki</option>\n<option value="czluchowski">cz\u0142uchowski</option>\n<option value="kartuski">kartuski</option>\n<option value="koscierski">ko\u015bcierski</option>\n<option value="kwidzynski">kwidzy\u0144ski</option>\n<option value="leborski">l\u0119borski</option>\n<option value="malborski">malborski</option>\n<option value="nowodworski">nowodworski</option>\n<option value="gdanski">gda\u0144ski</option>\n<option 
value="pucki">pucki</option>\n<option value="slupski">s\u0142upski</option>\n<option value="starogardzki">starogardzki</option>\n<option value="sztumski">sztumski</option>\n<option value="tczewski">tczewski</option>\n<option value="wejherowski">wejcherowski</option>\n<option value="katowice" selected="selected">Katowice</option>\n<option value="bielsko-biala">Bielsko-Bia\u0142a</option>\n<option value="bytom">Bytom</option>\n<option value="chorzow">Chorz\xf3w</option>\n<option value="czestochowa">Cz\u0119stochowa</option>\n<option value="dabrowa-gornicza">D\u0105browa G\xf3rnicza</option>\n<option value="gliwice">Gliwice</option>\n<option value="jastrzebie-zdroj">Jastrz\u0119bie Zdr\xf3j</option>\n<option value="jaworzno">Jaworzno</option>\n<option value="myslowice">Mys\u0142owice</option>\n<option value="piekary-slaskie">Piekary \u015al\u0105skie</option>\n<option value="ruda-slaska">Ruda \u015al\u0105ska</option>\n<option value="rybnik">Rybnik</option>\n<option value="siemianowice-slaskie">Siemianowice \u015al\u0105skie</option>\n<option value="sosnowiec">Sosnowiec</option>\n<option value="swietochlowice">\u015awi\u0119toch\u0142owice</option>\n<option value="tychy">Tychy</option>\n<option value="zabrze">Zabrze</option>\n<option value="zory">\u017bory</option>\n<option value="bedzinski">b\u0119dzi\u0144ski</option>\n<option value="bielski">bielski</option>\n<option value="bierunsko-ledzinski">bieru\u0144sko-l\u0119dzi\u0144ski</option>\n<option value="cieszynski">cieszy\u0144ski</option>\n<option value="czestochowski">cz\u0119stochowski</option>\n<option value="gliwicki">gliwicki</option>\n<option value="klobucki">k\u0142obucki</option>\n<option value="lubliniecki">lubliniecki</option>\n<option value="mikolowski">miko\u0142owski</option>\n<option value="myszkowski">myszkowski</option>\n<option value="pszczynski">pszczy\u0144ski</option>\n<option value="raciborski">raciborski</option>\n<option value="rybnicki">rybnicki</option>\n<option 
value="tarnogorski">tarnog\xf3rski</option>\n<option value="wodzislawski">wodzis\u0142awski</option>\n<option value="zawiercianski">zawiercia\u0144ski</option>\n<option value="zywiecki">\u017cywiecki</option>\n<option value="kielce">Kielce</option>\n<option value="buski">buski</option>\n<option value="jedrzejowski">j\u0119drzejowski</option>\n<option value="kazimierski">kazimierski</option>\n<option value="kielecki">kielecki</option>\n<option value="konecki">konecki</option>\n<option value="opatowski">opatowski</option>\n<option value="ostrowiecki">ostrowiecki</option>\n<option value="pinczowski">pi\u0144czowski</option>\n<option value="sandomierski">sandomierski</option>\n<option value="skarzyski">skar\u017cyski</option>\n<option value="starachowicki">starachowicki</option>\n<option value="staszowski">staszowski</option>\n<option value="wloszczowski">w\u0142oszczowski</option>\n<option value="olsztyn">Olsztyn</option>\n<option value="elblag">Elbl\u0105g</option>\n<option value="bartoszycki">bartoszycki</option>\n<option value="braniewski">braniewski</option>\n<option value="dzialdowski">dzia\u0142dowski</option>\n<option value="elblaski">elbl\u0105ski</option>\n<option value="elcki">e\u0142cki</option>\n<option value="gizycki">gi\u017cycki</option>\n<option value="goldapski">go\u0142dapski</option>\n<option value="ilawski">i\u0142awski</option>\n<option value="ketrzynski">k\u0119trzy\u0144ski</option>\n<option value="lidzbarski">lidzbarski</option>\n<option value="mragowski">mr\u0105gowski</option>\n<option value="nidzicki">nidzicki</option>\n<option value="nowomiejski">nowomiejski</option>\n<option value="olecki">olecki</option>\n<option value="olsztynski">olszty\u0144ski</option>\n<option value="ostrodzki">ostr\xf3dzki</option>\n<option value="piski">piski</option>\n<option value="szczycienski">szczycie\u0144ski</option>\n<option value="wegorzewski">w\u0119gorzewski</option>\n<option value="poznan">Pozna\u0144</option>\n<option 
value="kalisz">Kalisz</option>\n<option value="konin">Konin</option>\n<option value="leszno">Leszno</option>\n<option value="chodzieski">chodziejski</option>\n<option value="czarnkowsko-trzcianecki">czarnkowsko-trzcianecki</option>\n<option value="gnieznienski">gnie\u017anie\u0144ski</option>\n<option value="gostynski">gosty\u0144ski</option>\n<option value="grodziski">grodziski</option>\n<option value="jarocinski">jaroci\u0144ski</option>\n<option value="kaliski">kaliski</option>\n<option value="kepinski">k\u0119pi\u0144ski</option>\n<option value="kolski">kolski</option>\n<option value="koninski">koni\u0144ski</option>\n<option value="koscianski">ko\u015bcia\u0144ski</option>\n<option value="krotoszynski">krotoszy\u0144ski</option>\n<option value="leszczynski">leszczy\u0144ski</option>\n<option value="miedzychodzki">mi\u0119dzychodzki</option>\n<option value="nowotomyski">nowotomyski</option>\n<option value="obornicki">obornicki</option>\n<option value="ostrowski">ostrowski</option>\n<option value="ostrzeszowski">ostrzeszowski</option>\n<option value="pilski">pilski</option>\n<option value="pleszewski">pleszewski</option>\n<option value="poznanski">pozna\u0144ski</option>\n<option value="rawicki">rawicki</option>\n<option value="slupecki">s\u0142upecki</option>\n<option value="szamotulski">szamotulski</option>\n<option value="sredzki">\u015bredzki</option>\n<option value="sremski">\u015bremski</option>\n<option value="turecki">turecki</option>\n<option value="wagrowiecki">w\u0105growiecki</option>\n<option value="wolsztynski">wolszty\u0144ski</option>\n<option value="wrzesinski">wrzesi\u0144ski</option>\n<option value="zlotowski">z\u0142otowski</option>\n<option value="bialogardzki">bia\u0142ogardzki</option>\n<option value="choszczenski">choszcze\u0144ski</option>\n<option value="drawski">drawski</option>\n<option value="goleniowski">goleniowski</option>\n<option value="gryficki">gryficki</option>\n<option value="gryfinski">gryfi\u0144ski</option>\n<option 
value="kamienski">kamie\u0144ski</option>\n<option value="kolobrzeski">ko\u0142obrzeski</option>\n<option value="koszalinski">koszali\u0144ski</option>\n<option value="lobeski">\u0142obeski</option>\n<option value="mysliborski">my\u015bliborski</option>\n<option value="policki">policki</option>\n<option value="pyrzycki">pyrzycki</option>\n<option value="slawienski">s\u0142awie\u0144ski</option>\n<option value="stargardzki">stargardzki</option>\n<option value="szczecinecki">szczecinecki</option>\n<option value="swidwinski">\u015bwidwi\u0144ski</option>\n<option value="walecki">wa\u0142ecki</option>\n</select>' # PLPostalCodeField ############################################################## >>> from django.contrib.localflavor.pl.forms import PLPostalCodeField >>> f = PLPostalCodeField() >>> f.clean('43--434') Traceback (most recent call last): ... ValidationError: [u'Enter a postal code in the format XX-XXX.'] >>> f.clean('41-403') u'41-403' # PLTaxNumberField ############################################################### >>> from django.contrib.localflavor.pl.forms import PLTaxNumberField >>> f = PLTaxNumberField() >>> f.clean('43-343-234-323') Traceback (most recent call last): ... ValidationError: [u'Enter a tax number field (NIP) in the format XXX-XXX-XX-XX or XX-XX-XXX-XXX.'] >>> f.clean('64-62-414-124') u'6462414124' >>> f.clean('646-241-41-24') u'6462414124' >>> f.clean('646-241-41-23') Traceback (most recent call last): ... ValidationError: [u'Wrong checksum for the Tax Number (NIP).'] # PLNationalIdentificationNumberField ############################################ >>> from django.contrib.localflavor.pl.forms import PLNationalIdentificationNumberField >>> f = PLNationalIdentificationNumberField() >>> f.clean('80071610614') u'80071610614' >>> f.clean('80071610610') Traceback (most recent call last): ... ValidationError: [u'Wrong checksum for the National Identification Number.'] >>> f.clean('80') Traceback (most recent call last): ... 
ValidationError: [u'National Identification Number consists of 11 digits.'] >>> f.clean('800716106AA') Traceback (most recent call last): ... ValidationError: [u'National Identification Number consists of 11 digits.'] # PLNationalBusinessRegisterField ################################################ >>> from django.contrib.localflavor.pl.forms import PLNationalBusinessRegisterField >>> f = PLNationalBusinessRegisterField() >>> f.clean('590096454') u'590096454' >>> f.clean('590096453') Traceback (most recent call last): ... ValidationError: [u'Wrong checksum for the National Business Register Number (REGON).'] >>> f.clean('590096') Traceback (most recent call last): ... ValidationError: [u'National Business Register Number (REGON) consists of 7 or 9 digits.'] """
lgpl-3.0
calvinleenyc/zulip
zerver/management/commands/show_admins.py
38
1163
from __future__ import absolute_import from __future__ import print_function from typing import Any from argparse import ArgumentParser from django.core.management.base import BaseCommand from zerver.models import get_realm, Realm import sys class Command(BaseCommand): help = """Show the admins in a realm.""" def add_arguments(self, parser): # type: (ArgumentParser) -> None parser.add_argument('realm', metavar='<realm>', type=str, help="realm to show admins for") def handle(self, *args, **options): # type: (*Any, **str) -> None realm_name = options['realm'] try: realm = get_realm(realm_name) except Realm.DoesNotExist: print('There is no realm called %s.' % (realm_name,)) sys.exit(1) users = realm.get_admin_users() if users: print('Admins:\n') for user in users: print(' %s (%s)' % (user.email, user.full_name)) else: print('There are no admins for this realm!') print('\nYou can use the "knight" management command to knight admins.')
apache-2.0
viswimmer1/PythonGenerator
data/python_files/30423103/encoding.py
71
7294
# Python 2 text-conversion helpers.  This module relies on Py2-only
# builtins (`unicode`, `basestring`, `long`, `except E, e` syntax) and on
# the Py2 `urllib.quote`; it is not valid Python 3.
import types
import urllib
import locale
import datetime
import codecs
from decimal import Decimal

from django.utils.functional import Promise


class DjangoUnicodeDecodeError(UnicodeDecodeError):
    """UnicodeDecodeError that also remembers the object being decoded.

    Keeping the offending object lets the final error message show both
    the repr and the type of the input that failed, which makes the
    failure diagnosable at the point where it is reported.
    """

    def __init__(self, obj, *args):
        self.obj = obj
        UnicodeDecodeError.__init__(self, *args)

    def __str__(self):
        original = UnicodeDecodeError.__str__(self)
        # Augment the stock message with what was actually passed in.
        return '%s. You passed in %r (%s)' % (original, self.obj,
                type(self.obj))


class StrAndUnicode(object):
    """
    A class whose __str__ returns its __unicode__ as a UTF-8 bytestring.

    Useful as a mix-in: subclasses implement __unicode__ only, and get a
    consistent bytestring __str__ for free.
    """
    def __str__(self):
        return self.__unicode__().encode('utf-8')


def smart_unicode(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Returns a unicode object representing 's'. Treats bytestrings using
    the 'encoding' codec.

    If strings_only is True, don't convert (some) non-string-like objects
    (see is_protected_type() for the exact set).
    """
    if isinstance(s, Promise):
        # The input is the result of a gettext_lazy() call: leave it lazy
        # rather than forcing translation now.
        return s
    return force_unicode(s, encoding, strings_only, errors)


def is_protected_type(obj):
    """Determine if the object instance is of a protected type.

    Objects of protected types are preserved as-is when passed to
    force_unicode(strings_only=True).
    """
    return isinstance(obj, (
        types.NoneType,
        int, long,
        datetime.datetime, datetime.date, datetime.time,
        float, Decimal)
    )


def force_unicode(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Similar to smart_unicode, except that lazy instances are resolved to
    strings, rather than kept as lazy objects.

    If strings_only is True, don't convert (some) non-string-like objects.

    Raises DjangoUnicodeDecodeError (a UnicodeDecodeError subclass that
    records the input) when a bytestring cannot be decoded.
    """
    # Handle the common case first, saves 30-40% in performance when s
    # is an instance of unicode. This function gets called often in that
    # setting.
    if isinstance(s, unicode):
        return s
    if strings_only and is_protected_type(s):
        return s
    try:
        if not isinstance(s, basestring,):
            # Non-string input: prefer the object's own __unicode__,
            # otherwise go through str() and decode.
            if hasattr(s, '__unicode__'):
                s = unicode(s)
            else:
                try:
                    s = unicode(str(s), encoding, errors)
                except UnicodeEncodeError:
                    if not isinstance(s, Exception):
                        raise
                    # If we get to here, the caller has passed in an Exception
                    # subclass populated with non-ASCII data without special
                    # handling to display as a string. We need to handle this
                    # without raising a further exception. We do an
                    # approximation to what the Exception's standard str()
                    # output should be.
                    s = ' '.join([force_unicode(arg, encoding, strings_only,
                            errors) for arg in s])
        elif not isinstance(s, unicode):
            # Note: We use .decode() here, instead of unicode(s, encoding,
            # errors), so that if s is a SafeString, it ends up being a
            # SafeUnicode at the end.
            s = s.decode(encoding, errors)
    except UnicodeDecodeError, e:
        if not isinstance(s, Exception):
            # Re-raise as the richer error type that carries the input.
            raise DjangoUnicodeDecodeError(s, *e.args)
        else:
            # If we get to here, the caller has passed in an Exception
            # subclass populated with non-ASCII bytestring data without a
            # working unicode method. Try to handle this without raising a
            # further exception by individually forcing the exception args
            # to unicode.
            s = ' '.join([force_unicode(arg, encoding, strings_only,
                    errors) for arg in s])
    return s


def smart_str(s, encoding='utf-8', strings_only=False, errors='strict'):
    """
    Returns a bytestring version of 's', encoded as specified in 'encoding'.

    If strings_only is True, don't convert (some) non-string-like objects
    (here only None and int, unlike force_unicode's wider protected set).
    """
    if strings_only and isinstance(s, (types.NoneType, int)):
        return s
    if isinstance(s, Promise):
        # Force the lazy translation and encode the result.
        return unicode(s).encode(encoding, errors)
    elif not isinstance(s, basestring):
        try:
            return str(s)
        except UnicodeEncodeError:
            if isinstance(s, Exception):
                # An Exception subclass containing non-ASCII data that doesn't
                # know how to print itself properly. We shouldn't raise a
                # further exception.
                return ' '.join([smart_str(arg, encoding, strings_only,
                        errors) for arg in s])
            return unicode(s).encode(encoding, errors)
    elif isinstance(s, unicode):
        return s.encode(encoding, errors)
    elif s and encoding != 'utf-8':
        # A non-empty bytestring assumed to currently be UTF-8: transcode
        # it into the requested encoding.
        return s.decode('utf-8', errors).encode(encoding, errors)
    else:
        # Already a bytestring in the target encoding (or empty).
        return s


def iri_to_uri(iri):
    """
    Convert an Internationalized Resource Identifier (IRI) portion to a URI
    portion that is suitable for inclusion in a URL.

    This is the algorithm from section 3.1 of RFC 3987. However, since we
    are assuming input is either UTF-8 or unicode already, we can simplify
    things a little from the full method.

    Returns an ASCII string containing the encoded result (or None if the
    input was None).
    """
    # The list of safe characters here is constructed from the "reserved" and
    # "unreserved" characters specified in sections 2.2 and 2.3 of RFC 3986:
    #     reserved    = gen-delims / sub-delims
    #     gen-delims  = ":" / "/" / "?" / "#" / "[" / "]" / "@"
    #     sub-delims  = "!" / "$" / "&" / "'" / "(" / ")"
    #                   / "*" / "+" / "," / ";" / "="
    #     unreserved  = ALPHA / DIGIT / "-" / "." / "_" / "~"
    # Of the unreserved characters, urllib.quote already considers all but
    # the ~ safe.
    # The % character is also added to the list of safe characters here, as the
    # end of section 3.1 of RFC 3987 specifically mentions that % must not be
    # converted.
    if iri is None:
        return iri
    return urllib.quote(smart_str(iri), safe="/#%[]=:;$&()+,!?*@'~")


def filepath_to_uri(path):
    """Convert a file system path to a URI portion that is suitable for
    inclusion in a URL.

    We are assuming input is either UTF-8 or unicode already.

    This method will encode certain chars that would normally be recognized
    as special chars for URIs. Note that this method does not encode the '
    character, as it is a valid character within URIs. See
    encodeURIComponent() JavaScript function for more details.

    Returns an ASCII string containing the encoded result (or None if the
    input was None).
    """
    if path is None:
        return path
    # I know about `os.sep` and `os.altsep` but I want to leave
    # some flexibility for hardcoding separators.
    return urllib.quote(smart_str(path).replace("\\", "/"), safe="/~!*()'")


# The encoding of the default system locale but falls back to the
# given fallback encoding if the encoding is unsupported by python or could
# not be determined. See tickets #10335 and #5846.
# NOTE: the bare except is deliberate best-effort -- any failure while
# probing the locale must degrade to 'ascii' rather than break import.
try:
    DEFAULT_LOCALE_ENCODING = locale.getdefaultlocale()[1] or 'ascii'
    codecs.lookup(DEFAULT_LOCALE_ENCODING)
except:
    DEFAULT_LOCALE_ENCODING = 'ascii'
gpl-2.0
synasius/django
tests/template_tests/filter_tests/test_slice.py
428
1317
from django.template.defaultfilters import slice_filter
from django.test import SimpleTestCase
from django.utils.safestring import mark_safe

from ..utils import setup


class SliceTests(SimpleTestCase):
    """Template-level tests for the ``slice`` filter's autoescape behavior."""

    @setup({'slice01': '{{ a|slice:"1:3" }} {{ b|slice:"1:3" }}'})
    def test_slice01(self):
        # A plain string is escaped after slicing; a mark_safe string is not.
        context = {'a': 'a&b', 'b': mark_safe('a&b')}
        rendered = self.engine.render_to_string('slice01', context)
        self.assertEqual(rendered, '&amp;b &b')

    @setup({'slice02': '{% autoescape off %}{{ a|slice:"1:3" }} {{ b|slice:"1:3" }}{% endautoescape %}'})
    def test_slice02(self):
        # With autoescaping off, both inputs render unescaped.
        context = {'a': 'a&b', 'b': mark_safe('a&b')}
        rendered = self.engine.render_to_string('slice02', context)
        self.assertEqual(rendered, '&b &b')


class FunctionTests(SimpleTestCase):
    """Direct unit tests calling slice_filter() on plain strings."""

    def test_zero_length(self):
        self.assertEqual(slice_filter('abcdefg', '0'), '')

    def test_index(self):
        self.assertEqual(slice_filter('abcdefg', '1'), 'a')

    def test_negative_index(self):
        self.assertEqual(slice_filter('abcdefg', '-1'), 'abcdef')

    def test_range(self):
        self.assertEqual(slice_filter('abcdefg', '1:2'), 'b')

    def test_range_multiple(self):
        self.assertEqual(slice_filter('abcdefg', '1:3'), 'bc')

    def test_range_step(self):
        self.assertEqual(slice_filter('abcdefg', '0::2'), 'aceg')
bsd-3-clause
mricon/grokmirror
grokmirror/dumb_pull.py
2
7295
# -*- coding: utf-8 -*-
# Copyright (C) 2013-2018 by The Linux Foundation and contributors
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.

import os
import grokmirror
import logging
import fnmatch
import subprocess

logger = logging.getLogger(__name__)


def git_rev_parse_all(gitdir):
    """Return the raw `git rev-parse --all` output for *gitdir*.

    The stdout blob (one ref tip sha per line) is returned unparsed so
    callers can cheaply compare before/after snapshots.  Any stderr text
    is logged.
    """
    args = ['rev-parse', '--all']
    retcode, output, error = grokmirror.run_git_command(gitdir, args)

    if error:
        # Put things we recognize into debug
        # NOTE(review): unlike git_remote_update() below, nothing is ever
        # appended to `debug` here, so every stderr line is logged as a
        # warning.  Possibly an oversight -- confirm the intended
        # classification before changing it.
        debug = list()
        warn = list()
        for line in error.split('\n'):
            warn.append(line)
        if debug:
            logger.debug('Stderr: %s', '\n'.join(debug))
        if warn:
            logger.warning('Stderr: %s', '\n'.join(warn))

    return output


def git_remote_update(args, fullpath):
    """Run the git command *args* in *fullpath*, triaging stderr.

    Expected fetch chatter ("From ..." and ref-mapping "-> " lines) goes
    to debug; anything else is surfaced as a warning.
    """
    retcode, output, error = grokmirror.run_git_command(fullpath, args)

    if error:
        # Put things we recognize into debug
        debug = list()
        warn = list()
        for line in error.split('\n'):
            if line.find('From ') == 0:
                debug.append(line)
            elif line.find('-> ') > 0:
                debug.append(line)
            else:
                warn.append(line)
        if debug:
            logger.debug('Stderr: %s', '\n'.join(debug))
        if warn:
            logger.warning('Stderr: %s', '\n'.join(warn))


def dumb_pull_repo(gitdir, remotes, svn=False):
    """Fetch matching remotes in *gitdir* and report whether refs changed.

    :param gitdir: full path to the repository
    :param remotes: list of remote-name shell globs ('*' means all; for
                    svn it is translated to git-svn's --all)
    :param svn: use `git svn fetch` instead of `git remote update`
    :returns: True if any ref tips changed, False otherwise (including
              when the repo is locked by another process or has no
              remotes)
    """
    # verify it's a git repo and fetch all remotes
    logger.debug('Will pull %s with following remotes: %s', gitdir, remotes)
    old_revs = git_rev_parse_all(gitdir)

    try:
        grokmirror.lock_repo(gitdir, nonblocking=True)
    except IOError:
        logger.info('Could not obtain exclusive lock on %s', gitdir)
        logger.info('\tAssuming another process is running.')
        return False

    # BUGFIX: the "no defined remotes" early return below (and any
    # unexpected exception while fetching) previously left the repository
    # lock held.  The try/finally now guarantees unlock_repo() runs on
    # every exit path.
    try:
        if svn:
            logger.debug('Using git-svn for %s', gitdir)

            for remote in remotes:
                # arghie-argh-argh
                if remote == '*':
                    remote = '--all'

                logger.info('Running git-svn fetch %s in %s', remote, gitdir)
                args = ['svn', 'fetch', remote]
                git_remote_update(args, gitdir)

        else:
            # Not an svn remote
            myremotes = grokmirror.list_repo_remotes(gitdir)
            if not len(myremotes):
                logger.info('Repository %s has no defined remotes!', gitdir)
                return False

            logger.debug('existing remotes: %s', myremotes)
            for remote in remotes:
                remotefound = False
                for myremote in myremotes:
                    if fnmatch.fnmatch(myremote, remote):
                        remotefound = True
                        logger.debug('existing remote %s matches %s',
                                     myremote, remote)
                        args = ['remote', 'update', myremote, '--prune']
                        logger.info('Updating remote %s in %s',
                                    myremote, gitdir)

                        git_remote_update(args, gitdir)

                if not remotefound:
                    logger.info('Could not find any remotes matching %s in %s',
                                remote, gitdir)

        new_revs = git_rev_parse_all(gitdir)
    finally:
        grokmirror.unlock_repo(gitdir)

    if old_revs == new_revs:
        logger.debug('No new revs, no updates')
        return False

    logger.debug('New revs found -- new content pulled')
    return True


def run_post_update_hook(hookscript, gitdir):
    """Run *hookscript* with *gitdir* as its single argument.

    A no-op when hookscript is empty; a non-executable script is only
    warned about.  Hook stderr is logged as warning, stdout as info.
    """
    if hookscript == '':
        return
    if not os.access(hookscript, os.X_OK):
        logger.warning('post_update_hook %s is not executable', hookscript)
        return

    args = [hookscript, gitdir]
    logger.debug('Running: %s', ' '.join(args))
    (output, error) = subprocess.Popen(args, stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE).communicate()

    error = error.decode().strip()
    output = output.decode().strip()
    if error:
        # Put hook stderror into warning
        logger.warning('Hook Stderr: %s', error)
    if output:
        # Put hook stdout into info
        logger.info('Hook Stdout: %s', output)


def parse_args():
    """Build and evaluate the grok-dumb-pull command-line parser."""
    import argparse
    # noinspection PyTypeChecker
    op = argparse.ArgumentParser(prog='grok-dumb-pull',
                                 description='Fetch remotes in repositories not managed by grokmirror',
                                 formatter_class=argparse.ArgumentDefaultsHelpFormatter)
    op.add_argument('-v', '--verbose', dest='verbose', action='store_true',
                    default=False,
                    help='Be verbose and tell us what you are doing')
    op.add_argument('-s', '--svn', dest='svn', action='store_true',
                    default=False,
                    help='The remotes for these repositories are Subversion')
    op.add_argument('-r', '--remote-names', dest='remotes', action='append',
                    default=None,
                    help='Only fetch remotes matching this name (accepts shell globbing)')
    op.add_argument('-u', '--post-update-hook', dest='posthook',
                    default='',
                    help='Run this hook after each repository is updated.')
    op.add_argument('-l', '--logfile', dest='logfile',
                    default=None,
                    help='Put debug logs into this file')
    op.add_argument('--version', action='version', version=grokmirror.VERSION)
    op.add_argument('paths', nargs='+', help='Full path(s) of the repos to pull')

    opts = op.parse_args()

    # Defensive double-check; argparse nargs='+' already enforces this.
    if not len(opts.paths):
        op.error('You must provide at least a path to the repos to pull')

    return opts


def dumb_pull(paths, verbose=False, svn=False, remotes=None, posthook='',
              logfile=None):
    """Pull every repository found under *paths*, firing *posthook* on change.

    Entries ending in '.git' are treated as single repositories; any
    other entry is scanned recursively for git dirs.  When *remotes* is
    None, all remotes ('*') are fetched.
    """
    global logger

    loglevel = logging.INFO
    logger = grokmirror.init_logger('dumb-pull', logfile, loglevel, verbose)

    if remotes is None:
        remotes = ['*']

    # Find all repositories we are to pull
    for entry in paths:
        if entry[-4:] == '.git':
            if not os.path.exists(entry):
                logger.critical('%s does not exist', entry)
                continue

            logger.debug('Found %s', entry)
            didwork = dumb_pull_repo(entry, remotes, svn=svn)
            if didwork:
                run_post_update_hook(posthook, entry)
        else:
            logger.debug('Finding all git repos in %s', entry)
            for founddir in grokmirror.find_all_gitdirs(entry):
                didwork = dumb_pull_repo(founddir, remotes, svn=svn)
                if didwork:
                    run_post_update_hook(posthook, founddir)


def command():
    """Console-script entry point: parse argv and run dumb_pull()."""
    opts = parse_args()

    return dumb_pull(
        opts.paths, verbose=opts.verbose, svn=opts.svn, remotes=opts.remotes,
        posthook=opts.posthook, logfile=opts.logfile)


if __name__ == '__main__':
    command()
gpl-3.0
drufat/sympy
sympy/printing/tests/test_tableform.py
120
5650
from sympy import TableForm, S from sympy.printing.latex import latex from sympy.abc import x from sympy.functions.elementary.miscellaneous import sqrt from sympy.functions.elementary.trigonometric import sin from sympy.utilities.pytest import raises from textwrap import dedent def test_TableForm(): s = str(TableForm([["a", "b"], ["c", "d"], ["e", 0]], headings="automatic")) assert s == ( ' | 1 2\n' '-------\n' '1 | a b\n' '2 | c d\n' '3 | e ' ) s = str(TableForm([["a", "b"], ["c", "d"], ["e", 0]], headings="automatic", wipe_zeros=False)) assert s == dedent('''\ | 1 2 ------- 1 | a b 2 | c d 3 | e 0''') s = str(TableForm([[x**2, "b"], ["c", x**2], ["e", "f"]], headings=("automatic", None))) assert s == ( '1 | x**2 b \n' '2 | c x**2\n' '3 | e f ' ) s = str(TableForm([["a", "b"], ["c", "d"], ["e", "f"]], headings=(None, "automatic"))) assert s == dedent('''\ 1 2 --- a b c d e f''') s = str(TableForm([[5, 7], [4, 2], [10, 3]], headings=[["Group A", "Group B", "Group C"], ["y1", "y2"]])) assert s == ( ' | y1 y2\n' '---------------\n' 'Group A | 5 7 \n' 'Group B | 4 2 \n' 'Group C | 10 3 ' ) raises( ValueError, lambda: TableForm( [[5, 7], [4, 2], [10, 3]], headings=[["Group A", "Group B", "Group C"], ["y1", "y2"]], alignments="middle") ) s = str(TableForm([[5, 7], [4, 2], [10, 3]], headings=[["Group A", "Group B", "Group C"], ["y1", "y2"]], alignments="right")) assert s == dedent('''\ | y1 y2 --------------- Group A | 5 7 Group B | 4 2 Group C | 10 3''') # other alignment permutations d = [[1, 100], [100, 1]] s = TableForm(d, headings=(('xxx', 'x'), None), alignments='l') assert str(s) == ( 'xxx | 1 100\n' ' x | 100 1 ' ) s = TableForm(d, headings=(('xxx', 'x'), None), alignments='lr') assert str(s) == dedent('''\ xxx | 1 100 x | 100 1''') s = TableForm(d, headings=(('xxx', 'x'), None), alignments='clr') assert str(s) == dedent('''\ xxx | 1 100 x | 100 1''') s = TableForm(d, headings=(('xxx', 'x'), None)) assert str(s) == ( 'xxx | 1 100\n' ' x | 100 1 ' ) 
raises(ValueError, lambda: TableForm(d, alignments='clr')) #pad s = str(TableForm([[None, "-", 2], [1]], pad='?')) assert s == dedent('''\ ? - 2 1 ? ?''') def test_TableForm_latex(): s = latex(TableForm([[0, x**3], ["c", S(1)/4], [sqrt(x), sin(x**2)]], wipe_zeros=True, headings=("automatic", "automatic"))) assert s == ( '\\begin{tabular}{r l l}\n' ' & 1 & 2 \\\\\n' '\\hline\n' '1 & & $x^{3}$ \\\\\n' '2 & $c$ & $\\frac{1}{4}$ \\\\\n' '3 & $\\sqrt{x}$ & $\\sin{\\left (x^{2} \\right )}$ \\\\\n' '\\end{tabular}' ) s = latex(TableForm([[0, x**3], ["c", S(1)/4], [sqrt(x), sin(x**2)]], wipe_zeros=True, headings=("automatic", "automatic"), alignments='l')) assert s == ( '\\begin{tabular}{r l l}\n' ' & 1 & 2 \\\\\n' '\\hline\n' '1 & & $x^{3}$ \\\\\n' '2 & $c$ & $\\frac{1}{4}$ \\\\\n' '3 & $\\sqrt{x}$ & $\\sin{\\left (x^{2} \\right )}$ \\\\\n' '\\end{tabular}' ) s = latex(TableForm([[0, x**3], ["c", S(1)/4], [sqrt(x), sin(x**2)]], wipe_zeros=True, headings=("automatic", "automatic"), alignments='l'*3)) assert s == ( '\\begin{tabular}{l l l}\n' ' & 1 & 2 \\\\\n' '\\hline\n' '1 & & $x^{3}$ \\\\\n' '2 & $c$ & $\\frac{1}{4}$ \\\\\n' '3 & $\\sqrt{x}$ & $\\sin{\\left (x^{2} \\right )}$ \\\\\n' '\\end{tabular}' ) s = latex(TableForm([["a", x**3], ["c", S(1)/4], [sqrt(x), sin(x**2)]], headings=("automatic", "automatic"))) assert s == ( '\\begin{tabular}{r l l}\n' ' & 1 & 2 \\\\\n' '\\hline\n' '1 & $a$ & $x^{3}$ \\\\\n' '2 & $c$ & $\\frac{1}{4}$ \\\\\n' '3 & $\\sqrt{x}$ & $\\sin{\\left (x^{2} \\right )}$ \\\\\n' '\\end{tabular}' ) s = latex(TableForm([["a", x**3], ["c", S(1)/4], [sqrt(x), sin(x**2)]], formats=['(%s)', None], headings=("automatic", "automatic"))) assert s == ( '\\begin{tabular}{r l l}\n' ' & 1 & 2 \\\\\n' '\\hline\n' '1 & (a) & $x^{3}$ \\\\\n' '2 & (c) & $\\frac{1}{4}$ \\\\\n' '3 & (sqrt(x)) & $\\sin{\\left (x^{2} \\right )}$ \\\\\n' '\\end{tabular}' ) def neg_in_paren(x, i, j): if i % 2: return ('(%s)' if x < 0 else '%s') % x else: pass # use default print s = 
latex(TableForm([[-1, 2], [-3, 4]], formats=[neg_in_paren]*2, headings=("automatic", "automatic"))) assert s == ( '\\begin{tabular}{r l l}\n' ' & 1 & 2 \\\\\n' '\\hline\n' '1 & -1 & 2 \\\\\n' '2 & (-3) & 4 \\\\\n' '\\end{tabular}' ) s = latex(TableForm([["a", x**3], ["c", S(1)/4], [sqrt(x), sin(x**2)]])) assert s == ( '\\begin{tabular}{l l}\n' '$a$ & $x^{3}$ \\\\\n' '$c$ & $\\frac{1}{4}$ \\\\\n' '$\\sqrt{x}$ & $\\sin{\\left (x^{2} \\right )}$ \\\\\n' '\\end{tabular}' )
bsd-3-clause
fujunwei/chromium-crosswalk
native_client_sdk/src/build_tools/dsc2gyp.py
28
11228
#!/usr/bin/env python # Copyright (c) 2012 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. import argparse import StringIO import sys import os SCRIPT_DIR = os.path.dirname(os.path.abspath(__file__)) sys.path.append(os.path.join(os.path.dirname(SCRIPT_DIR), 'tools')) import getos valid_tools = ['newlib', 'glibc', getos.GetPlatform()] def Error(msg): print(msg) sys.exit(1) PREAMBLE = """\ { 'includes': ['%s/build_tools/nacl.gypi'], """ NEXE_TARGET = """\ { 'target_name': '%(NAME)s_x86_32%(EXT)s', 'product_name': '%(NAME)s_x86_32%(EXT)s', 'type': '%(GYP_TYPE)s', 'sources': %(SOURCES)s, 'libraries': %(LIBS)s, 'include_dirs': %(INCLUDES)s, 'cflags': ['-m32', '-pedantic'] + %(CFLAGS)s, 'make_valid_configurations': ['newlib-debug', 'newlib-release', 'glibc-debug', 'glibc-release'], 'ldflags': ['-m32', '-L../../lib/x86_32/<(CONFIGURATION_NAME)'], 'toolset': 'target', %(CONFIGS)s }, { 'target_name': '%(NAME)s_x86_64%(EXT)s', 'product_name': '%(NAME)s_x86_64%(EXT)s', 'type': '%(GYP_TYPE)s', 'sources': %(SOURCES)s, 'libraries': %(LIBS)s, 'include_dirs': %(INCLUDES)s, 'make_valid_configurations': ['newlib-debug', 'newlib-release', 'glibc-debug', 'glibc-release'], 'cflags': ['-m64', '-pedantic'] + %(CFLAGS)s, 'ldflags': ['-m64', '-L../../lib/x86_64/<(CONFIGURATION_NAME)'], 'toolset': 'target', %(CONFIGS)s }, """ NLIB_TARGET = """\ { 'target_name': '%(NAME)s_x86_32%(EXT)s', 'product_name': 'lib%(NAME)s%(EXT)s', 'product_dir': '../../lib/x86_32/<(CONFIGURATION_NAME)', 'type': '%(GYP_TYPE)s', 'sources': %(SOURCES)s, 'libraries': %(LIBS)s, 'include_dirs': %(INCLUDES)s, 'cflags': ['-m32', '-pedantic'] + %(CFLAGS)s, 'make_valid_configurations': ['newlib-debug', 'newlib-release', 'glibc-debug', 'glibc-release'], 'ldflags': ['-m32'], 'toolset': 'target', %(CONFIGS)s }, { 'target_name': '%(NAME)s_x86_64%(EXT)s', 'product_name': 'lib%(NAME)s%(EXT)s', 'product_dir': 
'../../lib/x86_64/<(CONFIGURATION_NAME)', 'type': '%(GYP_TYPE)s', 'sources': %(SOURCES)s, 'libraries': %(LIBS)s, 'include_dirs': %(INCLUDES)s, 'make_valid_configurations': ['newlib-debug', 'newlib-release', 'glibc-debug', 'glibc-release'], 'cflags': ['-m64', '-pedantic'] + %(CFLAGS)s, 'ldflags': ['-m64'], 'toolset': 'target', %(CONFIGS)s }, """ HOST_LIB_TARGET = """\ { 'target_name': '%(NAME)s%(EXT)s', 'type': '%(GYP_TYPE)s', 'toolset': 'host', 'sources': %(SOURCES)s, 'cflags': %(CFLAGS)s, 'cflags_c': ['-std=gnu99'], 'include_dirs': %(INCLUDES)s, 'make_valid_configurations': ['host-debug', 'host-release'], 'product_dir': '../../lib/%(ARCH)s/<(CONFIGURATION_NAME)', 'product_name': '%(NAME)s%(EXT)s', %(CONFIGS)s }, """ HOST_EXE_TARGET = """\ { 'target_name': '%(NAME)s%(EXT)s', 'type': '%(GYP_TYPE)s', 'toolset': 'host', 'sources': %(SOURCES)s, 'cflags': %(CFLAGS)s, 'cflags_c': ['-std=gnu99'], 'ldflags': ['-L../../lib/%(ARCH)s/<(CONFIGURATION_NAME)'], 'libraries': %(LIBS)s, 'include_dirs': %(INCLUDES)s, 'make_valid_configurations': ['host-debug', 'host-release'], 'msvs_settings': { 'VCLinkerTool': { 'AdditionalLibraryDirectories': ['../../lib/%(ARCH)s/<(CONFIGURATION_NAME)'], } }, %(CONFIGS)s }, """ NMF_TARGET = """\ { 'target_name': '%(NAME)s_%(TOOLCHAIN)s.nmf', 'product_name': '%(NAME)s.nmf', 'product_dir': '<(PRODUCT_DIR)/%(TOOLCHAIN)s', 'type': 'none', 'make_valid_configurations': ['%(TOOLCHAIN)s-debug', '%(TOOLCHAIN)s-release'], 'actions': [ { 'action_name': 'nmf', 'inputs': ['<(PRODUCT_DIR)/%(NAME)s_x86_32.nexe', '<(PRODUCT_DIR)/%(NAME)s_x86_64.nexe'] + %(SODEPS)s, 'outputs': ['<(PRODUCT_DIR)/%(NAME)s.nmf'], 'action': ['../../tools/create_nmf.py', '-t', '%(TOOLCHAIN)s', '-s', '<(PRODUCT_DIR)'] + %(NMFACTION)s, }, ] }, """ TOOLCHAIN_CONFIG = """\ '%(toolchain)s-release' : { 'cflags' : ['-O2'], }, '%(toolchain)s-debug' : { 'cflags' : ['-g', '-O0'], }, """ NEXE_CONFIG = """\ '%(toolchain)s-release' : { 'cflags' : ['--%(toolchain)s', '-O2', '-idirafter', 
'../../include'], 'ldflags' : ['--%(toolchain)s'], 'arflags' : ['--%(toolchain)s'], }, '%(toolchain)s-debug' : { 'cflags' : ['--%(toolchain)s', '-g', '-O0', '-idirafter', '../../include'], 'ldflags' : ['--%(toolchain)s'], 'arflags' : ['--%(toolchain)s'], }, """ WIN32_CONFIGS = """\ 'target_defaults': { 'default_configuration': 'Debug_PPAPI', 'configurations': { 'Debug_PPAPI': { 'msvs_configuration_platform': 'PPAPI', 'msbuild_configuration_attributes': { 'ConfigurationType': 'DynamicLibrary' }, 'include_dirs': ['../../include/win'], 'defines': ['_WINDOWS', '_DEBUG', 'WIN32'], }, 'Release_PPAPI': { 'msvs_configuration_platform': 'PPAPI', 'msbuild_configuration_attributes': { 'ConfigurationType': 'DynamicLibrary' }, 'include_dirs': ['../../include/win'], 'defines': ['_WINDOWS', 'NDEBUG', 'WIN32'], }, 'Debug_NaCl': { 'msvs_configuration_platform': 'NaCl', 'msbuild_configuration_attributes': { 'ConfigurationType': 'Application' }, }, 'Release_NaCl': { 'msvs_configuration_platform': 'NaCl', 'msbuild_configuration_attributes': { 'ConfigurationType': 'Application' }, }, }, }, """ def WriteNaClTargets(output, target, tools): configs = "'configurations' : {\n" for tc in tools: if tc not in valid_tools: continue if tc in ['newlib', 'glibc']: configs += NEXE_CONFIG % {'toolchain': tc} configs += " }" target['CONFIGS'] = configs if target['TYPE'] == 'lib': output.write(NLIB_TARGET % target) else: output.write(NEXE_TARGET % target) def ConfigName(toolchain): if toolchain == getos.GetPlatform(): return 'host' else: return toolchain def ProcessDSC(filename, outfile=None): if not os.path.exists(filename): Error("file not found: %s" % filename) desc = open(filename).read() desc = eval(desc, {}, {}) if not desc.get('TARGETS'): Error("no TARGETS found in dsc") if not outfile: outfile = desc['NAME'] + '.gyp' outfile = os.path.join(os.path.dirname(filename), outfile) output = StringIO.StringIO() srcdir = os.path.dirname(SCRIPT_DIR) output.write(PREAMBLE % srcdir.replace("\\", '/')) 
win32 = sys.platform in ('win32', 'cygwin') if win32: output.write(WIN32_CONFIGS) else: for tc in desc['TOOLS']: if tc in valid_tools: default = '%s-debug' % ConfigName(tc) break output.write("""\ 'target_defaults': { 'default_configuration': '%s', 'configurations' : {\n""" % default) for tc in desc['TOOLS']: if tc not in valid_tools: continue output.write(TOOLCHAIN_CONFIG % {'toolchain': ConfigName(tc)}) output.write(" }\n },\n") output.write("\n 'targets': [\n") # make a list of all the so target names so that the nmf rules # can depend on them all sofiles = [] soremap = [] for target in desc['TARGETS']: if target['TYPE'] == 'so': name = target['NAME'] sofiles.append('<(PRODUCT_DIR)/%s_x86_64.so' % name) sofiles.append('<(PRODUCT_DIR)/%s_x86_32.so' % name) soremap += ['-n', '%s_x86_64.so,%s.so' % (name, name)] soremap += ['-n', '%s_x86_32.so,%s.so' % (name, name)] # iterate through dsc targets generating gyp targets for target in desc['TARGETS']: target.setdefault('INCLUDES', []) target['INCLUDES'] = [x.replace("$(NACL_SDK_ROOT)", "../..") for x in target['INCLUDES']] libs = target.get('LIBS', []) if win32: libs = [l for l in libs if l not in ('ppapi', 'ppapi_cpp')] target['LIBS'] = ['-l' + l + '.lib' for l in libs] else: target['LIBS'] = ['-l' + l for l in libs] if target['TYPE'] == 'so': if win32: target['EXT'] = '' else: target['EXT'] = '.so' target['GYP_TYPE'] = 'shared_library' elif target['TYPE'] == 'lib': if win32: target['EXT'] = '' else: target['EXT'] = '.a' target['GYP_TYPE'] = 'static_library' elif target['TYPE'] == 'main': target['EXT'] = '.nexe' target['GYP_TYPE'] = 'executable' else: Error("unknown type: %s" % target['TYPE']) target['CFLAGS'] = target.get('CXXFLAGS', []) if not win32 and ('newlib' in desc['TOOLS'] or 'glibc' in desc['TOOLS']): WriteNaClTargets(output, target, desc['TOOLS']) if target['TYPE'] == 'main': target['SODEPS'] = sofiles target['NMFACTION'] = ['-o', '<@(_outputs)', '-L<(NMF_PATH1)', '-L<(NMF_PATH2)', '-D', '<(OBJDUMP)', 
'<@(_inputs)'] target['NMFACTION'] += soremap if 'newlib' in desc['TOOLS']: target['TOOLCHAIN'] = 'newlib' output.write(NMF_TARGET % target) if 'glibc' in desc['TOOLS']: target['TOOLCHAIN'] = 'glibc' output.write(NMF_TARGET % target) if win32 or getos.GetPlatform() in desc['TOOLS']: target['ARCH'] = 'x86_32' target['INCLUDES'].append('../../include') if win32: target['HOST'] = 'win' target['CONFIGS'] = '' target['CFLAGS'] = [] else: target['CONFIGS'] = '' target['HOST'] = 'linux' target['CFLAGS'].append('-fPIC') if target['TYPE'] == 'main': target['GYP_TYPE'] = 'shared_library' if win32: target['EXT'] = '' else: target['EXT'] = '.so' output.write(HOST_EXE_TARGET % target) else: output.write(HOST_LIB_TARGET % target) output.write(' ],\n}\n') print('Writing: ' + outfile) open(outfile, 'w').write(output.getvalue()) def main(args): parser = argparse.ArgumentParser() parser.add_argument('-o', help='Set output filename.', dest='output') parser.add_argument('dsc', help='dsc to convert') options = parser.parse_args(args) if options.output: outdir = os.path.dirname(options.output) if not os.path.exists(outdir): os.makedirs(outdir) ProcessDSC(options.dsc, options.output) return 0 if __name__ == '__main__': sys.exit(main(sys.argv[1:]))
bsd-3-clause
Letractively/rdflib
rdflib/plugins/serializers/xmlwriter.py
1
3201
import codecs from xml.sax.saxutils import quoteattr, escape __all__ = ['XMLWriter'] class XMLWriter(object): def __init__(self, stream, namespace_manager, encoding=None, decl=1, extra_ns={}): encoding = encoding or 'utf-8' encoder, decoder, stream_reader, stream_writer = codecs.lookup(encoding) self.stream = stream = stream_writer(stream) if decl: stream.write('<?xml version="1.0" encoding="%s"?>' % encoding) self.element_stack = [] self.nm = namespace_manager self.extra_ns=extra_ns self.closed = True def __get_indent(self): return " " * len(self.element_stack) indent = property(__get_indent) def __close_start_tag(self): if not self.closed: # TODO: self.closed = True self.stream.write(">") def push(self, uri): self.__close_start_tag() write = self.stream.write write("\n") write(self.indent) write("<%s" % self.qname(uri)) self.element_stack.append(uri) self.closed = False self.parent = False def pop(self, uri=None): top = self.element_stack.pop() if uri: assert uri == top write = self.stream.write if not self.closed: self.closed = True write("/>") else: if self.parent: write("\n") write(self.indent) write("</%s>" % self.qname(top)) self.parent = True def element(self, uri, content, attributes={}): """Utility method for adding a complete simple element""" self.push(uri) for k, v in attributes.iteritems(): self.attribute(k,v) self.text(content) self.pop() def namespaces(self, namespaces=None): if not namespaces: namespaces=self.nm.namespaces() write = self.stream.write write("\n") for prefix, namespace in namespaces: if prefix: write(' xmlns:%s="%s"\n' % (prefix, namespace)) else: write(' xmlns="%s"\n' % namespace) for prefix, namespace in self.extra_ns.items(): if prefix: write(' xmlns:%s="%s"\n' % (prefix, namespace)) else: write(' xmlns="%s"\n' % namespace) def attribute(self, uri, value): write = self.stream.write write(" %s=%s" % (self.qname(uri), quoteattr(value))) def text(self, text): self.__close_start_tag() if "<" in text and ">" in text and not "]]>" in 
text: self.stream.write("<![CDATA[") self.stream.write(text) self.stream.write("]]>") else: self.stream.write(escape(text)) def qname(self,uri): """Compute qname for a uri using our extra namespaces, or the given namespace manager""" for pre,ns in self.extra_ns.items(): if uri.startswith(ns): if pre!="": return ":".join(pre,uri[len(ns):]) else: return uri[len(ns):] return self.nm.qname(uri)
bsd-3-clause
sean-abbott/auto_build_env
src/abe/_version.py
1
15772
# This file helps to compute a version number in source trees obtained from # git-archive tarball (such as those provided by githubs download-from-tag # feature). Distribution tarballs (built by setup.py sdist) and build # directories (produced by setup.py build) will contain a much shorter file # that just contains the computed version number. # This file is released into the public domain. Generated by # versioneer-0.15 (https://github.com/warner/python-versioneer) import errno import os import re import subprocess import sys def get_keywords(): # these strings will be replaced by git during git-archive. # setup.py/versioneer.py will grep for the variable names, so they must # each be defined on a line of their own. _version.py will just call # get_keywords(). git_refnames = "$Format:%d$" git_full = "$Format:%H$" keywords = {"refnames": git_refnames, "full": git_full} return keywords class VersioneerConfig: pass def get_config(): # these strings are filled in when 'setup.py versioneer' creates # _version.py cfg = VersioneerConfig() cfg.VCS = "git" cfg.style = "pep440" cfg.tag_prefix = "" cfg.parentdir_prefix = "auto_build_env-" cfg.versionfile_source = "src/abe/_version.py" cfg.verbose = False return cfg class NotThisMethod(Exception): pass LONG_VERSION_PY = {} HANDLERS = {} def register_vcs_handler(vcs, method): # decorator def decorate(f): if vcs not in HANDLERS: HANDLERS[vcs] = {} HANDLERS[vcs][method] = f return f return decorate def run_command(commands, args, cwd=None, verbose=False, hide_stderr=False): assert isinstance(commands, list) p = None for c in commands: try: dispcmd = str([c] + args) # remember shell=False, so use git.cmd on windows, not just git p = subprocess.Popen([c] + args, cwd=cwd, stdout=subprocess.PIPE, stderr=(subprocess.PIPE if hide_stderr else None)) break except EnvironmentError: e = sys.exc_info()[1] if e.errno == errno.ENOENT: continue if verbose: print("unable to run %s" % dispcmd) print(e) return None else: if verbose: 
print("unable to find command, tried %s" % (commands,)) return None stdout = p.communicate()[0].strip() if sys.version_info[0] >= 3: stdout = stdout.decode() if p.returncode != 0: if verbose: print("unable to run %s (error)" % dispcmd) return None return stdout def versions_from_parentdir(parentdir_prefix, root, verbose): # Source tarballs conventionally unpack into a directory that includes # both the project name and a version string. dirname = os.path.basename(root) if not dirname.startswith(parentdir_prefix): if verbose: print("guessing rootdir is '%s', but '%s' doesn't start with " "prefix '%s'" % (root, dirname, parentdir_prefix)) raise NotThisMethod("rootdir doesn't start with parentdir_prefix") return {"version": dirname[len(parentdir_prefix):], "full-revisionid": None, "dirty": False, "error": None} @register_vcs_handler("git", "get_keywords") def git_get_keywords(versionfile_abs): # the code embedded in _version.py can just fetch the value of these # keywords. When used from setup.py, we don't want to import _version.py, # so we do it with a regexp instead. This function is not used from # _version.py. 
keywords = {} try: f = open(versionfile_abs, "r") for line in f.readlines(): if line.strip().startswith("git_refnames ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["refnames"] = mo.group(1) if line.strip().startswith("git_full ="): mo = re.search(r'=\s*"(.*)"', line) if mo: keywords["full"] = mo.group(1) f.close() except EnvironmentError: pass return keywords @register_vcs_handler("git", "keywords") def git_versions_from_keywords(keywords, tag_prefix, verbose): if not keywords: raise NotThisMethod("no keywords at all, weird") refnames = keywords["refnames"].strip() if refnames.startswith("$Format"): if verbose: print("keywords are unexpanded, not using") raise NotThisMethod("unexpanded keywords, not a git-archive tarball") refs = set([r.strip() for r in refnames.strip("()").split(",")]) # starting in git-1.8.3, tags are listed as "tag: foo-1.0" instead of # just "foo-1.0". If we see a "tag: " prefix, prefer those. TAG = "tag: " tags = set([r[len(TAG):] for r in refs if r.startswith(TAG)]) if not tags: # Either we're using git < 1.8.3, or there really are no tags. We use # a heuristic: assume all version tags have a digit. The old git %d # expansion behaves like git log --decorate=short and strips out the # refs/heads/ and refs/tags/ prefixes that would let us distinguish # between branches and tags. By ignoring refnames without digits, we # filter out many common branch names like "release" and # "stabilization", as well as "HEAD" and "master". tags = set([r for r in refs if re.search(r'\d', r)]) if verbose: print("discarding '%s', no digits" % ",".join(refs-tags)) if verbose: print("likely tags: %s" % ",".join(sorted(tags))) for ref in sorted(tags): # sorting will prefer e.g. 
"2.0" over "2.0rc1" if ref.startswith(tag_prefix): r = ref[len(tag_prefix):] if verbose: print("picking %s" % r) return {"version": r, "full-revisionid": keywords["full"].strip(), "dirty": False, "error": None } # no suitable tags, so version is "0+unknown", but full hex is still there if verbose: print("no suitable tags, using unknown + full revision id") return {"version": "0+unknown", "full-revisionid": keywords["full"].strip(), "dirty": False, "error": "no suitable tags"} @register_vcs_handler("git", "pieces_from_vcs") def git_pieces_from_vcs(tag_prefix, root, verbose, run_command=run_command): # this runs 'git' from the root of the source tree. This only gets called # if the git-archive 'subst' keywords were *not* expanded, and # _version.py hasn't already been rewritten with a short version string, # meaning we're inside a checked out source tree. if not os.path.exists(os.path.join(root, ".git")): if verbose: print("no .git in %s" % root) raise NotThisMethod("no .git directory") GITS = ["git"] if sys.platform == "win32": GITS = ["git.cmd", "git.exe"] # if there is a tag, this yields TAG-NUM-gHEX[-dirty] # if there are no tags, this yields HEX[-dirty] (no NUM) describe_out = run_command(GITS, ["describe", "--tags", "--dirty", "--always", "--long"], cwd=root) # --long was added in git-1.5.5 if describe_out is None: raise NotThisMethod("'git describe' failed") describe_out = describe_out.strip() full_out = run_command(GITS, ["rev-parse", "HEAD"], cwd=root) if full_out is None: raise NotThisMethod("'git rev-parse' failed") full_out = full_out.strip() pieces = {} pieces["long"] = full_out pieces["short"] = full_out[:7] # maybe improved later pieces["error"] = None # parse describe_out. It will be like TAG-NUM-gHEX[-dirty] or HEX[-dirty] # TAG might have hyphens. 
git_describe = describe_out # look for -dirty suffix dirty = git_describe.endswith("-dirty") pieces["dirty"] = dirty if dirty: git_describe = git_describe[:git_describe.rindex("-dirty")] # now we have TAG-NUM-gHEX or HEX if "-" in git_describe: # TAG-NUM-gHEX mo = re.search(r'^(.+)-(\d+)-g([0-9a-f]+)$', git_describe) if not mo: # unparseable. Maybe git-describe is misbehaving? pieces["error"] = ("unable to parse git-describe output: '%s'" % describe_out) return pieces # tag full_tag = mo.group(1) if not full_tag.startswith(tag_prefix): if verbose: fmt = "tag '%s' doesn't start with prefix '%s'" print(fmt % (full_tag, tag_prefix)) pieces["error"] = ("tag '%s' doesn't start with prefix '%s'" % (full_tag, tag_prefix)) return pieces pieces["closest-tag"] = full_tag[len(tag_prefix):] # distance: number of commits since tag pieces["distance"] = int(mo.group(2)) # commit: short hex revision ID pieces["short"] = mo.group(3) else: # HEX: no tags pieces["closest-tag"] = None count_out = run_command(GITS, ["rev-list", "HEAD", "--count"], cwd=root) pieces["distance"] = int(count_out) # total number of commits return pieces def plus_or_dot(pieces): if "+" in pieces.get("closest-tag", ""): return "." return "+" def render_pep440(pieces): # now build up version string, with post-release "local version # identifier". Our goal: TAG[+DISTANCE.gHEX[.dirty]] . Note that if you # get a tagged build and then dirty it, you'll get TAG+0.gHEX.dirty # exceptions: # 1: no tags. git_describe was just HEX. 0+untagged.DISTANCE.gHEX[.dirty] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += plus_or_dot(pieces) rendered += "%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" else: # exception #1 rendered = "0+untagged.%d.g%s" % (pieces["distance"], pieces["short"]) if pieces["dirty"]: rendered += ".dirty" return rendered def render_pep440_pre(pieces): # TAG[.post.devDISTANCE] . 
No -dirty # exceptions: # 1: no tags. 0.post.devDISTANCE if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += ".post.dev%d" % pieces["distance"] else: # exception #1 rendered = "0.post.dev%d" % pieces["distance"] return rendered def render_pep440_post(pieces): # TAG[.postDISTANCE[.dev0]+gHEX] . The ".dev0" means dirty. Note that # .dev0 sorts backwards (a dirty tree will appear "older" than the # corresponding clean one), but you shouldn't be releasing software with # -dirty anyways. # exceptions: # 1: no tags. 0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += plus_or_dot(pieces) rendered += "g%s" % pieces["short"] else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" rendered += "+g%s" % pieces["short"] return rendered def render_pep440_old(pieces): # TAG[.postDISTANCE[.dev0]] . The ".dev0" means dirty. # exceptions: # 1: no tags. 0.postDISTANCE[.dev0] if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"] or pieces["dirty"]: rendered += ".post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" else: # exception #1 rendered = "0.post%d" % pieces["distance"] if pieces["dirty"]: rendered += ".dev0" return rendered def render_git_describe(pieces): # TAG[-DISTANCE-gHEX][-dirty], like 'git describe --tags --dirty # --always' # exceptions: # 1: no tags. HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] if pieces["distance"]: rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render_git_describe_long(pieces): # TAG-DISTANCE-gHEX[-dirty], like 'git describe --tags --dirty # --always -long'. 
The distance/hash is unconditional. # exceptions: # 1: no tags. HEX[-dirty] (note: no 'g' prefix) if pieces["closest-tag"]: rendered = pieces["closest-tag"] rendered += "-%d-g%s" % (pieces["distance"], pieces["short"]) else: # exception #1 rendered = pieces["short"] if pieces["dirty"]: rendered += "-dirty" return rendered def render(pieces, style): if pieces["error"]: return {"version": "unknown", "full-revisionid": pieces.get("long"), "dirty": None, "error": pieces["error"]} if not style or style == "default": style = "pep440" # the default if style == "pep440": rendered = render_pep440(pieces) elif style == "pep440-pre": rendered = render_pep440_pre(pieces) elif style == "pep440-post": rendered = render_pep440_post(pieces) elif style == "pep440-old": rendered = render_pep440_old(pieces) elif style == "git-describe": rendered = render_git_describe(pieces) elif style == "git-describe-long": rendered = render_git_describe_long(pieces) else: raise ValueError("unknown style '%s'" % style) return {"version": rendered, "full-revisionid": pieces["long"], "dirty": pieces["dirty"], "error": None} def get_versions(): # I am in _version.py, which lives at ROOT/VERSIONFILE_SOURCE. If we have # __file__, we can work backwards from there to the root. Some # py2exe/bbfreeze/non-CPython implementations don't do __file__, in which # case we can only use expanded keywords. cfg = get_config() verbose = cfg.verbose try: return git_versions_from_keywords(get_keywords(), cfg.tag_prefix, verbose) except NotThisMethod: pass try: root = os.path.realpath(__file__) # versionfile_source is the relative path from the top of the source # tree (where the .git directory might live) to this file. Invert # this to find the root from __file__. 
for i in cfg.versionfile_source.split('/'): root = os.path.dirname(root) except NameError: return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to find root of source tree"} try: pieces = git_pieces_from_vcs(cfg.tag_prefix, root, verbose) return render(pieces, cfg.style) except NotThisMethod: pass try: if cfg.parentdir_prefix: return versions_from_parentdir(cfg.parentdir_prefix, root, verbose) except NotThisMethod: pass return {"version": "0+unknown", "full-revisionid": None, "dirty": None, "error": "unable to compute version"}
mit
ch1huizong/dj
onlineshop/myshop/orders/models.py
1
1856
from decimal import Decimal from django.db import models from django.core.validators import ( MinValueValidator, MaxValueValidator, ) from shop.models import Product from coupons.models import Coupon class Order(models.Model): first_name = models.CharField(max_length=50) second_name = models.CharField(max_length=50) email = models.EmailField() address = models.CharField(max_length=250) postal_code = models.CharField(max_length=20) city = models.CharField(max_length=100) created = models.DateTimeField(auto_now_add=True) updated = models.DateTimeField(auto_now=True) paid = models.BooleanField(default=False) braintree_id = models.CharField(max_length=150, blank=True) coupon = models.ForeignKey( Coupon, related_name='orders', null=True, blank=True, on_delete=models.SET_NULL ) discount = models.IntegerField( default=0, validators=[ MinValueValidator(0), MaxValueValidator(100), ] ) class Meta: ordering = ('-created',) def __str__(self): return 'Order {}'.format(self.id) def get_total_cost(self): total_cost = sum(item.get_cost() for item in self.items.all()) return total_cost - total_cost * (self.discount / Decimal('100')) class OrderItem(models.Model): order = models.ForeignKey( Order, related_name='items', on_delete=models.CASCADE ) product = models.ForeignKey( Product, related_name='order_items', on_delete=models.CASCADE ) price = models.DecimalField(max_digits=10, decimal_places=2) quantity = models.PositiveIntegerField(default=1) def __str__(self): return '{}'.format(self.id) def get_cost(self): return self.price * self.quantity
unlicense
kkintaro/termite-data-server
demo.py
4
3176
#!/usr/bin/env python # -*- coding: utf-8 -*- import argparse import subprocess DEFAULT_DATASET = 'infovis' DATASETS = [ DEFAULT_DATASET, '20newsgroups', 'nsfgrants', 'nsf25k', 'nsf10k', 'nsf1k', 'poliblogs', 'gjp', 'fomc', 'CR_financial_collapse', 'CR_stock_market_plunge', 'FCIC_final_report', 'FCIC_first_hearing', 'FR_federal_open_market_committee', 'FR_monetary_policy_hearings' ] DEFAULT_MODEL = 'mallet' MODELS = [ DEFAULT_MODEL, 'treetm', 'stmt', 'stm', 'gensim' ] def Shell(command): p = subprocess.Popen(command, stdout = subprocess.PIPE, stderr = subprocess.STDOUT) while p.poll() is None: line = p.stdout.readline().rstrip('\n') if len(line) > 0: print line def Demonstrate(dataset, model, is_quiet, force_overwrite): database_folder = 'data/demo/{}/corpus'.format(dataset) corpus_folder = 'data/demo/{}/corpus'.format(dataset) model_folder = 'data/demo/{}/model-{}'.format(dataset, model) app_name = '{}_{}'.format(dataset, model) def PrepareDataset(): executable = 'bin/fetch_dataset.sh' Shell([executable, dataset]) def PrepareModel(): executable = 'bin/setup_{}.sh'.format(model) command = [executable] Shell(command) def PrepareOthers(): executable = 'bin/setup_mallet.sh' command = [executable] Shell(command) executable = 'bin/setup_corenlp.sh' command = [executable] Shell(command) def TrainModel(): executable = 'bin/train_{}.py'.format(model) command = [executable, corpus_folder, model_folder] if is_quiet: command.append('--quiet') if force_overwrite: command.append('--overwrite') Shell(command) def ImportModel(): executable = 'bin/read_{}.py'.format(model) command = [executable, app_name, model_folder, corpus_folder, database_folder] if is_quiet: command.append('--quiet') if force_overwrite: command.append('--overwrite') Shell(command) print '--------------------------------------------------------------------------------' print 'Build a topic model ({}) using a demo dataset ({})'.format(model, dataset) print ' database = {}'.format(database_folder) print ' corpus 
= {}'.format(corpus_folder) print ' model = {}'.format(model_folder) print ' app = {}'.format(app_name) print '--------------------------------------------------------------------------------' PrepareDataset() PrepareModel() PrepareOthers() TrainModel() ImportModel() def main(): parser = argparse.ArgumentParser( description = 'Import a MALLET topic model as a web2py application.' ) parser.add_argument( 'dataset' , nargs = '?', type = str, default = DEFAULT_DATASET, choices = DATASETS, help = 'Dataset identifier' ) parser.add_argument( 'model' , nargs = '?', type = str, default = DEFAULT_MODEL , choices = MODELS , help = 'Model type' ) parser.add_argument( '--quiet' , const = True, default = False, action = 'store_const', help = 'Show fewer debugging messages' ) parser.add_argument( '--overwrite' , const = True, default = False, action = 'store_const', help = 'Overwrite any existing model' ) args = parser.parse_args() Demonstrate( args.dataset, args.model, args.quiet, args.overwrite ) if __name__ == '__main__': main()
bsd-3-clause
28harishkumar/django-diary
static/includes/ionicons-2.0.1/builder/scripts/generate_font.py
348
5381
# Font generation script from FontCustom # https://github.com/FontCustom/fontcustom/ # http://fontcustom.com/ import fontforge import os import md5 import subprocess import tempfile import json import copy SCRIPT_PATH = os.path.dirname(os.path.abspath(__file__)) INPUT_SVG_DIR = os.path.join(SCRIPT_PATH, '..', '..', 'src') OUTPUT_FONT_DIR = os.path.join(SCRIPT_PATH, '..', '..', 'fonts') MANIFEST_PATH = os.path.join(SCRIPT_PATH, '..', 'manifest.json') BUILD_DATA_PATH = os.path.join(SCRIPT_PATH, '..', 'build_data.json') AUTO_WIDTH = True KERNING = 15 cp = 0xf100 m = md5.new() f = fontforge.font() f.encoding = 'UnicodeFull' f.design_size = 16 f.em = 512 f.ascent = 448 f.descent = 64 manifest_file = open(MANIFEST_PATH, 'r') manifest_data = json.loads(manifest_file.read()) manifest_file.close() print "Load Manifest, Icons: %s" % ( len(manifest_data['icons']) ) build_data = copy.deepcopy(manifest_data) build_data['icons'] = [] font_name = manifest_data['name'] m.update(font_name + ';') m.update(manifest_data['prefix'] + ';') for dirname, dirnames, filenames in os.walk(INPUT_SVG_DIR): for filename in filenames: name, ext = os.path.splitext(filename) filePath = os.path.join(dirname, filename) size = os.path.getsize(filePath) if ext in ['.svg', '.eps']: # see if this file is already in the manifest chr_code = None for ionicon in manifest_data['icons']: if ionicon['name'] == name: chr_code = ionicon['code'] break if chr_code is None: # this is a new src icon print 'New Icon: \n - %s' % (name) while True: chr_code = '0x%x' % (cp) already_exists = False for ionicon in manifest_data['icons']: if ionicon.get('code') == chr_code: already_exists = True cp += 1 chr_code = '0x%x' % (cp) continue if not already_exists: break print ' - %s' % chr_code manifest_data['icons'].append({ 'name': name, 'code': chr_code }) build_data['icons'].append({ 'name': name, 'code': chr_code }) if ext in ['.svg']: # hack removal of <switch> </switch> tags svgfile = open(filePath, 'r+') tmpsvgfile = 
tempfile.NamedTemporaryFile(suffix=ext, delete=False) svgtext = svgfile.read() svgfile.seek(0) # replace the <switch> </switch> tags with 'nothing' svgtext = svgtext.replace('<switch>', '') svgtext = svgtext.replace('</switch>', '') tmpsvgfile.file.write(svgtext) svgfile.close() tmpsvgfile.file.close() filePath = tmpsvgfile.name # end hack m.update(name + str(size) + ';') glyph = f.createChar( int(chr_code, 16) ) glyph.importOutlines(filePath) # if we created a temporary file, let's clean it up if tmpsvgfile: os.unlink(tmpsvgfile.name) # set glyph size explicitly or automatically depending on autowidth if AUTO_WIDTH: glyph.left_side_bearing = glyph.right_side_bearing = 0 glyph.round() # resize glyphs if autowidth is enabled if AUTO_WIDTH: f.autoWidth(0, 0, 512) fontfile = '%s/ionicons' % (OUTPUT_FONT_DIR) build_hash = m.hexdigest() if build_hash == manifest_data.get('build_hash'): print "Source files unchanged, did not rebuild fonts" else: manifest_data['build_hash'] = build_hash f.fontname = font_name f.familyname = font_name f.fullname = font_name f.generate(fontfile + '.ttf') f.generate(fontfile + '.svg') # Fix SVG header for webkit # from: https://github.com/fontello/font-builder/blob/master/bin/fontconvert.py svgfile = open(fontfile + '.svg', 'r+') svgtext = svgfile.read() svgfile.seek(0) svgfile.write(svgtext.replace('''<svg>''', '''<svg xmlns="http://www.w3.org/2000/svg">''')) svgfile.close() scriptPath = os.path.dirname(os.path.realpath(__file__)) try: subprocess.Popen([scriptPath + '/sfnt2woff', fontfile + '.ttf'], stdout=subprocess.PIPE) except OSError: # If the local version of sfnt2woff fails (i.e., on Linux), try to use the # global version. This allows us to avoid forcing OS X users to compile # sfnt2woff from source, simplifying install. 
subprocess.call(['sfnt2woff', fontfile + '.ttf']) # eotlitetool.py script to generate IE7-compatible .eot fonts subprocess.call('python ' + scriptPath + '/eotlitetool.py ' + fontfile + '.ttf -o ' + fontfile + '.eot', shell=True) subprocess.call('mv ' + fontfile + '.eotlite ' + fontfile + '.eot', shell=True) # Hint the TTF file subprocess.call('ttfautohint -s -f -n ' + fontfile + '.ttf ' + fontfile + '-hinted.ttf > /dev/null 2>&1 && mv ' + fontfile + '-hinted.ttf ' + fontfile + '.ttf', shell=True) manifest_data['icons'] = sorted(manifest_data['icons'], key=lambda k: k['name']) build_data['icons'] = sorted(build_data['icons'], key=lambda k: k['name']) print "Save Manifest, Icons: %s" % ( len(manifest_data['icons']) ) f = open(MANIFEST_PATH, 'w') f.write( json.dumps(manifest_data, indent=2, separators=(',', ': ')) ) f.close() print "Save Build, Icons: %s" % ( len(build_data['icons']) ) f = open(BUILD_DATA_PATH, 'w') f.write( json.dumps(build_data, indent=2, separators=(',', ': ')) ) f.close()
apache-2.0
ubc/edx-platform
pavelib/quality.py
20
18683
""" Check code quality using pep8, pylint, and diff_quality. """ from paver.easy import sh, task, cmdopts, needs, BuildFailure import os import re from .utils.envs import Env ALL_SYSTEMS = 'lms,cms,common,openedx,pavelib' def top_python_dirs(dirname): """ Find the directories to start from in order to find all the Python files in `dirname`. """ top_dirs = [] dir_init = os.path.join(dirname, "__init__.py") if os.path.exists(dir_init): top_dirs.append(dirname) for directory in ['djangoapps', 'lib']: subdir = os.path.join(dirname, directory) subdir_init = os.path.join(subdir, "__init__.py") if os.path.exists(subdir) and not os.path.exists(subdir_init): dirs = os.listdir(subdir) top_dirs.extend(d for d in dirs if os.path.isdir(os.path.join(subdir, d))) return top_dirs @task @needs('pavelib.prereqs.install_python_prereqs') @cmdopts([ ("system=", "s", "System to act on"), ]) def find_fixme(options): """ Run pylint on system code, only looking for fixme items. """ num_fixme = 0 systems = getattr(options, 'system', ALL_SYSTEMS).split(',') for system in systems: # Directory to put the pylint report in. # This makes the folder if it doesn't already exist. 
report_dir = (Env.REPORT_DIR / system).makedirs_p() apps_list = ' '.join(top_python_dirs(system)) pythonpath_prefix = ( "PYTHONPATH={system}:{system}/lib" "common/djangoapps:common/lib".format( system=system ) ) sh( "{pythonpath_prefix} pylint --disable R,C,W,E --enable=fixme " "--msg-template={msg_template} {apps} " "| tee {report_dir}/pylint_fixme.report".format( pythonpath_prefix=pythonpath_prefix, msg_template='"{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}"', apps=apps_list, report_dir=report_dir ) ) num_fixme += _count_pylint_violations( "{report_dir}/pylint_fixme.report".format(report_dir=report_dir)) print "Number of pylint fixmes: " + str(num_fixme) @task @needs('pavelib.prereqs.install_python_prereqs') @cmdopts([ ("system=", "s", "System to act on"), ("errors", "e", "Check for errors only"), ("limit=", "l", "limit for number of acceptable violations"), ]) def run_pylint(options): """ Run pylint on system code. When violations limit is passed in, fail the task if too many violations are found. """ num_violations = 0 violations_limit = int(getattr(options, 'limit', -1)) errors = getattr(options, 'errors', False) systems = getattr(options, 'system', ALL_SYSTEMS).split(',') # Make sure the metrics subdirectory exists Env.METRICS_DIR.makedirs_p() for system in systems: # Directory to put the pylint report in. # This makes the folder if it doesn't already exist. 
report_dir = (Env.REPORT_DIR / system).makedirs_p() flags = [] if errors: flags.append("--errors-only") apps_list = ' '.join(top_python_dirs(system)) pythonpath_prefix = ( "PYTHONPATH={system}:{system}/djangoapps:{system}/" "lib:common/djangoapps:common/lib".format( system=system ) ) sh( "{pythonpath_prefix} pylint {flags} --msg-template={msg_template} {apps} | " "tee {report_dir}/pylint.report".format( pythonpath_prefix=pythonpath_prefix, flags=" ".join(flags), msg_template='"{path}:{line}: [{msg_id}({symbol}), {obj}] {msg}"', apps=apps_list, report_dir=report_dir ) ) num_violations += _count_pylint_violations( "{report_dir}/pylint.report".format(report_dir=report_dir)) # Print number of violations to log violations_count_str = "Number of pylint violations: " + str(num_violations) print violations_count_str # Also write the number of violations to a file with open(Env.METRICS_DIR / "pylint", "w") as f: f.write(violations_count_str) # Fail number of violations is greater than the limit if num_violations > violations_limit > -1: raise Exception("Failed. Too many pylint violations. " "The limit is {violations_limit}.".format(violations_limit=violations_limit)) def _count_pylint_violations(report_file): """ Parses a pylint report line-by-line and determines the number of violations reported """ num_violations_report = 0 # An example string: # common/lib/xmodule/xmodule/tests/test_conditional.py:21: [C0111(missing-docstring), DummySystem] Missing docstring # More examples can be found in the unit tests for this method pylint_pattern = re.compile(r".(\d+):\ \[(\D\d+.+\]).") for line in open(report_file): violation_list_for_line = pylint_pattern.split(line) # If the string is parsed into four parts, then we've found a violation. Example of split parts: # test file, line number, violation name, violation details if len(violation_list_for_line) == 4: num_violations_report += 1 return num_violations_report def _get_pep8_violations(): """ Runs pep8. 
Returns a tuple of (number_of_violations, violations_string) where violations_string is a string of all pep8 violations found, separated by new lines. """ report_dir = (Env.REPORT_DIR / 'pep8') report_dir.rmtree(ignore_errors=True) report_dir.makedirs_p() # Make sure the metrics subdirectory exists Env.METRICS_DIR.makedirs_p() sh('pep8 . | tee {report_dir}/pep8.report -a'.format(report_dir=report_dir)) count, violations_list = _pep8_violations( "{report_dir}/pep8.report".format(report_dir=report_dir) ) return (count, violations_list) def _pep8_violations(report_file): """ Returns a tuple of (num_violations, violations_list) for all pep8 violations in the given report_file. """ with open(report_file) as f: violations_list = f.readlines() num_lines = len(violations_list) return num_lines, violations_list @task @needs('pavelib.prereqs.install_python_prereqs') @cmdopts([ ("system=", "s", "System to act on"), ]) def run_pep8(options): # pylint: disable=unused-argument """ Run pep8 on system code. Fail the task if any violations are found. """ (count, violations_list) = _get_pep8_violations() violations_list = ''.join(violations_list) # Print number of violations to log violations_count_str = "Number of pep8 violations: {count}".format(count=count) print violations_count_str print violations_list # Also write the number of violations to a file with open(Env.METRICS_DIR / "pep8", "w") as f: f.write(violations_count_str + '\n\n') f.write(violations_list) # Fail if any violations are found if count: failure_string = "Too many pep8 violations. " + violations_count_str failure_string += "\n\nViolations:\n{violations_list}".format(violations_list=violations_list) raise Exception(failure_string) @task @needs('pavelib.prereqs.install_python_prereqs') def run_complexity(): """ Uses radon to examine cyclomatic complexity. 
For additional details on radon, see http://radon.readthedocs.org/ """ system_string = 'cms/ lms/ common/ openedx/' complexity_report_dir = (Env.REPORT_DIR / "complexity") complexity_report = complexity_report_dir / "python_complexity.log" # Ensure directory structure is in place: metrics dir, and an empty complexity report dir. Env.METRICS_DIR.makedirs_p() _prepare_report_dir(complexity_report_dir) print "--> Calculating cyclomatic complexity of python files..." try: sh( "radon cc {system_string} --total-average > {complexity_report}".format( system_string=system_string, complexity_report=complexity_report ) ) complexity_metric = _get_count_from_last_line(complexity_report, "python_complexity") _write_metric( complexity_metric, (Env.METRICS_DIR / "python_complexity") ) print "--> Python cyclomatic complexity report complete." print "radon cyclomatic complexity score: {metric}".format(metric=str(complexity_metric)) except BuildFailure: print "ERROR: Unable to calculate python-only code-complexity." @task @needs('pavelib.prereqs.install_node_prereqs') @cmdopts([ ("limit=", "l", "limit for number of acceptable violations"), ]) def run_jshint(options): """ Runs jshint on static asset directories """ violations_limit = int(getattr(options, 'limit', -1)) jshint_report_dir = (Env.REPORT_DIR / "jshint") jshint_report = jshint_report_dir / "jshint.report" _prepare_report_dir(jshint_report_dir) jshint_directories = ["common/static/js", "cms/static/js", "lms/static/js"] sh( "jshint {list} --config .jshintrc >> {jshint_report}".format( list=(" ".join(jshint_directories)), jshint_report=jshint_report ), ignore_error=True ) try: num_violations = int(_get_count_from_last_line(jshint_report, "jshint")) except TypeError: raise BuildFailure( "Error. 
Number of jshint violations could not be found in {jshint_report}".format( jshint_report=jshint_report ) ) # Record the metric _write_metric(num_violations, (Env.METRICS_DIR / "jshint")) # Fail if number of violations is greater than the limit if num_violations > violations_limit > -1: raise Exception( "JSHint Failed. Too many violations ({count}).\nThe limit is {violations_limit}.".format( count=num_violations, violations_limit=violations_limit ) ) def _write_metric(metric, filename): """ Write a given metric to a given file Used for things like reports/metrics/jshint, which will simply tell you the number of jshint violations found """ with open(filename, "w") as metric_file: metric_file.write(str(metric)) def _prepare_report_dir(dir_name): """ Sets a given directory to a created, but empty state """ dir_name.rmtree_p() dir_name.mkdir_p() def _get_last_report_line(filename): """ Returns the last line of a given file. Used for getting output from quality output files. """ file_not_found_message = "The following log file could not be found: {file}".format(file=filename) if os.path.isfile(filename): with open(filename, 'r') as report_file: lines = report_file.readlines() return lines[len(lines) - 1] else: # Raise a build error if the file is not found raise BuildFailure(file_not_found_message) def _get_count_from_last_line(filename, file_type): """ This will return the number in the last line of a file. It is returning only the value (as a floating number). """ last_line = _get_last_report_line(filename) if file_type is "python_complexity": # Example of the last line of a complexity report: "Average complexity: A (1.93953443446)" regex = r'\d+.\d+' else: # Example of the last line of a jshint report (for example): "3482 errors" regex = r'^\d+' try: return float(re.search(regex, last_line).group(0)) # An AttributeError will occur if the regex finds no matches. # A ValueError will occur if the returned regex cannot be cast as a float. 
except (AttributeError, ValueError): return None @task @needs('pavelib.prereqs.install_python_prereqs') @cmdopts([ ("compare-branch=", "b", "Branch to compare against, defaults to origin/master"), ("percentage=", "p", "fail if diff-quality is below this percentage"), ]) def run_quality(options): """ Build the html diff quality reports, and print the reports to the console. :param: b, the branch to compare against, defaults to origin/master :param: p, diff-quality will fail if the quality percentage calculated is below this percentage. For example, if p is set to 80, and diff-quality finds quality of the branch vs the compare branch is less than 80%, then this task will fail. This threshold would be applied to both pep8 and pylint. """ # Directory to put the diff reports in. # This makes the folder if it doesn't already exist. dquality_dir = (Env.REPORT_DIR / "diff_quality").makedirs_p() # Save the pass variable. It will be set to false later if failures are detected. diff_quality_percentage_pass = True def _pep8_output(count, violations_list, is_html=False): """ Given a count & list of pep8 violations, pretty-print the pep8 output. If `is_html`, will print out with HTML markup. 
""" if is_html: lines = ['<body>\n'] sep = '-------------<br/>\n' title = "<h1>Quality Report: pep8</h1>\n" violations_bullets = ''.join( ['<li>{violation}</li><br/>\n'.format(violation=violation) for violation in violations_list] ) violations_str = '<ul>\n{bullets}</ul>\n'.format(bullets=violations_bullets) violations_count_str = "<b>Violations</b>: {count}<br/>\n" fail_line = "<b>FAILURE</b>: pep8 count should be 0<br/>\n" else: lines = [] sep = '-------------\n' title = "Quality Report: pep8\n" violations_str = ''.join(violations_list) violations_count_str = "Violations: {count}\n" fail_line = "FAILURE: pep8 count should be 0\n" violations_count_str = violations_count_str.format(count=count) lines.extend([sep, title, sep, violations_str, sep, violations_count_str]) if count > 0: lines.append(fail_line) lines.append(sep + '\n') if is_html: lines.append('</body>') return ''.join(lines) # Run pep8 directly since we have 0 violations on master (count, violations_list) = _get_pep8_violations() # Print number of violations to log print _pep8_output(count, violations_list) # Also write the number of violations to a file with open(dquality_dir / "diff_quality_pep8.html", "w") as f: f.write(_pep8_output(count, violations_list, is_html=True)) if count > 0: diff_quality_percentage_pass = False # ----- Set up for diff-quality pylint call ----- # Set the string, if needed, to be used for the diff-quality --compare-branch switch. compare_branch = getattr(options, 'compare_branch', None) compare_branch_string = u'' if compare_branch: compare_branch_string = u'--compare-branch={0}'.format(compare_branch) # Set the string, if needed, to be used for the diff-quality --fail-under switch. 
diff_threshold = int(getattr(options, 'percentage', -1)) percentage_string = u'' if diff_threshold > -1: percentage_string = u'--fail-under={0}'.format(diff_threshold) # Generate diff-quality html report for pylint, and print to console # If pylint reports exist, use those # Otherwise, `diff-quality` will call pylint itself pylint_files = get_violations_reports("pylint") pylint_reports = u' '.join(pylint_files) pythonpath_prefix = ( "PYTHONPATH=$PYTHONPATH:lms:lms/djangoapps:lms/lib:cms:cms/djangoapps:cms/lib:" "common:common/djangoapps:common/lib" ) # run diff-quality for pylint. if not run_diff_quality( violations_type="pylint", prefix=pythonpath_prefix, reports=pylint_reports, percentage_string=percentage_string, branch_string=compare_branch_string, dquality_dir=dquality_dir ): diff_quality_percentage_pass = False # run diff-quality for jshint. if not run_diff_quality( violations_type="jshint", prefix=pythonpath_prefix, reports=pylint_reports, percentage_string=percentage_string, branch_string=compare_branch_string, dquality_dir=dquality_dir ): diff_quality_percentage_pass = False # If one of the quality runs fails, then paver exits with an error when it is finished if not diff_quality_percentage_pass: raise BuildFailure("Diff-quality failure(s).") def run_diff_quality( violations_type=None, prefix=None, reports=None, percentage_string=None, branch_string=None, dquality_dir=None ): """ This executes the diff-quality commandline tool for the given violation type (e.g., pylint, jshint). If diff-quality fails due to quality issues, this method returns False. 
""" try: sh( "{pythonpath_prefix} diff-quality --violations={type} " "{reports} {percentage_string} {compare_branch_string} " "--html-report {dquality_dir}/diff_quality_{type}.html ".format( type=violations_type, pythonpath_prefix=prefix, reports=reports, percentage_string=percentage_string, compare_branch_string=branch_string, dquality_dir=dquality_dir, ) ) return True except BuildFailure, error_message: if is_percentage_failure(error_message): return False else: raise BuildFailure(error_message) def is_percentage_failure(error_message): """ When diff-quality is run with a threshold percentage, it ends with an exit code of 1. This bubbles up to paver with a subprocess return code error. If the subprocess exits with anything other than 1, raise a paver exception. """ if "Subprocess return code: 1" not in error_message: return False else: return True def get_violations_reports(violations_type): """ Finds violations reports files by naming convention (e.g., all "pep8.report" files) """ violations_files = [] for subdir, _dirs, files in os.walk(os.path.join(Env.REPORT_DIR)): for f in files: if f == "{violations_type}.report".format(violations_type=violations_type): violations_files.append(os.path.join(subdir, f)) return violations_files
agpl-3.0
martinwicke/tensorflow
tensorflow/python/client/events_writer_test.py
23
2677
# Copyright 2015 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Tests for the SWIG-wrapped events writer.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os.path from tensorflow.core.framework import summary_pb2 from tensorflow.core.util import event_pb2 from tensorflow.python import pywrap_tensorflow from tensorflow.python.lib.io import tf_record from tensorflow.python.framework import errors from tensorflow.python.framework import test_util from tensorflow.python.platform import googletest from tensorflow.python.util import compat class PywrapeventsWriterTest(test_util.TensorFlowTestCase): def testWriteEvents(self): file_prefix = os.path.join(self.get_temp_dir(), "events") writer = pywrap_tensorflow.EventsWriter(compat.as_bytes(file_prefix)) filename = compat.as_text(writer.FileName()) event_written = event_pb2.Event( wall_time=123.45, step=67, summary=summary_pb2.Summary( value=[summary_pb2.Summary.Value(tag="foo", simple_value=89.0)])) writer.WriteEvent(event_written) writer.Flush() writer.Close() with self.assertRaises(errors.NotFoundError): for r in tf_record.tf_record_iterator(filename + "DOES_NOT_EXIST"): self.assertTrue(False) reader = tf_record.tf_record_iterator(filename) event_read = event_pb2.Event() event_read.ParseFromString(next(reader)) 
self.assertTrue(event_read.HasField("file_version")) event_read.ParseFromString(next(reader)) # Second event self.assertProtoEquals(""" wall_time: 123.45 step: 67 summary { value { tag: 'foo' simple_value: 89.0 } } """, event_read) with self.assertRaises(StopIteration): next(reader) def testWriteEventInvalidType(self): class _Invalid(object): def __str__(self): return "Invalid" with self.assertRaisesRegexp(TypeError, "Invalid"): pywrap_tensorflow.EventsWriter(b"foo").WriteEvent(_Invalid()) if __name__ == "__main__": googletest.main()
apache-2.0