Dataset schema (each record below lists its fields in this order):

    repo_name   string (length 5-100)
    path        string (length 4-375)
    copies      string (991 distinct values)
    size        string (length 4-7)
    content     string (length 666-1M)
    license     string (15 distinct values)
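For orientation, a dump with this schema could be read with the Hugging Face `datasets` library. This is a minimal sketch only: the dataset identifier below is a hypothetical placeholder (this preview does not name the dataset), and the "train" split is an assumption.

from datasets import load_dataset

# "user/code-dataset" is a hypothetical placeholder; substitute the real
# dataset identifier. The "train" split name is an assumption.
ds = load_dataset("user/code-dataset", split="train")

row = ds[0]
print(row["repo_name"], row["path"], row["license"])  # record metadata
print(row["content"][:200])  # each file's full text is stored as one string cell

The records follow, one per source file.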
repo_name: aleksandra-tarkowska/openmicroscopy
path: components/tools/OmeroPy/test/integration/test_itimeline.py
copies: 9
size: 9352
content:
#!/usr/bin/env python
# -*- coding: utf-8 -*-

"""
   Integration test focused on the omero.api.ITimeline interface
   of a running server.

   Copyright 2008-2014 Glencoe Software, Inc. All rights reserved.
   Use is subject to license terms supplied in LICENSE.txt
"""

import time
import pytest
import library as lib

import omero
from omero.rtypes import rint, rlong, rstring, rtime


class TestITimeline(lib.ITest):

    DEFAULT_PERMS = 'rwr---'

    def testGeneral(self):
        uuid = self.ctx.sessionUuid
        timeline = self.sf.getTimelineService()

        im_ids = dict()
        for i in range(0, 10):
            # create image
            acquired = long(time.time() * 1000)
            img = self.make_image(name='test-img-%s' % uuid, date=acquired)
            im_ids[i] = [img.id.val, acquired]

        # Here we assume that this test is not run within the last 1 second
        start = acquired - 86400
        end = acquired + 1

        p = omero.sys.Parameters()
        p.map = {}
        f = omero.sys.Filter()
        f.ownerId = rlong(self.ctx.userId)
        f.groupId = rlong(self.ctx.groupId)
        p.theFilter = f

        M = timeline.countByPeriod
        A = rtime(long(start))
        B = rtime(long(end))

        counter = M(['Image'], A, B, p)
        assert counter['Image'] == 10

        # And with #9609
        counter = M(['Image'], A, B, p, {"omero.group": "-1"})
        assert counter['Image'] == 10

        p2 = omero.sys.Parameters()
        p2.map = {}
        f2 = omero.sys.Filter()
        f2.ownerId = rlong(self.ctx.userId)
        f2.groupId = rlong(self.ctx.groupId)
        f2.limit = rint(5)
        p2.theFilter = f2

        # p.map["start"] = rtime(long(start))
        # p.map["end"] = rtime(long(end))

        M = timeline.getMostRecentObjects
        res = M(['Image'], p2, False)["Image"]
        assert 5 == len(res)

        # And with #9609
        res = M(['Image'], p2, False, {"omero.group": "-1"})["Image"]
        assert 5 == len(res)

        # 1st element should be the 9th from the im_ids
        assert im_ids[9][0] == res[0].id.val
        # 2nd element should be the 8th from the im_ids
        assert im_ids[8][0] == res[1].id.val
        # 3rd element should be the 7th from the im_ids
        assert im_ids[7][0] == res[2].id.val
        # 4th element should be the 6th from the im_ids
        assert im_ids[6][0] == res[3].id.val
        # 5th element should be the 5th from the im_ids
        assert im_ids[5][0] == res[4].id.val

    def testCollaborativeTimeline(self):
        """
        Create some images as one user - test if another user
        can see these events in timeline.
        """
        client2, user2 = self.new_client_and_user(group=self.group)

        # log in as first user & create images
        timeline2 = client2.sf.getTimelineService()

        im_ids = dict()
        for i in range(0, 10):
            # create image
            acquired = long(time.time() * 1000)
            img = self.make_image(name='test-img-%s' % client2.sf,
                                  date=acquired, client=client2)
            im_ids[i] = [img.id.val, acquired]

        # Here we assume that this test is not run within the last 1 second
        start = acquired - 86400
        end = acquired + 1

        ownerId = rlong(user2.id.val)
        groupId = rlong(self.group.id.val)

        def assert_timeline(timeline, start, end, ownerId=None, groupId=None):
            p = omero.sys.Parameters()
            p.map = {}
            f = omero.sys.Filter()
            if ownerId is not None:
                f.ownerId = ownerId
            if groupId is not None:
                f.groupId = groupId
            p.theFilter = f

            counter = timeline.countByPeriod(
                ['Image'], rtime(long(start)), rtime(long(end)), p)
            assert 10 == counter['Image']

            data = timeline.getByPeriod(
                ['Image'], rtime(long(start)), rtime(long(end)), p, False)
            assert 10 == len(data['Image'])

        assert_timeline(timeline2, start, end, ownerId, groupId)

        # now log in as another user (default group is same as user-created
        # images above)
        assert_timeline(
            self.sf.getTimelineService(), start, end, ownerId, groupId)

    def test1173(self):
        uuid = self.root.sf.getAdminService().getEventContext().sessionUuid
        timeline = self.root.sf.getTimelineService()

        # create dataset
        ds = self.make_dataset(name='test1173-ds-%s' % uuid, client=self.root)
        ds.unload()

        # Here we assume that this test is not run within the last 1 second
        start = long(time.time() * 1000 - 86400)
        end = long(time.time() * 1000 + 86400)

        p = omero.sys.Parameters()
        p.map = {}
        f = omero.sys.Filter()
        f.ownerId = rlong(self.new_user().id.val)
        p.theFilter = f

        M = timeline.getEventLogsByPeriod
        A = rtime(long(start))
        B = rtime(long(end))

        rv = M(A, B, p)
        assert rv > 0

        # And now for #9609
        rv = M(A, B, p, {"omero.group": "-1"})
        assert rv > 0

    def test1175(self):
        uuid = self.root.sf.getAdminService().getEventContext().sessionUuid
        update = self.root.sf.getUpdateService()
        timeline = self.root.sf.getTimelineService()

        # create dataset
        ds = self.make_dataset(name='test1154-ds-%s' % (uuid),
                               client=self.root)
        ds.unload()

        # create tag
        ann = omero.model.TagAnnotationI()
        ann.textValue = rstring('tag-%s' % (uuid))
        ann.setDescription(rstring('tag-%s' % (uuid)))

        t_ann = omero.model.DatasetAnnotationLinkI()
        t_ann.setParent(ds)
        t_ann.setChild(ann)
        update.saveObject(t_ann)

        p = omero.sys.Parameters()
        p.map = {}
        f = omero.sys.Filter()
        f.ownerId = rlong(0)
        f.limit = rint(10)
        p.theFilter = f

        M = timeline.getMostRecentAnnotationLinks
        res = M(None, ['TagAnnotation'], None, p)
        assert len(res) > 0

        # And now for #9609
        res = M(None, ['TagAnnotation'], None, p, {"omero.group": "-1"})
        assert len(res) > 0

    # This test relates to a ticket that has not yet been resolved
    # http://trac.openmicroscopy.org/ome/ticket/1225
    # If the ticket is still valid then this test should presumably pass
    # after the ticket is closed but not before then. If the issue is not
    # to be addressed then this test should be removed.
    @pytest.mark.broken(ticket="1225")
    def test1225(self):
        uuid = self.root.sf.getAdminService().getEventContext().sessionUuid
        update = self.root.sf.getUpdateService()
        timeline = self.root.sf.getTimelineService()
        query = self.root.sf.getQueryService()

        # create datasets
        to_save = list()
        for i in range(0, 10):
            to_save.append(self.new_dataset(name="ds-%i-%s" % (i, uuid)))
        dss = update.saveAndReturnArray(to_save)

        # create tags
        for i in range(0, 10):
            ds1 = query.get("Dataset", dss[i].id.val)
            ann = omero.model.TagAnnotationI()
            ann.textValue = rstring('tag-%i-%s' % (i, uuid))
            ann.setDescription(rstring('desc-%i-%s' % (i, uuid)))

            t_ann = omero.model.DatasetAnnotationLinkI()
            t_ann.setParent(ds1)
            t_ann.setChild(ann)
            update.saveObject(t_ann)

        p = omero.sys.Parameters()
        p.map = {}
        f = omero.sys.Filter()
        f.ownerId = rlong(0)
        f.limit = rint(10)
        p.theFilter = f

        M = timeline.getMostRecentAnnotationLinks
        tagids = set([e.child.id.val
                      for e in M(None, ['TagAnnotation'], None, p)])
        assert len(tagids) == 10

        # And under #9609
        tagids = set([e.child.id.val
                      for e in M(None, ['TagAnnotation'], None, p,
                                 {"omero.group": "-1"})])
        assert len(tagids) == 10

        ann = omero.model.TagAnnotationI()
        ann.textValue = rstring('tag-%s' % (uuid))
        ann.setDescription(rstring('desc-%s' % (uuid)))
        ann = update.saveAndReturnObject(ann)

        for i in range(0, 10):
            ds1 = query.get("Dataset", dss[i].id.val)
            ann1 = query.get("TagAnnotation", ann.id.val)
            t_ann = omero.model.DatasetAnnotationLinkI()
            t_ann.setParent(ds1)
            t_ann.setChild(ann1)
            update.saveObject(t_ann)

        tids = set([e.child.id.val
                    for e in M(None, ['TagAnnotation'], None, p)])
        assert len(tids) == 10

        # And again #9609
        tids = set([e.child.id.val
                    for e in M(None, ['TagAnnotation'], None, p,
                               {"omero.group": "-1"})])
        assert len(tids) == 10

    def test3234(self):
        user_object = omero.model.ExperimenterI(self.ctx.userId, False)
        share = self.root.sf.getShareService()
        share.createShare(
            "description", None, None, [user_object], None, True)
        timeline = self.sf.getTimelineService()
        timeline.getMostRecentShareCommentLinks(None)
        timeline.getMostRecentShareCommentLinks(None, {"omero.group": "-1"})
license: gpl-2.0

repo_name: noironetworks/nova
path: nova/tests/functional/api_sample_tests/test_extended_availability_zone.py
copies: 21
size: 2379
content:
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_config import cfg

from nova.tests.functional.api_sample_tests import test_servers

CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
                'nova.api.openstack.compute.legacy_v2.extensions')


class ExtendedAvailabilityZoneJsonTests(test_servers.ServersSampleBase):
    extension_name = "os-extended-availability-zone"

    def _get_flags(self):
        f = super(ExtendedAvailabilityZoneJsonTests, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.keypairs.Keypairs')
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.extended_availability_zone.'
            'Extended_availability_zone')
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.extended_ips.Extended_ips')
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.extended_ips_mac.'
            'Extended_ips_mac')
        return f

    def test_show(self):
        uuid = self._post_server()
        response = self._do_get('servers/%s' % uuid)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        subs['access_ip_v4'] = '1.2.3.4'
        subs['access_ip_v6'] = '80fe::'
        self._verify_response('server-get-resp', subs, response, 200)

    def test_detail(self):
        self._post_server()
        response = self._do_get('servers/detail')
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        subs['access_ip_v4'] = '1.2.3.4'
        subs['access_ip_v6'] = '80fe::'
        self._verify_response('servers-detail-resp', subs, response, 200)
license: apache-2.0

repo_name: palaniyappanBala/thug
path: src/ActiveX/modules/SnapshotViewer.py
copies: 7
size: 1435
content:
# Microsoft Access Snapshot Viewer
# CVE-2008-2463

import logging
log = logging.getLogger("Thug")


def PrintSnapshot(self, SnapshotPath = '', CompressedPath = ''):
    if SnapshotPath:
        self.SnapshotPath = SnapshotPath

    if CompressedPath:
        self.CompressedPath = CompressedPath

    msg = '[Microsoft Access Snapshot Viewer ActiveX] SnapshotPath : %s, CompressedPath: %s' % (
        self.SnapshotPath,
        self.CompressedPath, )

    log.ThugLogging.add_behavior_warn(msg, 'CVE-2008-2463')
    log.ThugLogging.log_exploit_event(self._window.url,
                                      "Microsoft Access Snapshot Viewer ActiveX",
                                      "Print Snapshot",
                                      forward = False,
                                      cve = 'CVE-2008-2463',
                                      data = {
                                          "SnapshotPath": self.SnapshotPath,
                                          "CompressedPath": self.CompressedPath
                                      })

    url = self.SnapshotPath

    try:
        self._window._navigator.fetch(url, redirect_type = "CVE-2008-2463")
    except:
        log.ThugLogging.add_behavior_warn('[Microsoft Access Snapshot Viewer ActiveX] Fetch failed')
license: gpl-2.0

repo_name: ovnicraft/openerp-restaurant
path: base_report_designer/base_report_designer.py
copies: 44
size: 3471
content:
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
from StringIO import StringIO

from openerp.modules.module import get_module_resource
import openerp.modules.registry
from openerp.osv import osv
from openerp_sxw2rml import sxw2rml


class report_xml(osv.osv):
    _inherit = 'ir.actions.report.xml'

    def sxwtorml(self, cr, uid, file_sxw, file_type):
        '''
        The use of this function is to get rml file from sxw file.
        '''
        sxwval = StringIO(base64.decodestring(file_sxw))
        if file_type == 'sxw':
            fp = open(get_module_resource(
                'base_report_designer', 'openerp_sxw2rml',
                'normalized_oo2rml.xsl'), 'rb')
        if file_type == 'odt':
            fp = open(get_module_resource(
                'base_report_designer', 'openerp_sxw2rml',
                'normalized_odt2rml.xsl'), 'rb')
        return {'report_rml_content': str(sxw2rml(sxwval, xsl=fp.read()))}

    def upload_report(self, cr, uid, report_id, file_sxw, file_type, context=None):
        '''
        Untested function
        '''
        sxwval = StringIO(base64.decodestring(file_sxw))
        if file_type == 'sxw':
            fp = open(get_module_resource(
                'base_report_designer', 'openerp_sxw2rml',
                'normalized_oo2rml.xsl'), 'rb')
        if file_type == 'odt':
            fp = open(get_module_resource(
                'base_report_designer', 'openerp_sxw2rml',
                'normalized_odt2rml.xsl'), 'rb')
        report = self.pool['ir.actions.report.xml'].write(cr, uid, [report_id], {
            'report_sxw_content': base64.decodestring(file_sxw),
            'report_rml_content': str(sxw2rml(sxwval, xsl=fp.read())),
        })
        return True

    def report_get(self, cr, uid, report_id, context=None):
        if context is None:
            context = {}
        # skip osv.fields.sanitize_binary_value() because we want the raw
        # bytes in all cases
        context.update(bin_raw=True)
        report = self.browse(cr, uid, report_id, context=context)
        sxw_data = report.report_sxw_content
        rml_data = report.report_rml_content
        if isinstance(sxw_data, unicode):
            sxw_data = sxw_data.encode("iso-8859-1", "replace")
        if isinstance(rml_data, unicode):
            rml_data = rml_data.encode("iso-8859-1", "replace")
        return {
            'file_type': report.report_type,
            'report_sxw_content': sxw_data and base64.encodestring(sxw_data) or False,
            'report_rml_content': rml_data and base64.encodestring(rml_data) or False
        }

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
license: agpl-3.0

repo_name: Titan-C/scikit-learn
path: sklearn/datasets/tests/test_kddcup99.py
copies: 42
size: 1278
content:
"""Test kddcup99 loader. Only 'percent10' mode is tested, as the full data is too big to use in unit-testing. The test is skipped if the data wasn't previously fetched and saved to scikit-learn data folder. """ from sklearn.datasets import fetch_kddcup99 from sklearn.utils.testing import assert_equal, SkipTest def test_percent10(): try: data = fetch_kddcup99(download_if_missing=False) except IOError: raise SkipTest("kddcup99 dataset can not be loaded.") assert_equal(data.data.shape, (494021, 41)) assert_equal(data.target.shape, (494021,)) data_shuffled = fetch_kddcup99(shuffle=True, random_state=0) assert_equal(data.data.shape, data_shuffled.data.shape) assert_equal(data.target.shape, data_shuffled.target.shape) data = fetch_kddcup99('SA') assert_equal(data.data.shape, (100655, 41)) assert_equal(data.target.shape, (100655,)) data = fetch_kddcup99('SF') assert_equal(data.data.shape, (73237, 4)) assert_equal(data.target.shape, (73237,)) data = fetch_kddcup99('http') assert_equal(data.data.shape, (58725, 3)) assert_equal(data.target.shape, (58725,)) data = fetch_kddcup99('smtp') assert_equal(data.data.shape, (9571, 3)) assert_equal(data.target.shape, (9571,))
license: bsd-3-clause

repo_name: faouellet/clang-grade
path: correctnessgb.py
copies: 2
size: 3075
content:
from PyQt5.QtWidgets import (QGroupBox, QLabel, QLineEdit, QPushButton,
                             QGridLayout, QMessageBox, QFileDialog)
from PyQt5.QtGui import QIntValidator

import os


class CorrectnessGroupBox(QGroupBox):
    def __init__(self, test_data_dir='', exe='', max_deduction=0):
        def getDir(entry):
            dname = QFileDialog.getExistingDirectory(self)
            entry.setText(dname)

        def getExe(entry):
            ename, _ = QFileDialog.getOpenFileName(self)
            entry.setText(ename)

        def onEditDeduction(entry):
            try:
                self.max_deduction = int(entry.text())
            except ValueError:
                self.max_deduction = 0

        def onEditDir(entry):
            new_dir = entry.text()
            if not new_dir:
                return
            if not os.path.isdir(new_dir):
                err = QMessageBox(QMessageBox.Critical, 'Error',
                                  '%s is not a directory' % new_dir,
                                  QMessageBox.Ok, self)
                err.show()
                entry.setText('')
                return
            self.test_data_dir = new_dir

        def onEditExe(entry):
            new_exe = entry.text()
            if not new_exe:
                return
            elif not os.access(new_exe, os.X_OK):
                err = QMessageBox(QMessageBox.Critical, 'Error',
                                  '%s is not an executable program' % new_exe,
                                  QMessageBox.Ok, self)
                err.show()
                entry.setText('')
                return
            self.exe = new_exe

        super().__init__()

        self.test_data_dir = test_data_dir
        self.exe = exe
        self.max_deduction = max_deduction

        test_data_label = QLabel('Test data directory')
        self.test_data_edit = QLineEdit()
        self.test_data_edit.textChanged.connect(lambda: onEditDir(self.test_data_edit))
        test_data_btn = QPushButton('Browse', self)
        test_data_btn.clicked.connect(lambda: getDir(self.test_data_edit))

        exe_label = QLabel('Answer program')
        self.exe_edit = QLineEdit()
        self.exe_edit.textChanged.connect(lambda: onEditExe(self.exe_edit))
        exe_btn = QPushButton('Browse', self)
        exe_btn.clicked.connect(lambda: getExe(self.exe_edit))

        point_label = QLabel('Maximum points deduction:')
        self.point_edit = QLineEdit()
        self.point_edit.setMaximumWidth(50)
        point_validator = QIntValidator()
        self.point_edit.setValidator(point_validator)
        self.point_edit.textChanged.connect(lambda: onEditDeduction(self.point_edit))

        gb_grid = QGridLayout()
        gb_grid.addWidget(test_data_label, 0, 0, 1, 1)
        gb_grid.addWidget(self.test_data_edit, 0, 1, 1, 4)
        gb_grid.addWidget(test_data_btn, 0, 5, 1, 1)
        gb_grid.addWidget(exe_label, 1, 0, 1, 1)
        gb_grid.addWidget(self.exe_edit, 1, 1, 1, 4)
        gb_grid.addWidget(exe_btn, 1, 5, 1, 1)
        gb_grid.addWidget(point_label, 2, 0, 1, 1)
        gb_grid.addWidget(self.point_edit, 2, 1, 1, 1)
        self.setLayout(gb_grid)
        self.setTitle('Correctness')
license: bsd-3-clause

repo_name: isaksky/selenium
path: py/test/selenium/webdriver/common/proxy_tests.py
copies: 24
size: 5588
content:
#!/usr/bin/python
# Copyright 2012 Software Freedom Conservancy.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS.
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import unittest

from selenium.webdriver.common.proxy import Proxy, ProxyType


class ProxyTests(unittest.TestCase):

    MANUAL_PROXY = {
        'httpProxy': 'some.url:1234',
        'ftpProxy': 'ftp.proxy',
        'noProxy': 'localhost, foo.localhost',
        'sslProxy': 'ssl.proxy:1234',
        'socksProxy': 'socks.proxy:65555',
        'socksUsername': 'test',
        'socksPassword': 'test',
    }

    PAC_PROXY = {
        'proxyAutoconfigUrl': 'http://pac.url:1234',
    }

    AUTODETECT_PROXY = {
        'autodetect': True,
    }

    def testCanAddManualProxyToDesiredCapabilities(self):
        proxy = Proxy()
        proxy.http_proxy = self.MANUAL_PROXY['httpProxy']
        proxy.ftp_proxy = self.MANUAL_PROXY['ftpProxy']
        proxy.no_proxy = self.MANUAL_PROXY['noProxy']
        proxy.sslProxy = self.MANUAL_PROXY['sslProxy']
        proxy.socksProxy = self.MANUAL_PROXY['socksProxy']
        proxy.socksUsername = self.MANUAL_PROXY['socksUsername']
        proxy.socksPassword = self.MANUAL_PROXY['socksPassword']

        desired_capabilities = {}
        proxy.add_to_capabilities(desired_capabilities)

        proxy_capabilities = self.MANUAL_PROXY.copy()
        proxy_capabilities['proxyType'] = 'MANUAL'
        expected_capabilities = {'proxy': proxy_capabilities}
        self.assertEqual(expected_capabilities, desired_capabilities)

    def testCanAddAutodetectProxyToDesiredCapabilities(self):
        proxy = Proxy()
        proxy.auto_detect = self.AUTODETECT_PROXY['autodetect']

        desired_capabilities = {}
        proxy.add_to_capabilities(desired_capabilities)

        proxy_capabilities = self.AUTODETECT_PROXY.copy()
        proxy_capabilities['proxyType'] = 'AUTODETECT'

        expected_capabilities = {'proxy': proxy_capabilities}
        self.assertEqual(expected_capabilities, desired_capabilities)

    def testCanAddPACProxyToDesiredCapabilities(self):
        proxy = Proxy()
        proxy.proxy_autoconfig_url = self.PAC_PROXY['proxyAutoconfigUrl']

        desired_capabilities = {}
        proxy.add_to_capabilities(desired_capabilities)

        proxy_capabilities = self.PAC_PROXY.copy()
        proxy_capabilities['proxyType'] = 'PAC'

        expected_capabilities = {'proxy': proxy_capabilities}
        self.assertEqual(expected_capabilities, desired_capabilities)

    def testCanNotChangeInitializedProxyType(self):
        # Changing the type of an already-initialized proxy must raise.
        # (The original try/raise/except Exception pattern also caught the
        # sentinel it raised, so the test could never fail.)
        proxy = Proxy(raw={'proxyType': 'direct'})
        with self.assertRaises(Exception):
            proxy.proxy_type = ProxyType.SYSTEM

        proxy = Proxy(raw={'proxyType': ProxyType.DIRECT})
        with self.assertRaises(Exception):
            proxy.proxy_type = ProxyType.SYSTEM

    def testCanInitManualProxy(self):
        proxy = Proxy(raw=self.MANUAL_PROXY)

        self.assertEqual(ProxyType.MANUAL, proxy.proxy_type)
        self.assertEqual(self.MANUAL_PROXY['httpProxy'], proxy.http_proxy)
        self.assertEqual(self.MANUAL_PROXY['ftpProxy'], proxy.ftp_proxy)
        self.assertEqual(self.MANUAL_PROXY['noProxy'], proxy.no_proxy)
        self.assertEqual(self.MANUAL_PROXY['sslProxy'], proxy.sslProxy)
        self.assertEqual(self.MANUAL_PROXY['socksProxy'], proxy.socksProxy)
        self.assertEqual(self.MANUAL_PROXY['socksUsername'], proxy.socksUsername)
        self.assertEqual(self.MANUAL_PROXY['socksPassword'], proxy.socksPassword)

    def testCanInitAutodetectProxy(self):
        # The original defined this as a second
        # testCanAddAutodetectProxyToDesiredCapabilities, silently shadowing
        # the capabilities test above.
        proxy = Proxy(raw=self.AUTODETECT_PROXY)
        self.assertEqual(ProxyType.AUTODETECT, proxy.proxy_type)
        self.assertEqual(self.AUTODETECT_PROXY['autodetect'], proxy.auto_detect)

    def testCanInitPACProxy(self):
        # Likewise renamed from a duplicate
        # testCanAddPACProxyToDesiredCapabilities.
        proxy = Proxy(raw=self.PAC_PROXY)
        self.assertEqual(ProxyType.PAC, proxy.proxy_type)
        self.assertEqual(self.PAC_PROXY['proxyAutoconfigUrl'], proxy.proxy_autoconfig_url)

    def testCanInitEmptyProxy(self):
        proxy = Proxy()
        self.assertEqual(ProxyType.UNSPECIFIED, proxy.proxy_type)
        self.assertEqual('', proxy.http_proxy)
        self.assertEqual('', proxy.ftp_proxy)
        self.assertEqual('', proxy.no_proxy)
        self.assertEqual('', proxy.sslProxy)
        self.assertEqual('', proxy.socksProxy)
        self.assertEqual('', proxy.socksUsername)
        self.assertEqual('', proxy.socksPassword)
        self.assertEqual(False, proxy.auto_detect)
        self.assertEqual('', proxy.proxy_autoconfig_url)

        desired_capabilities = {}
        proxy.add_to_capabilities(desired_capabilities)

        proxy_capabilities = {}
        proxy_capabilities['proxyType'] = 'UNSPECIFIED'
        expected_capabilities = {'proxy': proxy_capabilities}
        self.assertEqual(expected_capabilities, desired_capabilities)
license: apache-2.0

repo_name: stansonhealth/ansible-modules-core
path: cloud/amazon/elasticache_subnet_group.py
copies: 53
size: 5273
content:
#!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.

DOCUMENTATION = '''
---
module: elasticache_subnet_group
version_added: "2.0"
short_description: manage Elasticache subnet groups
description:
  - Creates, modifies, and deletes Elasticache subnet groups. This module has a dependency on python-boto >= 2.5.
options:
  state:
    description:
      - Specifies whether the subnet should be present or absent.
    required: true
    default: present
    choices: [ 'present' , 'absent' ]
  name:
    description:
      - Database subnet group identifier.
    required: true
  description:
    description:
      - Elasticache subnet group description. Only set when a new group is added.
    required: false
    default: null
  subnets:
    description:
      - List of subnet IDs that make up the Elasticache subnet group.
    required: false
    default: null
author: "Tim Mahoney (@timmahoney)"
extends_documentation_fragment:
    - aws
    - ec2
'''

EXAMPLES = '''
# Add or change a subnet group
- elasticache_subnet_group:
    state: present
    name: norwegian-blue
    description: My Fancy Ex Parrot Subnet Group
    subnets:
      - subnet-aaaaaaaa
      - subnet-bbbbbbbb

# Remove a subnet group
- elasticache_subnet_group:
    state: absent
    name: norwegian-blue
'''

try:
    import boto
    from boto.elasticache.layer1 import ElastiCacheConnection
    from boto.regioninfo import RegionInfo
    from boto.exception import BotoServerError
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False


def main():
    argument_spec = ec2_argument_spec()
    argument_spec.update(
        dict(
            state=dict(required=True, choices=['present', 'absent']),
            name=dict(required=True),
            description=dict(required=False),
            subnets=dict(required=False, type='list'),
        )
    )
    module = AnsibleModule(argument_spec=argument_spec)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    state = module.params.get('state')
    group_name = module.params.get('name').lower()
    group_description = module.params.get('description')
    group_subnets = module.params.get('subnets') or {}

    if state == 'present':
        for required in ['name', 'description', 'subnets']:
            if not module.params.get(required):
                module.fail_json(msg=str("Parameter %s required for state='present'" % required))
    else:
        for not_allowed in ['description', 'subnets']:
            if module.params.get(not_allowed):
                module.fail_json(msg=str("Parameter %s not allowed for state='absent'" % not_allowed))

    # Retrieve any AWS settings from the environment.
    region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)

    if not region:
        module.fail_json(msg=str("Either region or AWS_REGION or EC2_REGION environment variable or boto config aws_region or ec2_region must be set."))

    # Get an elasticache connection.
    try:
        endpoint = "elasticache.%s.amazonaws.com" % region
        connect_region = RegionInfo(name=region, endpoint=endpoint)
        conn = ElastiCacheConnection(region=connect_region, **aws_connect_kwargs)
    except boto.exception.NoAuthHandlerFound as e:
        module.fail_json(msg=e.message)

    try:
        changed = False
        exists = False

        try:
            matching_groups = conn.describe_cache_subnet_groups(group_name, max_records=100)
            exists = len(matching_groups) > 0
        except BotoServerError as e:
            if e.error_code != 'CacheSubnetGroupNotFoundFault':
                module.fail_json(msg=e.error_message)

        if state == 'absent':
            if exists:
                conn.delete_cache_subnet_group(group_name)
                changed = True
        else:
            if not exists:
                new_group = conn.create_cache_subnet_group(
                    group_name,
                    cache_subnet_group_description=group_description,
                    subnet_ids=group_subnets)
                changed = True
            else:
                changed_group = conn.modify_cache_subnet_group(
                    group_name,
                    cache_subnet_group_description=group_description,
                    subnet_ids=group_subnets)
                changed = True
    except BotoServerError as e:
        if e.error_message != 'No modifications were requested.':
            module.fail_json(msg=e.error_message)
        else:
            changed = False

    module.exit_json(changed=changed)

# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

main()
license: gpl-3.0

repo_name: denys-duchier/django
path: tests/settings_tests/tests.py
copies: 21
size: 14457
content:
import os
import sys
import unittest
import warnings
from types import ModuleType

from django.conf import ENVIRONMENT_VARIABLE, LazySettings, Settings, settings
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpRequest
from django.test import (
    SimpleTestCase, TestCase, TransactionTestCase, modify_settings,
    override_settings, signals,
)


@modify_settings(ITEMS={
    'prepend': ['b'],
    'append': ['d'],
    'remove': ['a', 'e']
})
@override_settings(ITEMS=['a', 'c', 'e'], ITEMS_OUTER=[1, 2, 3], TEST='override', TEST_OUTER='outer')
class FullyDecoratedTranTestCase(TransactionTestCase):

    available_apps = []

    def test_override(self):
        self.assertEqual(settings.ITEMS, ['b', 'c', 'd'])
        self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3])
        self.assertEqual(settings.TEST, 'override')
        self.assertEqual(settings.TEST_OUTER, 'outer')

    @modify_settings(ITEMS={
        'append': ['e', 'f'],
        'prepend': ['a'],
        'remove': ['d', 'c'],
    })
    def test_method_list_override(self):
        self.assertEqual(settings.ITEMS, ['a', 'b', 'e', 'f'])
        self.assertEqual(settings.ITEMS_OUTER, [1, 2, 3])

    @modify_settings(ITEMS={
        'append': ['b'],
        'prepend': ['d'],
        'remove': ['a', 'c', 'e'],
    })
    def test_method_list_override_no_ops(self):
        self.assertEqual(settings.ITEMS, ['b', 'd'])

    @modify_settings(ITEMS={
        'append': 'e',
        'prepend': 'a',
        'remove': 'c',
    })
    def test_method_list_override_strings(self):
        self.assertEqual(settings.ITEMS, ['a', 'b', 'd', 'e'])

    @modify_settings(ITEMS={'remove': ['b', 'd']})
    @modify_settings(ITEMS={'append': ['b'], 'prepend': ['d']})
    def test_method_list_override_nested_order(self):
        self.assertEqual(settings.ITEMS, ['d', 'c', 'b'])

    @override_settings(TEST='override2')
    def test_method_override(self):
        self.assertEqual(settings.TEST, 'override2')
        self.assertEqual(settings.TEST_OUTER, 'outer')

    def test_decorated_testcase_name(self):
        self.assertEqual(FullyDecoratedTranTestCase.__name__, 'FullyDecoratedTranTestCase')

    def test_decorated_testcase_module(self):
        self.assertEqual(FullyDecoratedTranTestCase.__module__, __name__)


@modify_settings(ITEMS={
    'prepend': ['b'],
    'append': ['d'],
    'remove': ['a', 'e']
})
@override_settings(ITEMS=['a', 'c', 'e'], TEST='override')
class FullyDecoratedTestCase(TestCase):

    def test_override(self):
        self.assertEqual(settings.ITEMS, ['b', 'c', 'd'])
        self.assertEqual(settings.TEST, 'override')

    @modify_settings(ITEMS={
        'append': 'e',
        'prepend': 'a',
        'remove': 'c',
    })
    @override_settings(TEST='override2')
    def test_method_override(self):
        self.assertEqual(settings.ITEMS, ['a', 'b', 'd', 'e'])
        self.assertEqual(settings.TEST, 'override2')


class ClassDecoratedTestCaseSuper(TestCase):
    """
    Dummy class for testing max recursion error in child class call to super().

    Refs #17011.
    """
    def test_max_recursion_error(self):
        pass


@override_settings(TEST='override')
class ClassDecoratedTestCase(ClassDecoratedTestCaseSuper):

    @classmethod
    def setUpClass(cls):
        super().setUpClass()
        cls.foo = getattr(settings, 'TEST', 'BUG')

    def test_override(self):
        self.assertEqual(settings.TEST, 'override')

    def test_setupclass_override(self):
        """Settings are overridden within setUpClass (#21281)."""
        self.assertEqual(self.foo, 'override')

    @override_settings(TEST='override2')
    def test_method_override(self):
        self.assertEqual(settings.TEST, 'override2')

    def test_max_recursion_error(self):
        """
        Overriding a method on a super class and then calling that method on
        the super class should not trigger infinite recursion. See #17011.
        """
        super().test_max_recursion_error()


@modify_settings(ITEMS={'append': 'mother'})
@override_settings(ITEMS=['father'], TEST='override-parent')
class ParentDecoratedTestCase(TestCase):
    pass


@modify_settings(ITEMS={'append': ['child']})
@override_settings(TEST='override-child')
class ChildDecoratedTestCase(ParentDecoratedTestCase):

    def test_override_settings_inheritance(self):
        self.assertEqual(settings.ITEMS, ['father', 'mother', 'child'])
        self.assertEqual(settings.TEST, 'override-child')


class SettingsTests(SimpleTestCase):

    def setUp(self):
        self.testvalue = None
        signals.setting_changed.connect(self.signal_callback)

    def tearDown(self):
        signals.setting_changed.disconnect(self.signal_callback)

    def signal_callback(self, sender, setting, value, **kwargs):
        if setting == 'TEST':
            self.testvalue = value

    def test_override(self):
        settings.TEST = 'test'
        self.assertEqual('test', settings.TEST)
        with self.settings(TEST='override'):
            self.assertEqual('override', settings.TEST)
        self.assertEqual('test', settings.TEST)
        del settings.TEST

    def test_override_change(self):
        settings.TEST = 'test'
        self.assertEqual('test', settings.TEST)
        with self.settings(TEST='override'):
            self.assertEqual('override', settings.TEST)
            settings.TEST = 'test2'
        self.assertEqual('test', settings.TEST)
        del settings.TEST

    def test_override_doesnt_leak(self):
        with self.assertRaises(AttributeError):
            getattr(settings, 'TEST')
        with self.settings(TEST='override'):
            self.assertEqual('override', settings.TEST)
            settings.TEST = 'test'
        with self.assertRaises(AttributeError):
            getattr(settings, 'TEST')

    @override_settings(TEST='override')
    def test_decorator(self):
        self.assertEqual('override', settings.TEST)

    def test_context_manager(self):
        with self.assertRaises(AttributeError):
            getattr(settings, 'TEST')
        override = override_settings(TEST='override')
        with self.assertRaises(AttributeError):
            getattr(settings, 'TEST')
        override.enable()
        self.assertEqual('override', settings.TEST)
        override.disable()
        with self.assertRaises(AttributeError):
            getattr(settings, 'TEST')

    def test_class_decorator(self):
        # SimpleTestCase can be decorated by override_settings, but not ut.TestCase
        class SimpleTestCaseSubclass(SimpleTestCase):
            pass

        class UnittestTestCaseSubclass(unittest.TestCase):
            pass

        decorated = override_settings(TEST='override')(SimpleTestCaseSubclass)
        self.assertIsInstance(decorated, type)
        self.assertTrue(issubclass(decorated, SimpleTestCase))

        with self.assertRaisesMessage(Exception, "Only subclasses of Django SimpleTestCase"):
            decorated = override_settings(TEST='override')(UnittestTestCaseSubclass)

    def test_signal_callback_context_manager(self):
        with self.assertRaises(AttributeError):
            getattr(settings, 'TEST')
        with self.settings(TEST='override'):
            self.assertEqual(self.testvalue, 'override')
        self.assertIsNone(self.testvalue)

    @override_settings(TEST='override')
    def test_signal_callback_decorator(self):
        self.assertEqual(self.testvalue, 'override')

    #
    # Regression tests for #10130: deleting settings.
    #

    def test_settings_delete(self):
        settings.TEST = 'test'
        self.assertEqual('test', settings.TEST)
        del settings.TEST
        with self.assertRaises(AttributeError):
            getattr(settings, 'TEST')

    def test_settings_delete_wrapped(self):
        with self.assertRaises(TypeError):
            delattr(settings, '_wrapped')

    def test_override_settings_delete(self):
        """
        Allow deletion of a setting in an overridden settings set (#18824)
        """
        previous_i18n = settings.USE_I18N
        previous_l10n = settings.USE_L10N
        with self.settings(USE_I18N=False):
            del settings.USE_I18N
            with self.assertRaises(AttributeError):
                getattr(settings, 'USE_I18N')
            # Should also work for a non-overridden setting
            del settings.USE_L10N
            with self.assertRaises(AttributeError):
                getattr(settings, 'USE_L10N')
            self.assertNotIn('USE_I18N', dir(settings))
            self.assertNotIn('USE_L10N', dir(settings))
        self.assertEqual(settings.USE_I18N, previous_i18n)
        self.assertEqual(settings.USE_L10N, previous_l10n)

    def test_override_settings_nested(self):
        """
        override_settings uses the actual _wrapped attribute at
        runtime, not when it was instantiated.
        """
        with self.assertRaises(AttributeError):
            getattr(settings, 'TEST')
        with self.assertRaises(AttributeError):
            getattr(settings, 'TEST2')

        inner = override_settings(TEST2='override')
        with override_settings(TEST='override'):
            self.assertEqual('override', settings.TEST)
            with inner:
                self.assertEqual('override', settings.TEST)
                self.assertEqual('override', settings.TEST2)
            # inner's __exit__ should have restored the settings of the outer
            # context manager, not those when the class was instantiated
            self.assertEqual('override', settings.TEST)
            with self.assertRaises(AttributeError):
                getattr(settings, 'TEST2')

        with self.assertRaises(AttributeError):
            getattr(settings, 'TEST')
        with self.assertRaises(AttributeError):
            getattr(settings, 'TEST2')


class TestComplexSettingOverride(SimpleTestCase):
    def setUp(self):
        self.old_warn_override_settings = signals.COMPLEX_OVERRIDE_SETTINGS.copy()
        signals.COMPLEX_OVERRIDE_SETTINGS.add('TEST_WARN')

    def tearDown(self):
        signals.COMPLEX_OVERRIDE_SETTINGS = self.old_warn_override_settings
        self.assertNotIn('TEST_WARN', signals.COMPLEX_OVERRIDE_SETTINGS)

    def test_complex_override_warning(self):
        """Regression test for #19031"""
        with warnings.catch_warnings(record=True) as w:
            warnings.simplefilter("always")
            with override_settings(TEST_WARN='override'):
                self.assertEqual(settings.TEST_WARN, 'override')
        self.assertEqual(len(w), 1)
        self.assertEqual(w[0].filename, __file__)
        self.assertEqual(str(w[0].message), 'Overriding setting TEST_WARN can lead to unexpected behavior.')


class SecureProxySslHeaderTest(SimpleTestCase):

    @override_settings(SECURE_PROXY_SSL_HEADER=None)
    def test_none(self):
        req = HttpRequest()
        self.assertIs(req.is_secure(), False)

    @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTOCOL', 'https'))
    def test_set_without_xheader(self):
        req = HttpRequest()
        self.assertIs(req.is_secure(), False)

    @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTOCOL', 'https'))
    def test_set_with_xheader_wrong(self):
        req = HttpRequest()
        req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'wrongvalue'
        self.assertIs(req.is_secure(), False)

    @override_settings(SECURE_PROXY_SSL_HEADER=('HTTP_X_FORWARDED_PROTOCOL', 'https'))
    def test_set_with_xheader_right(self):
        req = HttpRequest()
        req.META['HTTP_X_FORWARDED_PROTOCOL'] = 'https'
        self.assertIs(req.is_secure(), True)


class IsOverriddenTest(SimpleTestCase):
    def test_configure(self):
        s = LazySettings()
        s.configure(SECRET_KEY='foo')
        self.assertTrue(s.is_overridden('SECRET_KEY'))

    def test_module(self):
        settings_module = ModuleType('fake_settings_module')
        settings_module.SECRET_KEY = 'foo'
        sys.modules['fake_settings_module'] = settings_module
        try:
            s = Settings('fake_settings_module')
            self.assertTrue(s.is_overridden('SECRET_KEY'))
            self.assertFalse(s.is_overridden('ALLOWED_HOSTS'))
        finally:
            del sys.modules['fake_settings_module']

    def test_override(self):
        self.assertFalse(settings.is_overridden('ALLOWED_HOSTS'))
        with override_settings(ALLOWED_HOSTS=[]):
            self.assertTrue(settings.is_overridden('ALLOWED_HOSTS'))

    def test_unevaluated_lazysettings_repr(self):
        lazy_settings = LazySettings()
        expected = '<LazySettings [Unevaluated]>'
        self.assertEqual(repr(lazy_settings), expected)

    def test_evaluated_lazysettings_repr(self):
        lazy_settings = LazySettings()
        module = os.environ.get(ENVIRONMENT_VARIABLE)
        expected = '<LazySettings "%s">' % module
        # Force evaluation of the lazy object.
        lazy_settings.APPEND_SLASH
        self.assertEqual(repr(lazy_settings), expected)

    def test_usersettingsholder_repr(self):
        lazy_settings = LazySettings()
        lazy_settings.configure(APPEND_SLASH=False)
        expected = '<UserSettingsHolder>'
        self.assertEqual(repr(lazy_settings._wrapped), expected)

    def test_settings_repr(self):
        module = os.environ.get(ENVIRONMENT_VARIABLE)
        lazy_settings = Settings(module)
        expected = '<Settings "%s">' % module
        self.assertEqual(repr(lazy_settings), expected)


class TestListSettings(unittest.TestCase):
    """
    Make sure settings that should be lists or tuples throw
    ImproperlyConfigured if they are set to a string instead of a list or tuple.
    """
    list_or_tuple_settings = (
        "INSTALLED_APPS",
        "TEMPLATE_DIRS",
        "LOCALE_PATHS",
    )

    def test_tuple_settings(self):
        settings_module = ModuleType('fake_settings_module')
        settings_module.SECRET_KEY = 'foo'
        for setting in self.list_or_tuple_settings:
            setattr(settings_module, setting, ('non_list_or_tuple_value'))
            sys.modules['fake_settings_module'] = settings_module
            try:
                with self.assertRaises(ImproperlyConfigured):
                    Settings('fake_settings_module')
            finally:
                del sys.modules['fake_settings_module']
                delattr(settings_module, setting)
license: bsd-3-clause

repo_name: TeslaProject/external_chromium_org
path: mojo/public/tools/bindings/pylib/mojom/generate/pack.py
copies: 32
size: 4915
content:
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import module as mojom

# This module provides a mechanism for determining the packed order and offsets
# of a mojom.Struct.
#
# ps = pack.PackedStruct(struct)
# ps.packed_fields will access a list of PackedField objects, each of which
# will have an offset, a size and a bit (for mojom.BOOLs).


class PackedField(object):
  kind_to_size = {
      mojom.BOOL: 1,
      mojom.INT8: 1,
      mojom.UINT8: 1,
      mojom.INT16: 2,
      mojom.UINT16: 2,
      mojom.INT32: 4,
      mojom.UINT32: 4,
      mojom.FLOAT: 4,
      mojom.HANDLE: 4,
      mojom.MSGPIPE: 4,
      mojom.SHAREDBUFFER: 4,
      mojom.DCPIPE: 4,
      mojom.DPPIPE: 4,
      mojom.NULLABLE_HANDLE: 4,
      mojom.NULLABLE_MSGPIPE: 4,
      mojom.NULLABLE_SHAREDBUFFER: 4,
      mojom.NULLABLE_DCPIPE: 4,
      mojom.NULLABLE_DPPIPE: 4,
      mojom.INT64: 8,
      mojom.UINT64: 8,
      mojom.DOUBLE: 8,
      mojom.STRING: 8,
      mojom.NULLABLE_STRING: 8
  }

  @classmethod
  def GetSizeForKind(cls, kind):
    if isinstance(kind, (mojom.Array, mojom.Struct, mojom.FixedArray)):
      return 8
    if isinstance(kind, mojom.Interface) or \
       isinstance(kind, mojom.InterfaceRequest):
      kind = mojom.MSGPIPE
    if isinstance(kind, mojom.Enum):
      # TODO(mpcomplete): what about big enums?
      return cls.kind_to_size[mojom.INT32]
    if not kind in cls.kind_to_size:
      raise Exception("Invalid kind: %s" % kind.spec)
    return cls.kind_to_size[kind]

  def __init__(self, field, ordinal):
    self.field = field
    self.ordinal = ordinal
    self.size = self.GetSizeForKind(field.kind)
    self.offset = None
    self.bit = None


# Returns the pad necessary to reserve space for alignment of |size|.
def GetPad(offset, size):
  return (size - (offset % size)) % size


# Returns a 2-tuple of the field offset and bit (for BOOLs)
def GetFieldOffset(field, last_field):
  if field.field.kind == mojom.BOOL and \
     last_field.field.kind == mojom.BOOL and \
     last_field.bit < 7:
    return (last_field.offset, last_field.bit + 1)

  offset = last_field.offset + last_field.size
  pad = GetPad(offset, field.size)
  return (offset + pad, 0)


class PackedStruct(object):
  def __init__(self, struct):
    self.struct = struct
    self.packed_fields = []

    # No fields.
    if (len(struct.fields) == 0):
      return

    # Start by sorting by ordinal.
    src_fields = []
    ordinal = 0
    for field in struct.fields:
      if field.ordinal is not None:
        ordinal = field.ordinal
      src_fields.append(PackedField(field, ordinal))
      ordinal += 1
    src_fields.sort(key=lambda field: field.ordinal)

    src_field = src_fields[0]
    src_field.offset = 0
    src_field.bit = 0
    # dst_fields will contain each of the fields, in increasing offset order.
    dst_fields = self.packed_fields
    dst_fields.append(src_field)

    # Then find first slot that each field will fit.
    for src_field in src_fields[1:]:
      last_field = dst_fields[0]
      for i in xrange(1, len(dst_fields)):
        next_field = dst_fields[i]
        offset, bit = GetFieldOffset(src_field, last_field)
        if offset + src_field.size <= next_field.offset:
          # Found hole.
          src_field.offset = offset
          src_field.bit = bit
          dst_fields.insert(i, src_field)
          break
        last_field = next_field
      if src_field.offset is None:
        # Add to end
        src_field.offset, src_field.bit = GetFieldOffset(src_field, last_field)
        dst_fields.append(src_field)

  def GetTotalSize(self):
    if not self.packed_fields:
      return 0
    last_field = self.packed_fields[-1]
    offset = last_field.offset + last_field.size
    pad = GetPad(offset, 8)
    return offset + pad


class ByteInfo(object):
  def __init__(self):
    self.is_padding = False
    self.packed_fields = []


def GetByteLayout(packed_struct):
  bytes = [ByteInfo() for i in xrange(packed_struct.GetTotalSize())]

  limit_of_previous_field = 0
  for packed_field in packed_struct.packed_fields:
    for i in xrange(limit_of_previous_field, packed_field.offset):
      bytes[i].is_padding = True
    bytes[packed_field.offset].packed_fields.append(packed_field)
    limit_of_previous_field = packed_field.offset + packed_field.size

  for i in xrange(limit_of_previous_field, len(bytes)):
    bytes[i].is_padding = True

  for byte in bytes:
    # A given byte cannot both be padding and have fields packed into it.
    assert not (byte.is_padding and byte.packed_fields)

  return bytes
license: bsd-3-clause

repo_name: ahmadio/edx-platform
path: cms/djangoapps/contentstore/features/component_settings_editor_helpers.py
copies: 61
size: 9419
content:
# disable missing docstring
# pylint: disable=missing-docstring

from lettuce import world
from nose.tools import assert_equal, assert_in  # pylint: disable=no-name-in-module
from terrain.steps import reload_the_page
from common import type_in_codemirror
from selenium.webdriver.common.keys import Keys


@world.absorb
def create_component_instance(step, category, component_type=None,
                              is_advanced=False, advanced_component=None):
    """
    Create a new component in a Unit.

    Parameters
    ----------
    category: component type (discussion, html, problem, video, advanced)
    component_type: for components with multiple templates, the link text in the menu
    is_advanced: for problems, is the desired component under the advanced menu?
    advanced_component: for advanced components, the related value of policy key 'advanced_modules'
    """
    assert_in(category, ['advanced', 'problem', 'html', 'video', 'discussion'])

    component_button_css = 'span.large-{}-icon'.format(category.lower())
    if category == 'problem':
        module_css = 'div.xmodule_CapaModule'
    elif category == 'advanced':
        module_css = 'div.xmodule_{}Module'.format(advanced_component.title())
    else:
        module_css = 'div.xmodule_{}Module'.format(category.title())

    # Count how many of that module is on the page. Later we will
    # assert that one more was added.
    # We need to use world.browser.find_by_css instead of world.css_find
    # because it's ok if there are currently zero of them.
    module_count_before = len(world.browser.find_by_css(module_css))

    # Disable the jquery animation for the transition to the menus.
    world.disable_jquery_animations()
    world.css_click(component_button_css)

    if category in ('problem', 'html', 'advanced'):
        world.wait_for_invisible(component_button_css)
        click_component_from_menu(category, component_type, is_advanced)

    expected_count = module_count_before + 1
    world.wait_for(
        lambda _: len(world.css_find(module_css)) == expected_count,
        timeout=20
    )


@world.absorb
def click_new_component_button(step, component_button_css):
    step.given('I have clicked the new unit button')
    world.css_click(component_button_css)


def _click_advanced():
    css = 'ul.problem-type-tabs a[href="#tab2"]'
    world.css_click(css)

    # Wait for the advanced tab items to be displayed
    tab2_css = 'div.ui-tabs-panel#tab2'
    world.wait_for_visible(tab2_css)


def _find_matching_link(category, component_type):
    """
    Find the link with the specified text. There should be one and only one.
    """
    # The tab shows links for the given category
    links = world.css_find('div.new-component-{} a'.format(category))

    # Find the link whose text matches what you're looking for
    matched_links = [link for link in links if link.text == component_type]

    # There should be one and only one
    assert_equal(len(matched_links), 1)
    return matched_links[0]


def click_component_from_menu(category, component_type, is_advanced):
    """
    Creates a component for a category with more than one template,
    i.e. HTML and Problem.
    For some problem types, it is necessary to click to the Advanced tab.
    The component_type is the link text, e.g. "Blank Common Problem"
    """
    if is_advanced:
        # Sometimes this click does not work if you go too fast.
        world.retry_on_exception(
            _click_advanced,
            ignored_exceptions=AssertionError,
        )

    # Retry this in case the list is empty because you tried too fast.
    link = world.retry_on_exception(
        lambda: _find_matching_link(category, component_type),
        ignored_exceptions=AssertionError
    )

    # Wait for the link to be clickable. If you go too fast it is not.
    world.retry_on_exception(lambda: link.click())


@world.absorb
def edit_component_and_select_settings():
    world.edit_component()
    world.ensure_settings_visible()


@world.absorb
def ensure_settings_visible():
    # Select the 'settings' tab if there is one (it isn't displayed if it is the only option)
    settings_button = world.browser.find_by_css('.settings-button')
    if len(settings_button) > 0:
        world.css_click('.settings-button')


@world.absorb
def edit_component(index=0):
    # Verify that the "loading" indication has been hidden.
    world.wait_for_loading()
    # Verify that the "edit" button is present.
    world.wait_for(lambda _driver: world.css_visible('a.edit-button'))
    world.css_click('a.edit-button', index)
    world.wait_for_ajax_complete()


@world.absorb
def select_editor_tab(tab_name):
    editor_tabs = world.browser.find_by_css('.editor-tabs a')
    expected_tab_text = tab_name.strip().upper()
    matching_tabs = [tab for tab in editor_tabs if tab.text.upper() == expected_tab_text]
    assert len(matching_tabs) == 1
    tab = matching_tabs[0]
    tab.click()
    world.wait_for_ajax_complete()


def enter_xml_in_advanced_problem(step, text):
    """
    Edits an advanced problem (assumes only one on page),
    types the provided XML, and saves the component.
    """
    world.edit_component()
    type_in_codemirror(0, text)
    world.save_component()


@world.absorb
def verify_setting_entry(setting, display_name, value, explicitly_set):
    """
    Verify the capa module fields are set as expected in the
    Advanced Settings editor.

    Parameters
    ----------
    setting: the WebDriverElement object found in the browser
    display_name: the string expected as the label
    value: the expected field value
    explicitly_set: True if the value is expected to have been explicitly set
        for the problem, rather than derived from the defaults. This is
        verified by the existence of a "Clear" button next to the field value.
    """
    assert_equal(display_name, setting.find_by_css('.setting-label')[0].html.strip())

    # Check if the web object is a list type
    # If so, we use a slightly different mechanism for determining its value
    if setting.has_class('metadata-list-enum') or setting.has_class('metadata-dict') or setting.has_class('metadata-video-translations'):
        list_value = ', '.join(ele.value for ele in setting.find_by_css('.list-settings-item'))
        assert_equal(value, list_value)
    elif setting.has_class('metadata-videolist-enum'):
        list_value = ', '.join(ele.find_by_css('input')[0].value for ele in setting.find_by_css('.videolist-settings-item'))
        assert_equal(value, list_value)
    else:
        assert_equal(value, setting.find_by_css('.setting-input')[0].value)

    # VideoList doesn't have clear button
    if not setting.has_class('metadata-videolist-enum'):
        settingClearButton = setting.find_by_css('.setting-clear')[0]
        assert_equal(explicitly_set, settingClearButton.has_class('active'))
        assert_equal(not explicitly_set, settingClearButton.has_class('inactive'))


@world.absorb
def verify_all_setting_entries(expected_entries):
    settings = world.browser.find_by_css('.wrapper-comp-setting')
    assert_equal(len(expected_entries), len(settings))
    for (counter, setting) in enumerate(settings):
        world.verify_setting_entry(
            setting, expected_entries[counter][0],
            expected_entries[counter][1], expected_entries[counter][2]
        )


@world.absorb
def save_component():
    world.css_click("a.action-save")
    world.wait_for_ajax_complete()


@world.absorb
def save_component_and_reopen(step):
    save_component()
    # We have a known issue that modifications are still shown within the edit
    # window after cancel (though they are not persisted). Refresh the browser
    # to make sure the changes WERE persisted after Save.
    reload_the_page(step)
    edit_component_and_select_settings()


@world.absorb
def cancel_component(step):
    world.css_click("a.action-cancel")
    # We have a known issue that modifications are still shown within the edit
    # window after cancel (though they are not persisted). Refresh the browser
    # to make sure the changes were not persisted.
    reload_the_page(step)


@world.absorb
def revert_setting_entry(label):
    get_setting_entry(label).find_by_css('.setting-clear')[0].click()


@world.absorb
def get_setting_entry(label):
    def get_setting():
        settings = world.css_find('.wrapper-comp-setting')
        for setting in settings:
            if setting.find_by_css('.setting-label')[0].value == label:
                return setting
        return None
    return world.retry_on_exception(get_setting)


@world.absorb
def get_setting_entry_index(label):
    def get_index():
        settings = world.css_find('.metadata_edit .wrapper-comp-setting')
        for index, setting in enumerate(settings):
            if setting.find_by_css('.setting-label')[0].value == label:
                return index
        return None
    return world.retry_on_exception(get_index)


@world.absorb
def set_field_value(index, value):
    """
    Set the field to the specified value.

    Note: we cannot use css_fill here because the value is not set
    until after you move away from that field.
    Instead we will find the element, set its value, then hit the Tab key
    to get to the next field.
    """
    elem = world.css_find('.metadata_edit div.wrapper-comp-setting input.setting-input')[index]
    elem.value = value
    elem.type(Keys.TAB)
license: agpl-3.0

repo_name: sbellem/django
path: django/core/files/utils.py
copies: 395
size: 1338
content:
from django.utils import six


class FileProxyMixin(object):
    """
    A mixin class used to forward file methods to an underlying file
    object.  The internal file object has to be called "file"::

        class FileProxy(FileProxyMixin):
            def __init__(self, file):
                self.file = file
    """
    encoding = property(lambda self: self.file.encoding)
    fileno = property(lambda self: self.file.fileno)
    flush = property(lambda self: self.file.flush)
    isatty = property(lambda self: self.file.isatty)
    newlines = property(lambda self: self.file.newlines)
    read = property(lambda self: self.file.read)
    readinto = property(lambda self: self.file.readinto)
    readline = property(lambda self: self.file.readline)
    readlines = property(lambda self: self.file.readlines)
    seek = property(lambda self: self.file.seek)
    softspace = property(lambda self: self.file.softspace)
    tell = property(lambda self: self.file.tell)
    truncate = property(lambda self: self.file.truncate)
    write = property(lambda self: self.file.write)
    writelines = property(lambda self: self.file.writelines)
    xreadlines = property(lambda self: self.file.xreadlines)

    if six.PY3:
        seekable = property(lambda self: self.file.seekable)

    def __iter__(self):
        return iter(self.file)
license: bsd-3-clause

repo_name: carroarmato0/jenkins-job-builder
path: tests/builders/test_builders.py
copies: 37
size: 1098
content:
# Joint copyright:
#  - Copyright 2012,2013 Wikimedia Foundation
#  - Copyright 2012,2013 Antoine "hashar" Musso
#  - Copyright 2013 Arnaud Fabre
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#   http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import os
from testtools import TestCase
from testscenarios.testcase import TestWithScenarios

from jenkins_jobs.modules import builders
from tests.base import get_scenarios, BaseTestCase


class TestCaseModuleBuilders(TestWithScenarios, TestCase, BaseTestCase):
    fixtures_path = os.path.join(os.path.dirname(__file__), 'fixtures')
    scenarios = get_scenarios(fixtures_path)
    klass = builders.Builders
license: apache-2.0

repo_name: edxnercel/edx-platform
path: lms/envs/test.py
copies: 20
size: 16666
content:
# -*- coding: utf-8 -*-
"""
This config file runs the simplest dev environment using sqlite, and db-based
sessions. Assumes structure:

/envroot/
        /db   # This is where it'll write the database file
        /edx-platform  # The location of this repo
        /log  # Where we're going to write log files
"""

# We intentionally define lots of variables that aren't used, and
# want to import all variables from base settings files
# pylint: disable=wildcard-import, unused-wildcard-import

# Pylint gets confused by path.py instances, which report themselves as class
# objects. As a result, pylint applies the wrong regex in validating names,
# and throws spurious errors. Therefore, we disable invalid-name checking.
# pylint: disable=invalid-name

from .common import *
import os
from path import path
from uuid import uuid4
from warnings import filterwarnings, simplefilter
from openedx.core.lib.tempdir import mkdtemp_clean

# Silence noisy logs to make troubleshooting easier when tests fail.
import logging
LOG_OVERRIDES = [
    ('factory.generate', logging.ERROR),
    ('factory.containers', logging.ERROR),
]
for log_name, log_level in LOG_OVERRIDES:
    logging.getLogger(log_name).setLevel(log_level)

# mongo connection settings
MONGO_PORT_NUM = int(os.environ.get('EDXAPP_TEST_MONGO_PORT', '27017'))
MONGO_HOST = os.environ.get('EDXAPP_TEST_MONGO_HOST', 'localhost')

os.environ['DJANGO_LIVE_TEST_SERVER_ADDRESS'] = 'localhost:8000-9000'

THIS_UUID = uuid4().hex[:5]

# can't test start dates with this True, but on the other hand,
# can test everything else :)
FEATURES['DISABLE_START_DATES'] = True

# Most tests don't use the discussion service, so we turn it off to speed them up.
# Tests that do can enable this flag, but must use the UrlResetMixin class to force urls.py
# to reload. For consistency in user-experience, keep the value of this setting in sync with
# the one in cms/envs/test.py
FEATURES['ENABLE_DISCUSSION_SERVICE'] = False

FEATURES['ENABLE_SERVICE_STATUS'] = True

FEATURES['ENABLE_HINTER_INSTRUCTOR_VIEW'] = True

FEATURES['ENABLE_INSTRUCTOR_LEGACY_DASHBOARD'] = True

FEATURES['ENABLE_SHOPPING_CART'] = True

FEATURES['ENABLE_VERIFIED_CERTIFICATES'] = True

# Enable this feature for course staff grade downloads, to enable acceptance tests
FEATURES['ENABLE_S3_GRADE_DOWNLOADS'] = True
FEATURES['ALLOW_COURSE_STAFF_GRADE_DOWNLOADS'] = True

# Toggles embargo on for testing
FEATURES['EMBARGO'] = True

FEATURES['ENABLE_COMBINED_LOGIN_REGISTRATION'] = True

# Need wiki for courseware views to work. TODO (vshnayder): shouldn't need it.
WIKI_ENABLED = True

# Enable a parental consent age limit for testing
PARENTAL_CONSENT_AGE_LIMIT = 13

# Makes the tests run much faster...
SOUTH_TESTS_MIGRATE = False  # To disable migrations and use syncdb instead

# Nose Test Runner
TEST_RUNNER = 'django_nose.NoseTestSuiteRunner'

_SYSTEM = 'lms'

_REPORT_DIR = REPO_ROOT / 'reports' / _SYSTEM
_REPORT_DIR.makedirs_p()
_NOSEID_DIR = REPO_ROOT / '.testids' / _SYSTEM
_NOSEID_DIR.makedirs_p()

NOSE_ARGS = [
    '--id-file', _NOSEID_DIR / 'noseids',
    '--xunit-file', _REPORT_DIR / 'nosetests.xml',
]

# Local Directories
TEST_ROOT = path("test_root")
# Want static files in the same dir for running on jenkins.
STATIC_ROOT = TEST_ROOT / "staticfiles"

STATUS_MESSAGE_PATH = TEST_ROOT / "status_message.json"

COURSES_ROOT = TEST_ROOT / "data"
DATA_DIR = COURSES_ROOT

COMMON_TEST_DATA_ROOT = COMMON_ROOT / "test" / "data"
# Where the content data is checked out. This may not exist on jenkins.
GITHUB_REPO_ROOT = ENV_ROOT / "data"

USE_I18N = True
LANGUAGE_CODE = 'en'  # tests assume they will get English.

XQUEUE_INTERFACE = {
    "url": "http://sandbox-xqueue.edx.org",
    "django_auth": {
        "username": "lms",
        "password": "***REMOVED***"
    },
    "basic_auth": ('anant', 'agarwal'),
}
XQUEUE_WAITTIME_BETWEEN_REQUESTS = 5  # seconds

# Don't rely on a real staff grading backend
MOCK_STAFF_GRADING = True
MOCK_PEER_GRADING = True

# TODO (cpennington): We need to figure out how envs/test.py can inject things
# into common.py so that we don't have to repeat this sort of thing
STATICFILES_DIRS = [
    COMMON_ROOT / "static",
    PROJECT_ROOT / "static",
]
STATICFILES_DIRS += [
    (course_dir, COMMON_TEST_DATA_ROOT / course_dir)
    for course_dir in os.listdir(COMMON_TEST_DATA_ROOT)
    if os.path.isdir(COMMON_TEST_DATA_ROOT / course_dir)
]

# Avoid having to run collectstatic before the unit test suite
# If we don't add these settings, then Django templates that can't
# find pipelined assets will raise a ValueError.
# http://stackoverflow.com/questions/12816941/unit-testing-with-django-pipeline
STATICFILES_STORAGE = 'pipeline.storage.NonPackagingPipelineStorage'
PIPELINE_ENABLED = False

update_module_store_settings(
    MODULESTORE,
    module_store_options={
        'fs_root': TEST_ROOT / "data",
    },
    xml_store_options={
        'data_dir': mkdtemp_clean(dir=TEST_ROOT),  # never inadvertently load all the XML courses
    },
    doc_store_settings={
        'host': MONGO_HOST,
        'port': MONGO_PORT_NUM,
        'db': 'test_xmodule',
        'collection': 'test_modulestore{0}'.format(THIS_UUID),
    },
)

CONTENTSTORE = {
    'ENGINE': 'xmodule.contentstore.mongo.MongoContentStore',
    'DOC_STORE_CONFIG': {
        'host': MONGO_HOST,
        'db': 'xcontent',
        'port': MONGO_PORT_NUM,
    }
}

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': TEST_ROOT / 'db' / 'edx.db'
    },
}

CACHES = {
    # This is the cache used for most things.
    # In staging/prod envs, the sessions also live here.
    'default': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'edx_loc_mem_cache',
        'KEY_FUNCTION': 'util.memcache.safe_key',
    },

    # The general cache is what you get if you use our util.cache. It's used for
    # things like caching the course.xml file for different A/B test groups.
    # We set it to be a DummyCache to force reloading of course.xml in dev.
    # In staging environments, we would grab VERSION from data uploaded by the
    # push process.
    'general': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
        'KEY_PREFIX': 'general',
        'VERSION': 4,
        'KEY_FUNCTION': 'util.memcache.safe_key',
    },

    'mongo_metadata_inheritance': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': os.path.join(tempfile.gettempdir(), 'mongo_metadata_inheritance'),
        'TIMEOUT': 300,
        'KEY_FUNCTION': 'util.memcache.safe_key',
    },
    'loc_cache': {
        'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
        'LOCATION': 'edx_location_mem_cache',
    },
    'course_structure_cache': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    },
}

# Dummy secret key for dev
SECRET_KEY = '85920908f28904ed733fe576320db18cabd7b6cd'

# hide ratelimit warnings while running tests
filterwarnings('ignore', message='No request passed to the backend, unable to rate-limit')

# Ignore deprecation warnings (so we don't clutter Jenkins builds/production)
# https://docs.python.org/2/library/warnings.html#the-warnings-filter
# Change to "default" to see the first instance of each hit
# or "error" to convert all into errors
simplefilter('ignore')

############################# SECURITY SETTINGS ################################
# Default to advanced security in common.py, so tests can reset here to use
# a simpler security model
FEATURES['ENFORCE_PASSWORD_POLICY'] = False
FEATURES['ENABLE_MAX_FAILED_LOGIN_ATTEMPTS'] = False
FEATURES['SQUELCH_PII_IN_LOGS'] = False
FEATURES['PREVENT_CONCURRENT_LOGINS'] = False
FEATURES['ADVANCED_SECURITY'] = False
PASSWORD_MIN_LENGTH = None
PASSWORD_COMPLEXITY = {}

######### Third-party auth ##########
FEATURES['ENABLE_THIRD_PARTY_AUTH'] = True

AUTHENTICATION_BACKENDS = (
    'social.backends.google.GoogleOAuth2',
    'social.backends.linkedin.LinkedinOAuth2',
    'social.backends.facebook.FacebookOAuth2',
    'third_party_auth.dummy.DummyBackend',
    'third_party_auth.saml.SAMLAuthBackend',
) + AUTHENTICATION_BACKENDS

################################## OPENID #####################################
FEATURES['AUTH_USE_OPENID'] = True
FEATURES['AUTH_USE_OPENID_PROVIDER'] = True

################################## SHIB #######################################
FEATURES['AUTH_USE_SHIB'] = True
FEATURES['SHIB_DISABLE_TOS'] = True
FEATURES['RESTRICT_ENROLL_BY_REG_METHOD'] = True

OPENID_CREATE_USERS = False
OPENID_UPDATE_DETAILS_FROM_SREG = True
OPENID_USE_AS_ADMIN_LOGIN = False
OPENID_PROVIDER_TRUSTED_ROOTS = ['*']

############################## OAUTH2 Provider ################################
FEATURES['ENABLE_OAUTH2_PROVIDER'] = True

########################### External REST APIs #################################
FEATURES['ENABLE_MOBILE_REST_API'] = True
FEATURES['ENABLE_MOBILE_SOCIAL_FACEBOOK_FEATURES'] = True
FEATURES['ENABLE_VIDEO_ABSTRACTION_LAYER_API'] = True
FEATURES['ENABLE_COURSE_BLOCKS_NAVIGATION_API'] = True
FEATURES['ENABLE_RENDER_XBLOCK_API'] = True

###################### Payment ##############################
# Enable fake payment processing page
FEATURES['ENABLE_PAYMENT_FAKE'] = True

# Configure the payment processor to use the fake processing page
# Since both the fake payment page and the shoppingcart app are using
# the same settings, we can generate this randomly and guarantee
# that they are using the same secret.
from random import choice from string import letters, digits, punctuation # pylint: disable=deprecated-module RANDOM_SHARED_SECRET = ''.join( choice(letters + digits + punctuation) for x in range(250) ) CC_PROCESSOR_NAME = 'CyberSource2' CC_PROCESSOR['CyberSource2']['SECRET_KEY'] = RANDOM_SHARED_SECRET CC_PROCESSOR['CyberSource2']['ACCESS_KEY'] = "0123456789012345678901" CC_PROCESSOR['CyberSource2']['PROFILE_ID'] = "edx" CC_PROCESSOR['CyberSource2']['PURCHASE_ENDPOINT'] = "/shoppingcart/payment_fake" FEATURES['STORE_BILLING_INFO'] = True ########################### SYSADMIN DASHBOARD ################################ FEATURES['ENABLE_SYSADMIN_DASHBOARD'] = True GIT_REPO_DIR = TEST_ROOT / "course_repos" ################################# CELERY ###################################### CELERY_ALWAYS_EAGER = True CELERY_RESULT_BACKEND = 'djcelery.backends.cache:CacheBackend' ######################### MARKETING SITE ############################### MKTG_URL_LINK_MAP = { 'ABOUT': 'about', 'CONTACT': 'contact', 'FAQ': 'help', 'COURSES': 'courses', 'ROOT': 'root', 'TOS': 'tos', 'HONOR': 'honor', 'PRIVACY': 'privacy', 'JOBS': 'jobs', 'NEWS': 'news', 'PRESS': 'press', 'BLOG': 'blog', 'DONATE': 'donate', # Verified Certificates 'WHAT_IS_VERIFIED_CERT': 'verified-certificate', } ############################ STATIC FILES ############################# DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage' MEDIA_ROOT = TEST_ROOT / "uploads" MEDIA_URL = "/static/uploads/" STATICFILES_DIRS.append(("uploads", MEDIA_ROOT)) _NEW_STATICFILES_DIRS = [] # Strip out any static files that aren't in the repository root # so that the tests can run with only the edx-platform directory checked out for static_dir in STATICFILES_DIRS: # Handle both tuples and non-tuple directory definitions try: _, data_dir = static_dir except ValueError: data_dir = static_dir if data_dir.startswith(REPO_ROOT): _NEW_STATICFILES_DIRS.append(static_dir) STATICFILES_DIRS = _NEW_STATICFILES_DIRS FILE_UPLOAD_TEMP_DIR = TEST_ROOT / "uploads" FILE_UPLOAD_HANDLERS = ( 'django.core.files.uploadhandler.MemoryFileUploadHandler', 'django.core.files.uploadhandler.TemporaryFileUploadHandler', ) ########################### Server Ports ################################### # These ports are carefully chosen so that if the browser needs to # access them, they will be available through the SauceLabs SSH tunnel LETTUCE_SERVER_PORT = 8003 XQUEUE_PORT = 8040 YOUTUBE_PORT = 8031 LTI_PORT = 8765 VIDEO_SOURCE_PORT = 8777 ################### Make tests faster #http://slacy.com/blog/2012/04/make-your-tests-faster-in-django-1-4/ PASSWORD_HASHERS = ( # 'django.contrib.auth.hashers.PBKDF2PasswordHasher', # 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher', # 'django.contrib.auth.hashers.BCryptPasswordHasher', 'django.contrib.auth.hashers.SHA1PasswordHasher', 'django.contrib.auth.hashers.MD5PasswordHasher', # 'django.contrib.auth.hashers.CryptPasswordHasher', ) ### This enables the Metrics tab for the Instructor dashboard ########### FEATURES['CLASS_DASHBOARD'] = True ################### Make tests quieter # OpenID spews messages like this to stderr, we don't need to see them: # Generated checkid_setup request to http://testserver/openid/provider/login/ with assocication {HMAC-SHA1}{51d49995}{s/kRmA==} import openid.oidutil openid.oidutil.log = lambda message, level=0: None PLATFORM_NAME = "edX" SITE_NAME = "edx.org" # set up some testing for microsites MICROSITE_CONFIGURATION = { "test_microsite": { "domain_prefix": "testmicrosite", "university": 
"test_microsite", "platform_name": "Test Microsite", "logo_image_url": "test_microsite/images/header-logo.png", "email_from_address": "test_microsite@edx.org", "payment_support_email": "test_microsite@edx.org", "ENABLE_MKTG_SITE": False, "SITE_NAME": "test_microsite.localhost", "course_org_filter": "TestMicrositeX", "course_about_show_social_links": False, "css_overrides_file": "test_microsite/css/test_microsite.css", "show_partners": False, "show_homepage_promo_video": False, "course_index_overlay_text": "This is a Test Microsite Overlay Text.", "course_index_overlay_logo_file": "test_microsite/images/header-logo.png", "homepage_overlay_html": "<h1>This is a Test Microsite Overlay HTML</h1>", "ALWAYS_REDIRECT_HOMEPAGE_TO_DASHBOARD_FOR_AUTHENTICATED_USER": False, "COURSE_CATALOG_VISIBILITY_PERMISSION": "see_in_catalog", "COURSE_ABOUT_VISIBILITY_PERMISSION": "see_about_page", "ENABLE_SHOPPING_CART": True, "ENABLE_PAID_COURSE_REGISTRATION": True, "SESSION_COOKIE_DOMAIN": "test_microsite.localhost", }, "default": { "university": "default_university", "domain_prefix": "www", } } MICROSITE_ROOT_DIR = COMMON_ROOT / 'test' / 'test_microsites' MICROSITE_TEST_HOSTNAME = 'testmicrosite.testserver' FEATURES['USE_MICROSITES'] = True # add extra template directory for test-only templates MAKO_TEMPLATES['main'].extend([ COMMON_ROOT / 'test' / 'templates' ]) # Setting for the testing of Software Secure Result Callback VERIFY_STUDENT["SOFTWARE_SECURE"] = { "API_ACCESS_KEY": "BBBBBBBBBBBBBBBBBBBB", "API_SECRET_KEY": "CCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCCC", } VIDEO_CDN_URL = { 'CN': 'http://api.xuetangx.com/edx/video?s3_url=' } ######### dashboard git log settings ######### MONGODB_LOG = { 'host': MONGO_HOST, 'port': MONGO_PORT_NUM, 'user': '', 'password': '', 'db': 'xlog', } # Enable EdxNotes for tests. FEATURES['ENABLE_EDXNOTES'] = True # Enable teams feature for tests. FEATURES['ENABLE_TEAMS'] = True # Add milestones to Installed apps for testing INSTALLED_APPS += ('milestones', 'openedx.core.djangoapps.call_stack_manager') # Enable courseware search for tests FEATURES['ENABLE_COURSEWARE_SEARCH'] = True # Enable dashboard search for tests FEATURES['ENABLE_DASHBOARD_SEARCH'] = True # Use MockSearchEngine as the search engine for test scenario SEARCH_ENGINE = "search.tests.mock_search_engine.MockSearchEngine" FACEBOOK_APP_SECRET = "Test" FACEBOOK_APP_ID = "Test" FACEBOOK_API_VERSION = "v2.2" ######### custom courses ######### INSTALLED_APPS += ('ccx',) FEATURES['CUSTOM_COURSES_EDX'] = True # Set dummy values for profile image settings. PROFILE_IMAGE_BACKEND = { 'class': 'storages.backends.overwrite.OverwriteStorage', 'options': { 'location': MEDIA_ROOT, 'base_url': 'http://example-storage.com/profile-images/', }, } PROFILE_IMAGE_DEFAULT_FILENAME = 'default' PROFILE_IMAGE_DEFAULT_FILE_EXTENSION = 'png' PROFILE_IMAGE_SECRET_KEY = 'secret' PROFILE_IMAGE_MAX_BYTES = 1024 * 1024 PROFILE_IMAGE_MIN_BYTES = 100 # Enable the LTI provider feature for testing FEATURES['ENABLE_LTI_PROVIDER'] = True INSTALLED_APPS += ('lti_provider',) AUTHENTICATION_BACKENDS += ('lti_provider.users.LtiBackend',)
agpl-3.0
genenetwork/genenetwork2_diet
wqflask/flask_security/changeable.py
4
1267
# -*- coding: utf-8 -*-
"""
    flask.ext.security.changeable
    ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

    Flask-Security changeable module

    :copyright: (c) 2012 by Matt Wright.
    :author: Eskil Heyn Olsen
    :license: MIT, see LICENSE for more details.
"""

from flask import current_app as app
from werkzeug.local import LocalProxy

from .signals import password_changed
from .utils import send_mail, encrypt_password, config_value

# Convenient references
_security = LocalProxy(lambda: app.extensions['security'])

_datastore = LocalProxy(lambda: _security.datastore)


def send_password_changed_notice(user):
    """Sends the password changed notice email for the specified user.

    :param user: The user to send the notice to
    """
    send_mail(config_value('EMAIL_SUBJECT_PASSWORD_CHANGE_NOTICE'),
              user.email, 'change_notice', user=user)


def change_user_password(user, password):
    """Change the specified user's password.

    :param user: The user whose password should be changed
    :param password: The unencrypted new password
    """
    user.password = encrypt_password(password)
    _datastore.put(user)
    send_password_changed_notice(user)
    password_changed.send(user, app=app._get_current_object())
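# --- Illustrative usage (not part of the original module) ---
# A minimal sketch, assuming a Flask-Security view with already-validated
# form input; `app`, `current_user` (from flask_login) and `form` are
# placeholders that are not defined in this file.
#
#     from flask_security.changeable import change_user_password
#
#     @app.route('/change-password', methods=['POST'])
#     def change_password_view():
#         change_user_password(current_user, form.new_password.data)
#         _datastore.commit()  # persist, if the datastore batches writes
#         return 'password updated'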
agpl-3.0
blacktear23/django
django/contrib/auth/admin.py
153
6848
from django.db import transaction from django.conf import settings from django.contrib import admin from django.contrib.auth.forms import UserCreationForm, UserChangeForm, AdminPasswordChangeForm from django.contrib.auth.models import User, Group from django.contrib import messages from django.core.exceptions import PermissionDenied from django.http import HttpResponseRedirect, Http404 from django.shortcuts import render_to_response, get_object_or_404 from django.template import RequestContext from django.utils.html import escape from django.utils.decorators import method_decorator from django.utils.translation import ugettext, ugettext_lazy as _ from django.views.decorators.csrf import csrf_protect csrf_protect_m = method_decorator(csrf_protect) class GroupAdmin(admin.ModelAdmin): search_fields = ('name',) ordering = ('name',) filter_horizontal = ('permissions',) class UserAdmin(admin.ModelAdmin): add_form_template = 'admin/auth/user/add_form.html' change_user_password_template = None fieldsets = ( (None, {'fields': ('username', 'password')}), (_('Personal info'), {'fields': ('first_name', 'last_name', 'email')}), (_('Permissions'), {'fields': ('is_active', 'is_staff', 'is_superuser', 'user_permissions')}), (_('Important dates'), {'fields': ('last_login', 'date_joined')}), (_('Groups'), {'fields': ('groups',)}), ) add_fieldsets = ( (None, { 'classes': ('wide',), 'fields': ('username', 'password1', 'password2')} ), ) form = UserChangeForm add_form = UserCreationForm change_password_form = AdminPasswordChangeForm list_display = ('username', 'email', 'first_name', 'last_name', 'is_staff') list_filter = ('is_staff', 'is_superuser', 'is_active') search_fields = ('username', 'first_name', 'last_name', 'email') ordering = ('username',) filter_horizontal = ('user_permissions',) def __call__(self, request, url): # this should not be here, but must be due to the way __call__ routes # in ModelAdmin. if url is None: return self.changelist_view(request) if url.endswith('password'): return self.user_change_password(request, url.split('/')[0]) return super(UserAdmin, self).__call__(request, url) def get_fieldsets(self, request, obj=None): if not obj: return self.add_fieldsets return super(UserAdmin, self).get_fieldsets(request, obj) def get_form(self, request, obj=None, **kwargs): """ Use special form during user creation """ defaults = {} if obj is None: defaults.update({ 'form': self.add_form, 'fields': admin.util.flatten_fieldsets(self.add_fieldsets), }) defaults.update(kwargs) return super(UserAdmin, self).get_form(request, obj, **defaults) def get_urls(self): from django.conf.urls.defaults import patterns return patterns('', (r'^(\d+)/password/$', self.admin_site.admin_view(self.user_change_password)) ) + super(UserAdmin, self).get_urls() @csrf_protect_m @transaction.commit_on_success def add_view(self, request, form_url='', extra_context=None): # It's an error for a user to have add permission but NOT change # permission for users. If we allowed such users to add users, they # could create superusers, which would mean they would essentially have # the permission to change users. To avoid the problem entirely, we # disallow users from adding users if they don't have change # permission. if not self.has_change_permission(request): if self.has_add_permission(request) and settings.DEBUG: # Raise Http404 in debug mode so that the user gets a helpful # error message. raise Http404('Your user does not have the "Change user" permission. 
In order to add users, Django requires that your user account have both the "Add user" and "Change user" permissions set.') raise PermissionDenied if extra_context is None: extra_context = {} defaults = { 'auto_populated_fields': (), 'username_help_text': self.model._meta.get_field('username').help_text, } extra_context.update(defaults) return super(UserAdmin, self).add_view(request, form_url, extra_context) def user_change_password(self, request, id): if not self.has_change_permission(request): raise PermissionDenied user = get_object_or_404(self.model, pk=id) if request.method == 'POST': form = self.change_password_form(user, request.POST) if form.is_valid(): new_user = form.save() msg = ugettext('Password changed successfully.') messages.success(request, msg) return HttpResponseRedirect('..') else: form = self.change_password_form(user) fieldsets = [(None, {'fields': form.base_fields.keys()})] adminForm = admin.helpers.AdminForm(form, fieldsets, {}) return render_to_response(self.change_user_password_template or 'admin/auth/user/change_password.html', { 'title': _('Change password: %s') % escape(user.username), 'adminForm': adminForm, 'form': form, 'is_popup': '_popup' in request.REQUEST, 'add': True, 'change': False, 'has_delete_permission': False, 'has_change_permission': True, 'has_absolute_url': False, 'opts': self.model._meta, 'original': user, 'save_as': False, 'show_save': True, 'root_path': self.admin_site.root_path, }, context_instance=RequestContext(request)) def response_add(self, request, obj, post_url_continue='../%s/'): """ Determines the HttpResponse for the add_view stage. It mostly defers to its superclass implementation but is customized because the User model has a slightly different workflow. """ # We should allow further modification of the user just added i.e. the # 'Save' button should behave like the 'Save and continue editing' # button except in two scenarios: # * The user has pressed the 'Save and add another' button # * We are adding a user in a popup if '_addanother' not in request.POST and '_popup' not in request.POST: request.POST['_continue'] = 1 return super(UserAdmin, self).response_add(request, obj, post_url_continue) admin.site.register(Group, GroupAdmin) admin.site.register(User, UserAdmin)
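# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of how a project could extend the UserAdmin defined above;
# "ProfileUserAdmin" and the extra changelist column are hypothetical.
#
#     from django.contrib import admin
#     from django.contrib.auth.admin import UserAdmin
#     from django.contrib.auth.models import User
#
#     class ProfileUserAdmin(UserAdmin):
#         # add a column to the changelist without touching the base class
#         list_display = UserAdmin.list_display + ('date_joined',)
#
#     admin.site.unregister(User)
#     admin.site.register(User, ProfileUserAdmin)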
bsd-3-clause
Beyond-Imagination/BlubBlub
RaspberryPI/django-env/lib/python3.4/site-packages/serial/urlhandler/protocol_alt.py
26
1993
#! python
#
# This module implements a special URL handler that allows selecting an
# alternate implementation provided by some backends.
#
# This file is part of pySerial. https://github.com/pyserial/pyserial
# (C) 2015 Chris Liechti <cliechti@gmx.net>
#
# SPDX-License-Identifier:    BSD-3-Clause
#
# URL format:    alt://port[?option[=value][&option[=value]]]
# options:
# - class=X   use class named X instead of Serial
#
# example:
#   use poll based implementation on Posix (Linux):
#   python -m serial.tools.miniterm alt:///dev/ttyUSB0?class=PosixPollSerial

try:
    import urlparse
except ImportError:
    import urllib.parse as urlparse

import serial


def serial_class_for_url(url):
    """extract host and port from a URL string"""
    parts = urlparse.urlsplit(url)
    if parts.scheme != 'alt':
        raise serial.SerialException(
            'expected a string in the form '
            '"alt://port[?option[=value][&option[=value]]]": '
            'not starting with alt:// ({!r})'.format(parts.scheme))
    class_name = 'Serial'
    try:
        for option, values in urlparse.parse_qs(parts.query, True).items():
            if option == 'class':
                class_name = values[0]
            else:
                raise ValueError('unknown option: {!r}'.format(option))
    except ValueError as e:
        raise serial.SerialException(
            'expected a string in the form '
            '"alt://port[?option[=value][&option[=value]]]": {!r}'.format(e))
    if not hasattr(serial, class_name):
        raise ValueError('unknown class: {!r}'.format(class_name))
    cls = getattr(serial, class_name)
    if not issubclass(cls, serial.Serial):
        raise ValueError('class {!r} is not an instance of Serial'.format(class_name))
    return (''.join([parts.netloc, parts.path]), cls)

# - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
if __name__ == '__main__':
    s = serial.serial_for_url('alt:///dev/ttyS0?class=PosixPollSerial')
    print(s)
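# --- Illustrative usage (not part of the original file) ---
# What serial_class_for_url() returns for an alt:// URL; the device path is
# a placeholder and PosixPollSerial is only available on POSIX systems.
#
#     port, klass = serial_class_for_url(
#         'alt:///dev/ttyUSB0?class=PosixPollSerial')
#     # port  -> '/dev/ttyUSB0'
#     # klass -> serial.PosixPollSerial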
gpl-3.0
jxta/cc
vendor/Twisted-10.0.0/doc/core/examples/pbgtk2.py
3
3922
# Copyright (c) 2001-2009 Twisted Matrix Laboratories.
# See LICENSE for details.

from __future__ import nested_scopes

from twisted.internet import gtk2reactor
gtk2reactor.install()

import gtk
from gtk import glade

from twisted import copyright
from twisted.internet import reactor, defer
from twisted.python import failure, log, util
from twisted.spread import pb
from twisted.cred.credentials import UsernamePassword
from twisted.internet import error as netError


class LoginDialog:
    def __init__(self, deferred):
        self.deferredResult = deferred

        gladefile = util.sibpath(__file__, "pbgtk2login.glade")
        self.glade = glade.XML(gladefile)

        self.glade.signal_autoconnect(self)

        self.setWidgetsFromGladefile()
        self._loginDialog.show()

    def setWidgetsFromGladefile(self):
        widgets = ("hostEntry", "portEntry", "userNameEntry",
                   "passwordEntry", "statusBar", "loginDialog")
        gw = self.glade.get_widget
        for widgetName in widgets:
            setattr(self, "_" + widgetName, gw(widgetName))

        self._statusContext = self._statusBar.get_context_id("Login dialog.")

    def on_loginDialog_response(self, widget, response):
        handlers = {gtk.RESPONSE_NONE: self.windowClosed,
                    gtk.RESPONSE_DELETE_EVENT: self.windowClosed,
                    gtk.RESPONSE_OK: self.doLogin,
                    gtk.RESPONSE_CANCEL: self.cancelled}
        handlers.get(response)()

    def on_loginDialog_close(self, widget, userdata=None):
        self.windowClosed()

    def cancelled(self):
        if not self.deferredResult.called:
            self.deferredResult.errback()
        self._loginDialog.destroy()

    def windowClosed(self, reason=None):
        if not self.deferredResult.called:
            self.deferredResult.errback()

    def doLogin(self):
        host = self._hostEntry.get_text()
        port = int(self._portEntry.get_text())
        userName = self._userNameEntry.get_text()
        password = self._passwordEntry.get_text()

        client_factory = pb.PBClientFactory()
        reactor.connectTCP(host, port, client_factory)

        creds = UsernamePassword(userName, password)
        client_factory.login(creds).addCallbacks(self._cbGotPerspective,
                                                 self._ebFailedLogin)
        self.statusMsg("Contacting server...")

    def _cbGotPerspective(self, perspective):
        self.statusMsg("Connected to server.")
        self.deferredResult.callback(perspective)
        self._loginDialog.destroy()

    def _ebFailedLogin(self, reason):
        if isinstance(reason, failure.Failure):
            text = str(reason.value)
        else:
            text = str(reason)
        self.statusMsg(text)
        msg = gtk.MessageDialog(self._loginDialog,
                                gtk.DIALOG_DESTROY_WITH_PARENT,
                                gtk.MESSAGE_ERROR,
                                gtk.BUTTONS_CLOSE,
                                text)
        msg.show_all()
        msg.connect("response", lambda *a: msg.destroy())

    def statusMsg(self, text):
        self._statusBar.push(self._statusContext, text)


class EchoClient:
    def __init__(self, echoer):
        self.echoer = echoer
        w = gtk.Window(gtk.WINDOW_TOPLEVEL)
        vb = gtk.VBox()
        b = gtk.Button("Echo:")
        self.entry = gtk.Entry()
        self.outry = gtk.Entry()
        w.add(vb)
        map(vb.add, [b, self.entry, self.outry])
        b.connect('clicked', self.clicked)
        w.connect('destroy', self.stop)
        w.show_all()

    def clicked(self, b):
        txt = self.entry.get_text()
        self.entry.set_text("")
        self.echoer.callRemote('echo', txt).addCallback(self.outry.set_text)

    def stop(self, b):
        reactor.stop()


d = defer.Deferred()
LoginDialog(d)
d.addCallbacks(EchoClient, lambda _: reactor.stop())

reactor.run()
apache-2.0
Linaro/test-definitions
automated/linux/fuego-multinode/parser.py
1
1725
#!/usr/bin/env python
import argparse
import json
import sys

parser = argparse.ArgumentParser()
parser.add_argument(
    "-s",
    "--source",
    dest="source",
    required=True,
    help="path to fuego test result file run.json.",
)
parser.add_argument(
    "-d",
    "--dest",
    dest="dest",
    required=True,
    help="Path to plain test result file result.txt.",
)
args = parser.parse_args()

with open(args.source) as f:
    data = json.load(f)

if "test_sets" not in data.keys():
    print("test_sets NOT found in {}".format(args.source))
    sys.exit(1)

result_lines = []
for test_set in data["test_sets"]:
    result_lines.append("lava-test-set start {}".format(test_set["name"]))

    for test_case in test_set["test_cases"]:
        # Functional
        result_line = "{} {}".format(test_case["name"], test_case["status"].lower())
        result_lines.append(result_line)

        # Benchmark
        if test_case.get("measurements"):
            for measurement in test_case["measurements"]:
                # Use test_case_name plus measurement name as test_case_id so
                # that it is readable and unique.
                result_line = "{}_{} {} {} {}".format(
                    test_case["name"],
                    measurement["name"],
                    measurement["status"].lower(),
                    measurement["measure"],
                    measurement.get("unit", ""),
                )
                result_lines.append(result_line)

    result_lines.append("lava-test-set stop {}".format(test_set["name"]))

with open(args.dest, "w") as f:
    for result_line in result_lines:
        print(result_line)
        f.write("{}\n".format(result_line))
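# --- Illustrative input/output (hypothetical data, not shipped with the test) ---
# Given a run.json fragment like:
#
#     {"test_sets": [{"name": "Benchmark.dbench",
#                     "test_cases": [{"name": "copy", "status": "PASS"}]}]}
#
# the script writes the following lines to result.txt:
#
#     lava-test-set start Benchmark.dbench
#     copy pass
#     lava-test-set stop Benchmark.dbench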
gpl-2.0
watonyweng/neutron
neutron/tests/tools.py
12
3965
# Copyright (c) 2013 NEC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#      http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

import unittest
import warnings

import fixtures
import six

from neutron.api.v2 import attributes


class AttributeMapMemento(fixtures.Fixture):
    """Create a copy of the resource attribute map so it can be restored
    during test cleanup.

    There are a few reasons why this is not included in a class derived
    from BaseTestCase:

        - Test cases may need more control about when the backup is made,
          especially if they are not direct descendants of BaseTestCase.

        - Inheritance is a bit of overkill for this facility and it's a
          stretch to rationalize the "is a" criteria.
    """

    def _setUp(self):
        # Shallow copy is not a proper choice for keeping a backup copy as
        # the RESOURCE_ATTRIBUTE_MAP map is modified in place through the
        # 0th level keys. Ideally deepcopy() would be used but this seems
        # to result in test failures. A compromise is to copy one level
        # deeper than a shallow copy.
        self.contents_backup = {}
        for res, attrs in six.iteritems(attributes.RESOURCE_ATTRIBUTE_MAP):
            self.contents_backup[res] = attrs.copy()
        self.addCleanup(self.restore)

    def restore(self):
        attributes.RESOURCE_ATTRIBUTE_MAP = self.contents_backup


class WarningsFixture(fixtures.Fixture):
    """Filters out warnings during test runs."""

    warning_types = (
        DeprecationWarning, PendingDeprecationWarning, ImportWarning
    )

    def _setUp(self):
        self.addCleanup(warnings.resetwarnings)
        for wtype in self.warning_types:
            warnings.filterwarnings(
                "always", category=wtype, module='^neutron\\.')


"""setup_mock_calls and verify_mock_calls are convenient methods to set up
a sequence of mock calls.

expected_calls_and_values is a list of (expected_call, return_value):

    expected_calls_and_values = [
        (mock.call(["ovs-vsctl", self.TO, '--', "--may-exist", "add-port",
                    self.BR_NAME, pname]), None),
        (mock.call(["ovs-vsctl", self.TO, "set", "Interface",
                    pname, "type=gre"]), None),
        ....
    ]

* expected_call should be mock.call(expected_arg, ....)
* return_value is passed to side_effect of a mocked call.
  A return value or an exception can be specified.
"""


def setup_mock_calls(mocked_call, expected_calls_and_values):
    return_values = [call[1] for call in expected_calls_and_values]
    mocked_call.side_effect = return_values


def verify_mock_calls(mocked_call, expected_calls_and_values,
                      any_order=False):
    expected_calls = [call[0] for call in expected_calls_and_values]
    mocked_call.assert_has_calls(expected_calls, any_order=any_order)


def fail(msg=None):
    """Fail immediately, with the given message.

    This method is equivalent to TestCase.fail without requiring a
    testcase instance (useful for reducing coupling).
    """
    raise unittest.TestCase.failureException(msg)


class UnorderedList(list):
    """A list that is equal to any permutation of itself."""

    def __eq__(self, other):
        if not isinstance(other, list):
            return False
        return sorted(self) == sorted(other)

    def __ne__(self, other):
        return not self == other
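# --- Illustrative usage (not part of the original module) ---
# A minimal sketch of how setup_mock_calls/verify_mock_calls above are meant
# to be used; the ovs-vsctl argument lists are made-up examples.
#
#     import mock
#
#     mocked = mock.Mock()
#     calls_and_values = [
#         (mock.call(['ovs-vsctl', 'add-port', 'br-int', 'tap0']), None),
#         (mock.call(['ovs-vsctl', 'del-port', 'br-int', 'tap0']), None),
#     ]
#     setup_mock_calls(mocked, calls_and_values)
#
#     mocked(['ovs-vsctl', 'add-port', 'br-int', 'tap0'])
#     mocked(['ovs-vsctl', 'del-port', 'br-int', 'tap0'])
#
#     verify_mock_calls(mocked, calls_and_values)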
apache-2.0
tayfun/django
django/db/models/functions.py
194
6908
""" Classes that represent database functions. """ from django.db.models import DateTimeField, IntegerField from django.db.models.expressions import Func, Value class Coalesce(Func): """ Chooses, from left to right, the first non-null expression and returns it. """ function = 'COALESCE' def __init__(self, *expressions, **extra): if len(expressions) < 2: raise ValueError('Coalesce must take at least two expressions') super(Coalesce, self).__init__(*expressions, **extra) def as_oracle(self, compiler, connection): # we can't mix TextField (NCLOB) and CharField (NVARCHAR), so convert # all fields to NCLOB when we expect NCLOB if self.output_field.get_internal_type() == 'TextField': class ToNCLOB(Func): function = 'TO_NCLOB' expressions = [ ToNCLOB(expression) for expression in self.get_source_expressions()] self.set_source_expressions(expressions) return super(Coalesce, self).as_sql(compiler, connection) class ConcatPair(Func): """ A helper class that concatenates two arguments together. This is used by `Concat` because not all backend databases support more than two arguments. """ function = 'CONCAT' def __init__(self, left, right, **extra): super(ConcatPair, self).__init__(left, right, **extra) def as_sqlite(self, compiler, connection): self.arg_joiner = ' || ' self.template = '%(expressions)s' self.coalesce() return super(ConcatPair, self).as_sql(compiler, connection) def as_mysql(self, compiler, connection): # Use CONCAT_WS with an empty separator so that NULLs are ignored. self.function = 'CONCAT_WS' self.template = "%(function)s('', %(expressions)s)" return super(ConcatPair, self).as_sql(compiler, connection) def coalesce(self): # null on either side results in null for expression, wrap with coalesce expressions = [ Coalesce(expression, Value('')) for expression in self.get_source_expressions()] self.set_source_expressions(expressions) class Concat(Func): """ Concatenates text fields together. Backends that result in an entire null expression when any arguments are null will wrap each argument in coalesce functions to ensure we always get a non-null result. """ function = None template = "%(expressions)s" def __init__(self, *expressions, **extra): if len(expressions) < 2: raise ValueError('Concat must take at least two expressions') paired = self._paired(expressions) super(Concat, self).__init__(paired, **extra) def _paired(self, expressions): # wrap pairs of expressions in successive concat functions # exp = [a, b, c, d] # -> ConcatPair(a, ConcatPair(b, ConcatPair(c, d)))) if len(expressions) == 2: return ConcatPair(*expressions) return ConcatPair(expressions[0], self._paired(expressions[1:])) class Greatest(Func): """ Chooses the maximum expression and returns it. If any expression is null the return value is database-specific: On Postgres, the maximum not-null expression is returned. On MySQL, Oracle, and SQLite, if any expression is null, null is returned. """ function = 'GREATEST' def __init__(self, *expressions, **extra): if len(expressions) < 2: raise ValueError('Greatest must take at least two expressions') super(Greatest, self).__init__(*expressions, **extra) def as_sqlite(self, compiler, connection): """Use the MAX function on SQLite.""" return super(Greatest, self).as_sql(compiler, connection, function='MAX') class Least(Func): """ Chooses the minimum expression and returns it. If any expression is null the return value is database-specific: On Postgres, the minimum not-null expression is returned. On MySQL, Oracle, and SQLite, if any expression is null, null is returned. 
""" function = 'LEAST' def __init__(self, *expressions, **extra): if len(expressions) < 2: raise ValueError('Least must take at least two expressions') super(Least, self).__init__(*expressions, **extra) def as_sqlite(self, compiler, connection): """Use the MIN function on SQLite.""" return super(Least, self).as_sql(compiler, connection, function='MIN') class Length(Func): """Returns the number of characters in the expression""" function = 'LENGTH' def __init__(self, expression, **extra): output_field = extra.pop('output_field', IntegerField()) super(Length, self).__init__(expression, output_field=output_field, **extra) def as_mysql(self, compiler, connection): self.function = 'CHAR_LENGTH' return super(Length, self).as_sql(compiler, connection) class Lower(Func): function = 'LOWER' def __init__(self, expression, **extra): super(Lower, self).__init__(expression, **extra) class Now(Func): template = 'CURRENT_TIMESTAMP' def __init__(self, output_field=None, **extra): if output_field is None: output_field = DateTimeField() super(Now, self).__init__(output_field=output_field, **extra) def as_postgresql(self, compiler, connection): # Postgres' CURRENT_TIMESTAMP means "the time at the start of the # transaction". We use STATEMENT_TIMESTAMP to be cross-compatible with # other databases. self.template = 'STATEMENT_TIMESTAMP()' return self.as_sql(compiler, connection) class Substr(Func): function = 'SUBSTRING' def __init__(self, expression, pos, length=None, **extra): """ expression: the name of a field, or an expression returning a string pos: an integer > 0, or an expression returning an integer length: an optional number of characters to return """ if not hasattr(pos, 'resolve_expression'): if pos < 1: raise ValueError("'pos' must be greater than 0") pos = Value(pos) expressions = [expression, pos] if length is not None: if not hasattr(length, 'resolve_expression'): length = Value(length) expressions.append(length) super(Substr, self).__init__(*expressions, **extra) def as_sqlite(self, compiler, connection): self.function = 'SUBSTR' return super(Substr, self).as_sql(compiler, connection) def as_oracle(self, compiler, connection): self.function = 'SUBSTR' return super(Substr, self).as_sql(compiler, connection) class Upper(Func): function = 'UPPER' def __init__(self, expression, **extra): super(Upper, self).__init__(expression, **extra)
bsd-3-clause
NeCTAR-RC/python-neutronclient
neutronclient/shell.py
3
44766
# Copyright 2012 OpenStack Foundation. # All Rights Reserved # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # """ Command-line interface to the Neutron APIs """ from __future__ import print_function import argparse import getpass import inspect import itertools import logging import os import sys from keystoneclient.auth.identity import v2 as v2_auth from keystoneclient.auth.identity import v3 as v3_auth from keystoneclient import discover from keystoneclient.openstack.common.apiclient import exceptions as ks_exc from keystoneclient import session from oslo_utils import encodeutils import six.moves.urllib.parse as urlparse from cliff import app from cliff import commandmanager from neutronclient.common import clientmanager from neutronclient.common import command as openstack_command from neutronclient.common import exceptions as exc from neutronclient.common import extension as client_extension from neutronclient.common import utils from neutronclient.i18n import _ from neutronclient.neutron.v2_0 import agent from neutronclient.neutron.v2_0 import agentscheduler from neutronclient.neutron.v2_0 import credential from neutronclient.neutron.v2_0 import extension from neutronclient.neutron.v2_0 import floatingip from neutronclient.neutron.v2_0.fw import firewall from neutronclient.neutron.v2_0.fw import firewallpolicy from neutronclient.neutron.v2_0.fw import firewallrule from neutronclient.neutron.v2_0.lb import healthmonitor as lb_healthmonitor from neutronclient.neutron.v2_0.lb import member as lb_member from neutronclient.neutron.v2_0.lb import pool as lb_pool from neutronclient.neutron.v2_0.lb.v2 import healthmonitor as lbaas_healthmon from neutronclient.neutron.v2_0.lb.v2 import listener as lbaas_listener from neutronclient.neutron.v2_0.lb.v2 import loadbalancer as lbaas_loadbalancer from neutronclient.neutron.v2_0.lb.v2 import member as lbaas_member from neutronclient.neutron.v2_0.lb.v2 import pool as lbaas_pool from neutronclient.neutron.v2_0.lb import vip as lb_vip from neutronclient.neutron.v2_0 import metering from neutronclient.neutron.v2_0.nec import packetfilter from neutronclient.neutron.v2_0 import netpartition from neutronclient.neutron.v2_0 import network from neutronclient.neutron.v2_0 import networkprofile from neutronclient.neutron.v2_0.nsx import networkgateway from neutronclient.neutron.v2_0.nsx import qos_queue from neutronclient.neutron.v2_0 import policyprofile from neutronclient.neutron.v2_0 import port from neutronclient.neutron.v2_0 import quota from neutronclient.neutron.v2_0 import router from neutronclient.neutron.v2_0 import securitygroup from neutronclient.neutron.v2_0 import servicetype from neutronclient.neutron.v2_0 import subnet from neutronclient.neutron.v2_0 import subnetpool from neutronclient.neutron.v2_0.vpn import ikepolicy from neutronclient.neutron.v2_0.vpn import ipsec_site_connection from neutronclient.neutron.v2_0.vpn import ipsecpolicy from neutronclient.neutron.v2_0.vpn import vpnservice from neutronclient.version import __version__ VERSION = '2.0' 
NEUTRON_API_VERSION = '2.0' def run_command(cmd, cmd_parser, sub_argv): _argv = sub_argv index = -1 values_specs = [] if '--' in sub_argv: index = sub_argv.index('--') _argv = sub_argv[:index] values_specs = sub_argv[index:] known_args, _values_specs = cmd_parser.parse_known_args(_argv) if(isinstance(cmd, subnet.CreateSubnet) and not known_args.cidr): cidr = get_first_valid_cidr(_values_specs) if cidr: known_args.cidr = cidr _values_specs.remove(cidr) cmd.values_specs = (index == -1 and _values_specs or values_specs) return cmd.run(known_args) def get_first_valid_cidr(value_specs): # Bug 1442771, argparse does not allow optional positional parameter # to be separated from previous positional parameter. # When cidr was separated from network, the value will not be able # to be parsed into known_args, but saved to _values_specs instead. for value in value_specs: if utils.is_valid_cidr(value): return value def env(*_vars, **kwargs): """Search for the first defined of possibly many env vars. Returns the first environment variable defined in vars, or returns the default defined in kwargs. """ for v in _vars: value = os.environ.get(v, None) if value: return value return kwargs.get('default', '') def check_non_negative_int(value): try: value = int(value) except ValueError: raise argparse.ArgumentTypeError(_("invalid int value: %r") % value) if value < 0: raise argparse.ArgumentTypeError(_("input value %d is negative") % value) return value class BashCompletionCommand(openstack_command.OpenStackCommand): """Prints all of the commands and options for bash-completion.""" resource = "bash_completion" COMMAND_V2 = { 'bash-completion': BashCompletionCommand, 'net-list': network.ListNetwork, 'net-external-list': network.ListExternalNetwork, 'net-show': network.ShowNetwork, 'net-create': network.CreateNetwork, 'net-delete': network.DeleteNetwork, 'net-update': network.UpdateNetwork, 'subnet-list': subnet.ListSubnet, 'subnet-show': subnet.ShowSubnet, 'subnet-create': subnet.CreateSubnet, 'subnet-delete': subnet.DeleteSubnet, 'subnet-update': subnet.UpdateSubnet, 'subnetpool-list': subnetpool.ListSubnetPool, 'subnetpool-show': subnetpool.ShowSubnetPool, 'subnetpool-create': subnetpool.CreateSubnetPool, 'subnetpool-delete': subnetpool.DeleteSubnetPool, 'subnetpool-update': subnetpool.UpdateSubnetPool, 'port-list': port.ListPort, 'port-show': port.ShowPort, 'port-create': port.CreatePort, 'port-delete': port.DeletePort, 'port-update': port.UpdatePort, 'quota-list': quota.ListQuota, 'quota-show': quota.ShowQuota, 'quota-delete': quota.DeleteQuota, 'quota-update': quota.UpdateQuota, 'ext-list': extension.ListExt, 'ext-show': extension.ShowExt, 'router-list': router.ListRouter, 'router-port-list': port.ListRouterPort, 'router-show': router.ShowRouter, 'router-create': router.CreateRouter, 'router-delete': router.DeleteRouter, 'router-update': router.UpdateRouter, 'router-interface-add': router.AddInterfaceRouter, 'router-interface-delete': router.RemoveInterfaceRouter, 'router-gateway-set': router.SetGatewayRouter, 'router-gateway-clear': router.RemoveGatewayRouter, 'floatingip-list': floatingip.ListFloatingIP, 'floatingip-show': floatingip.ShowFloatingIP, 'floatingip-create': floatingip.CreateFloatingIP, 'floatingip-delete': floatingip.DeleteFloatingIP, 'floatingip-associate': floatingip.AssociateFloatingIP, 'floatingip-disassociate': floatingip.DisassociateFloatingIP, 'security-group-list': securitygroup.ListSecurityGroup, 'security-group-show': securitygroup.ShowSecurityGroup, 'security-group-create': 
securitygroup.CreateSecurityGroup, 'security-group-delete': securitygroup.DeleteSecurityGroup, 'security-group-update': securitygroup.UpdateSecurityGroup, 'security-group-rule-list': securitygroup.ListSecurityGroupRule, 'security-group-rule-show': securitygroup.ShowSecurityGroupRule, 'security-group-rule-create': securitygroup.CreateSecurityGroupRule, 'security-group-rule-delete': securitygroup.DeleteSecurityGroupRule, 'lbaas-loadbalancer-list': lbaas_loadbalancer.ListLoadBalancer, 'lbaas-loadbalancer-show': lbaas_loadbalancer.ShowLoadBalancer, 'lbaas-loadbalancer-create': lbaas_loadbalancer.CreateLoadBalancer, 'lbaas-loadbalancer-update': lbaas_loadbalancer.UpdateLoadBalancer, 'lbaas-loadbalancer-delete': lbaas_loadbalancer.DeleteLoadBalancer, 'lbaas-listener-list': lbaas_listener.ListListener, 'lbaas-listener-show': lbaas_listener.ShowListener, 'lbaas-listener-create': lbaas_listener.CreateListener, 'lbaas-listener-update': lbaas_listener.UpdateListener, 'lbaas-listener-delete': lbaas_listener.DeleteListener, 'lbaas-pool-list': lbaas_pool.ListPool, 'lbaas-pool-show': lbaas_pool.ShowPool, 'lbaas-pool-create': lbaas_pool.CreatePool, 'lbaas-pool-update': lbaas_pool.UpdatePool, 'lbaas-pool-delete': lbaas_pool.DeletePool, 'lbaas-healthmonitor-list': lbaas_healthmon.ListHealthMonitor, 'lbaas-healthmonitor-show': lbaas_healthmon.ShowHealthMonitor, 'lbaas-healthmonitor-create': lbaas_healthmon.CreateHealthMonitor, 'lbaas-healthmonitor-update': lbaas_healthmon.UpdateHealthMonitor, 'lbaas-healthmonitor-delete': lbaas_healthmon.DeleteHealthMonitor, 'lbaas-member-list': lbaas_member.ListMember, 'lbaas-member-show': lbaas_member.ShowMember, 'lbaas-member-create': lbaas_member.CreateMember, 'lbaas-member-update': lbaas_member.UpdateMember, 'lbaas-member-delete': lbaas_member.DeleteMember, 'lb-vip-list': lb_vip.ListVip, 'lb-vip-show': lb_vip.ShowVip, 'lb-vip-create': lb_vip.CreateVip, 'lb-vip-update': lb_vip.UpdateVip, 'lb-vip-delete': lb_vip.DeleteVip, 'lb-pool-list': lb_pool.ListPool, 'lb-pool-show': lb_pool.ShowPool, 'lb-pool-create': lb_pool.CreatePool, 'lb-pool-update': lb_pool.UpdatePool, 'lb-pool-delete': lb_pool.DeletePool, 'lb-pool-stats': lb_pool.RetrievePoolStats, 'lb-member-list': lb_member.ListMember, 'lb-member-show': lb_member.ShowMember, 'lb-member-create': lb_member.CreateMember, 'lb-member-update': lb_member.UpdateMember, 'lb-member-delete': lb_member.DeleteMember, 'lb-healthmonitor-list': lb_healthmonitor.ListHealthMonitor, 'lb-healthmonitor-show': lb_healthmonitor.ShowHealthMonitor, 'lb-healthmonitor-create': lb_healthmonitor.CreateHealthMonitor, 'lb-healthmonitor-update': lb_healthmonitor.UpdateHealthMonitor, 'lb-healthmonitor-delete': lb_healthmonitor.DeleteHealthMonitor, 'lb-healthmonitor-associate': lb_healthmonitor.AssociateHealthMonitor, 'lb-healthmonitor-disassociate': ( lb_healthmonitor.DisassociateHealthMonitor ), 'queue-create': qos_queue.CreateQoSQueue, 'queue-delete': qos_queue.DeleteQoSQueue, 'queue-show': qos_queue.ShowQoSQueue, 'queue-list': qos_queue.ListQoSQueue, 'agent-list': agent.ListAgent, 'agent-show': agent.ShowAgent, 'agent-delete': agent.DeleteAgent, 'agent-update': agent.UpdateAgent, 'net-gateway-create': networkgateway.CreateNetworkGateway, 'net-gateway-update': networkgateway.UpdateNetworkGateway, 'net-gateway-delete': networkgateway.DeleteNetworkGateway, 'net-gateway-show': networkgateway.ShowNetworkGateway, 'net-gateway-list': networkgateway.ListNetworkGateway, 'net-gateway-connect': networkgateway.ConnectNetworkGateway, 'net-gateway-disconnect': 
networkgateway.DisconnectNetworkGateway, 'gateway-device-create': networkgateway.CreateGatewayDevice, 'gateway-device-update': networkgateway.UpdateGatewayDevice, 'gateway-device-delete': networkgateway.DeleteGatewayDevice, 'gateway-device-show': networkgateway.ShowGatewayDevice, 'gateway-device-list': networkgateway.ListGatewayDevice, 'dhcp-agent-network-add': agentscheduler.AddNetworkToDhcpAgent, 'dhcp-agent-network-remove': agentscheduler.RemoveNetworkFromDhcpAgent, 'net-list-on-dhcp-agent': agentscheduler.ListNetworksOnDhcpAgent, 'dhcp-agent-list-hosting-net': agentscheduler.ListDhcpAgentsHostingNetwork, 'l3-agent-router-add': agentscheduler.AddRouterToL3Agent, 'l3-agent-router-remove': agentscheduler.RemoveRouterFromL3Agent, 'router-list-on-l3-agent': agentscheduler.ListRoutersOnL3Agent, 'l3-agent-list-hosting-router': agentscheduler.ListL3AgentsHostingRouter, 'lb-pool-list-on-agent': agentscheduler.ListPoolsOnLbaasAgent, 'lb-agent-hosting-pool': agentscheduler.GetLbaasAgentHostingPool, 'lbaas-loadbalancer-list-on-agent': agentscheduler.ListLoadBalancersOnLbaasAgent, 'lbaas-agent-hosting-loadbalancer': agentscheduler.GetLbaasAgentHostingLoadBalancer, 'service-provider-list': servicetype.ListServiceProvider, 'firewall-rule-list': firewallrule.ListFirewallRule, 'firewall-rule-show': firewallrule.ShowFirewallRule, 'firewall-rule-create': firewallrule.CreateFirewallRule, 'firewall-rule-update': firewallrule.UpdateFirewallRule, 'firewall-rule-delete': firewallrule.DeleteFirewallRule, 'firewall-policy-list': firewallpolicy.ListFirewallPolicy, 'firewall-policy-show': firewallpolicy.ShowFirewallPolicy, 'firewall-policy-create': firewallpolicy.CreateFirewallPolicy, 'firewall-policy-update': firewallpolicy.UpdateFirewallPolicy, 'firewall-policy-delete': firewallpolicy.DeleteFirewallPolicy, 'firewall-policy-insert-rule': firewallpolicy.FirewallPolicyInsertRule, 'firewall-policy-remove-rule': firewallpolicy.FirewallPolicyRemoveRule, 'firewall-list': firewall.ListFirewall, 'firewall-show': firewall.ShowFirewall, 'firewall-create': firewall.CreateFirewall, 'firewall-update': firewall.UpdateFirewall, 'firewall-delete': firewall.DeleteFirewall, 'cisco-credential-list': credential.ListCredential, 'cisco-credential-show': credential.ShowCredential, 'cisco-credential-create': credential.CreateCredential, 'cisco-credential-delete': credential.DeleteCredential, 'cisco-network-profile-list': networkprofile.ListNetworkProfile, 'cisco-network-profile-show': networkprofile.ShowNetworkProfile, 'cisco-network-profile-create': networkprofile.CreateNetworkProfile, 'cisco-network-profile-delete': networkprofile.DeleteNetworkProfile, 'cisco-network-profile-update': networkprofile.UpdateNetworkProfile, 'cisco-policy-profile-list': policyprofile.ListPolicyProfile, 'cisco-policy-profile-show': policyprofile.ShowPolicyProfile, 'cisco-policy-profile-update': policyprofile.UpdatePolicyProfile, 'ipsec-site-connection-list': ( ipsec_site_connection.ListIPsecSiteConnection ), 'ipsec-site-connection-show': ( ipsec_site_connection.ShowIPsecSiteConnection ), 'ipsec-site-connection-create': ( ipsec_site_connection.CreateIPsecSiteConnection ), 'ipsec-site-connection-update': ( ipsec_site_connection.UpdateIPsecSiteConnection ), 'ipsec-site-connection-delete': ( ipsec_site_connection.DeleteIPsecSiteConnection ), 'vpn-service-list': vpnservice.ListVPNService, 'vpn-service-show': vpnservice.ShowVPNService, 'vpn-service-create': vpnservice.CreateVPNService, 'vpn-service-update': vpnservice.UpdateVPNService, 'vpn-service-delete': 
vpnservice.DeleteVPNService, 'vpn-ipsecpolicy-list': ipsecpolicy.ListIPsecPolicy, 'vpn-ipsecpolicy-show': ipsecpolicy.ShowIPsecPolicy, 'vpn-ipsecpolicy-create': ipsecpolicy.CreateIPsecPolicy, 'vpn-ipsecpolicy-update': ipsecpolicy.UpdateIPsecPolicy, 'vpn-ipsecpolicy-delete': ipsecpolicy.DeleteIPsecPolicy, 'vpn-ikepolicy-list': ikepolicy.ListIKEPolicy, 'vpn-ikepolicy-show': ikepolicy.ShowIKEPolicy, 'vpn-ikepolicy-create': ikepolicy.CreateIKEPolicy, 'vpn-ikepolicy-update': ikepolicy.UpdateIKEPolicy, 'vpn-ikepolicy-delete': ikepolicy.DeleteIKEPolicy, 'meter-label-create': metering.CreateMeteringLabel, 'meter-label-list': metering.ListMeteringLabel, 'meter-label-show': metering.ShowMeteringLabel, 'meter-label-delete': metering.DeleteMeteringLabel, 'meter-label-rule-create': metering.CreateMeteringLabelRule, 'meter-label-rule-list': metering.ListMeteringLabelRule, 'meter-label-rule-show': metering.ShowMeteringLabelRule, 'meter-label-rule-delete': metering.DeleteMeteringLabelRule, 'nuage-netpartition-list': netpartition.ListNetPartition, 'nuage-netpartition-show': netpartition.ShowNetPartition, 'nuage-netpartition-create': netpartition.CreateNetPartition, 'nuage-netpartition-delete': netpartition.DeleteNetPartition, 'nec-packet-filter-list': packetfilter.ListPacketFilter, 'nec-packet-filter-show': packetfilter.ShowPacketFilter, 'nec-packet-filter-create': packetfilter.CreatePacketFilter, 'nec-packet-filter-update': packetfilter.UpdatePacketFilter, 'nec-packet-filter-delete': packetfilter.DeletePacketFilter, } COMMANDS = {'2.0': COMMAND_V2} class HelpAction(argparse.Action): """Provide a custom action so the -h and --help options to the main app will print a list of the commands. The commands are determined by checking the CommandManager instance, passed in as the "default" value for the action. """ def __call__(self, parser, namespace, values, option_string=None): outputs = [] max_len = 0 app = self.default parser.print_help(app.stdout) app.stdout.write(_('\nCommands for API v%s:\n') % app.api_version) command_manager = app.command_manager for name, ep in sorted(command_manager): factory = ep.load() cmd = factory(self, None) one_liner = cmd.get_description().split('\n')[0] outputs.append((name, one_liner)) max_len = max(len(name), max_len) for (name, one_liner) in outputs: app.stdout.write(' %s %s\n' % (name.ljust(max_len), one_liner)) sys.exit(0) class NeutronShell(app.App): # verbose logging levels WARNING_LEVEL = 0 INFO_LEVEL = 1 DEBUG_LEVEL = 2 CONSOLE_MESSAGE_FORMAT = '%(message)s' DEBUG_MESSAGE_FORMAT = '%(levelname)s: %(name)s %(message)s' log = logging.getLogger(__name__) def __init__(self, apiversion): super(NeutronShell, self).__init__( description=__doc__.strip(), version=VERSION, command_manager=commandmanager.CommandManager('neutron.cli'), ) self.commands = COMMANDS for k, v in self.commands[apiversion].items(): self.command_manager.add_command(k, v) self._register_extensions(VERSION) # Pop the 'complete' to correct the outputs of 'neutron help'. self.command_manager.commands.pop('complete') # This is instantiated in initialize_app() only when using # password flow auth self.auth_client = None self.api_version = apiversion def build_option_parser(self, description, version): """Return an argparse option parser for this application. Subclasses may override this method to extend the parser with more global options. 
:param description: full description of the application :paramtype description: str :param version: version number for the application :paramtype version: str """ parser = argparse.ArgumentParser( description=description, add_help=False, ) parser.add_argument( '--version', action='version', version=__version__, ) parser.add_argument( '-v', '--verbose', '--debug', action='count', dest='verbose_level', default=self.DEFAULT_VERBOSE_LEVEL, help=_('Increase verbosity of output and show tracebacks on' ' errors. You can repeat this option.')) parser.add_argument( '-q', '--quiet', action='store_const', dest='verbose_level', const=0, help=_('Suppress output except warnings and errors.')) parser.add_argument( '-h', '--help', action=HelpAction, nargs=0, default=self, # tricky help=_("Show this help message and exit.")) parser.add_argument( '-r', '--retries', metavar="NUM", type=check_non_negative_int, default=0, help=_("How many times the request to the Neutron server should " "be retried if it fails.")) # FIXME(bklei): this method should come from python-keystoneclient self._append_global_identity_args(parser) return parser def _append_global_identity_args(self, parser): # FIXME(bklei): these are global identity (Keystone) arguments which # should be consistent and shared by all service clients. Therefore, # they should be provided by python-keystoneclient. We will need to # refactor this code once this functionality is available in # python-keystoneclient. # # Note: At that time we'll need to decide if we can just abandon # the deprecated args (--service-type and --endpoint-type). parser.add_argument( '--os-service-type', metavar='<os-service-type>', default=env('OS_NETWORK_SERVICE_TYPE', default='network'), help=_('Defaults to env[OS_NETWORK_SERVICE_TYPE] or network.')) parser.add_argument( '--os-endpoint-type', metavar='<os-endpoint-type>', default=env('OS_ENDPOINT_TYPE', default='publicURL'), help=_('Defaults to env[OS_ENDPOINT_TYPE] or publicURL.')) # FIXME(bklei): --service-type is deprecated but kept in for # backward compatibility. parser.add_argument( '--service-type', metavar='<service-type>', default=env('OS_NETWORK_SERVICE_TYPE', default='network'), help=_('DEPRECATED! Use --os-service-type.')) # FIXME(bklei): --endpoint-type is deprecated but kept in for # backward compatibility. parser.add_argument( '--endpoint-type', metavar='<endpoint-type>', default=env('OS_ENDPOINT_TYPE', default='publicURL'), help=_('DEPRECATED! Use --os-endpoint-type.')) parser.add_argument( '--os-auth-strategy', metavar='<auth-strategy>', default=env('OS_AUTH_STRATEGY', default='keystone'), help=_('DEPRECATED! Only keystone is supported.')) parser.add_argument( '--os_auth_strategy', help=argparse.SUPPRESS) parser.add_argument( '--os-auth-url', metavar='<auth-url>', default=env('OS_AUTH_URL'), help=_('Authentication URL, defaults to env[OS_AUTH_URL].')) parser.add_argument( '--os_auth_url', help=argparse.SUPPRESS) project_name_group = parser.add_mutually_exclusive_group() project_name_group.add_argument( '--os-tenant-name', metavar='<auth-tenant-name>', default=env('OS_TENANT_NAME'), help=_('Authentication tenant name, defaults to ' 'env[OS_TENANT_NAME].')) project_name_group.add_argument( '--os-project-name', metavar='<auth-project-name>', default=utils.env('OS_PROJECT_NAME'), help='Another way to specify tenant name. ' 'This option is mutually exclusive with ' ' --os-tenant-name. 
' 'Defaults to env[OS_PROJECT_NAME].') parser.add_argument( '--os_tenant_name', help=argparse.SUPPRESS) project_id_group = parser.add_mutually_exclusive_group() project_id_group.add_argument( '--os-tenant-id', metavar='<auth-tenant-id>', default=env('OS_TENANT_ID'), help=_('Authentication tenant ID, defaults to ' 'env[OS_TENANT_ID].')) project_id_group.add_argument( '--os-project-id', metavar='<auth-project-id>', default=utils.env('OS_PROJECT_ID'), help='Another way to specify tenant ID. ' 'This option is mutually exclusive with ' ' --os-tenant-id. ' 'Defaults to env[OS_PROJECT_ID].') parser.add_argument( '--os-username', metavar='<auth-username>', default=utils.env('OS_USERNAME'), help=_('Authentication username, defaults to env[OS_USERNAME].')) parser.add_argument( '--os_username', help=argparse.SUPPRESS) parser.add_argument( '--os-user-id', metavar='<auth-user-id>', default=env('OS_USER_ID'), help=_('Authentication user ID (Env: OS_USER_ID)')) parser.add_argument( '--os_user_id', help=argparse.SUPPRESS) parser.add_argument( '--os-user-domain-id', metavar='<auth-user-domain-id>', default=utils.env('OS_USER_DOMAIN_ID'), help='OpenStack user domain ID. ' 'Defaults to env[OS_USER_DOMAIN_ID].') parser.add_argument( '--os_user_domain_id', help=argparse.SUPPRESS) parser.add_argument( '--os-user-domain-name', metavar='<auth-user-domain-name>', default=utils.env('OS_USER_DOMAIN_NAME'), help='OpenStack user domain name. ' 'Defaults to env[OS_USER_DOMAIN_NAME].') parser.add_argument( '--os_user_domain_name', help=argparse.SUPPRESS) parser.add_argument( '--os_project_id', help=argparse.SUPPRESS) parser.add_argument( '--os_project_name', help=argparse.SUPPRESS) parser.add_argument( '--os-project-domain-id', metavar='<auth-project-domain-id>', default=utils.env('OS_PROJECT_DOMAIN_ID'), help='Defaults to env[OS_PROJECT_DOMAIN_ID].') parser.add_argument( '--os-project-domain-name', metavar='<auth-project-domain-name>', default=utils.env('OS_PROJECT_DOMAIN_NAME'), help='Defaults to env[OS_PROJECT_DOMAIN_NAME].') parser.add_argument( '--os-cert', metavar='<certificate>', default=utils.env('OS_CERT'), help=_("Path of certificate file to use in SSL " "connection. This file can optionally be " "prepended with the private key. Defaults " "to env[OS_CERT].")) parser.add_argument( '--os-cacert', metavar='<ca-certificate>', default=env('OS_CACERT', default=None), help=_("Specify a CA bundle file to use in " "verifying a TLS (https) server certificate. " "Defaults to env[OS_CACERT].")) parser.add_argument( '--os-key', metavar='<key>', default=utils.env('OS_KEY'), help=_("Path of client key to use in SSL " "connection. This option is not necessary " "if your key is prepended to your certificate " "file. 
Defaults to env[OS_KEY].")) parser.add_argument( '--os-password', metavar='<auth-password>', default=utils.env('OS_PASSWORD'), help=_('Authentication password, defaults to env[OS_PASSWORD].')) parser.add_argument( '--os_password', help=argparse.SUPPRESS) parser.add_argument( '--os-region-name', metavar='<auth-region-name>', default=env('OS_REGION_NAME'), help=_('Authentication region name, defaults to ' 'env[OS_REGION_NAME].')) parser.add_argument( '--os_region_name', help=argparse.SUPPRESS) parser.add_argument( '--os-token', metavar='<token>', default=env('OS_TOKEN'), help=_('Authentication token, defaults to env[OS_TOKEN].')) parser.add_argument( '--os_token', help=argparse.SUPPRESS) parser.add_argument( '--http-timeout', metavar='<seconds>', default=env('OS_NETWORK_TIMEOUT', default=None), type=float, help=_('Timeout in seconds to wait for an HTTP response. Defaults ' 'to env[OS_NETWORK_TIMEOUT] or None if not specified.')) parser.add_argument( '--os-url', metavar='<url>', default=env('OS_URL'), help=_('Defaults to env[OS_URL].')) parser.add_argument( '--os_url', help=argparse.SUPPRESS) parser.add_argument( '--insecure', action='store_true', default=env('NEUTRONCLIENT_INSECURE', default=False), help=_("Explicitly allow neutronclient to perform \"insecure\" " "SSL (https) requests. The server's certificate will " "not be verified against any certificate authorities. " "This option should be used with caution.")) def _bash_completion(self): """Prints all of the commands and options for bash-completion.""" commands = set() options = set() for option, _action in self.parser._option_string_actions.items(): options.add(option) for command_name, command in self.command_manager: commands.add(command_name) cmd_factory = command.load() cmd = cmd_factory(self, None) cmd_parser = cmd.get_parser('') for option, _action in cmd_parser._option_string_actions.items(): options.add(option) print(' '.join(commands | options)) def _register_extensions(self, version): for name, module in itertools.chain( client_extension._discover_via_entry_points()): self._extend_shell_commands(module, version) def _extend_shell_commands(self, module, version): classes = inspect.getmembers(module, inspect.isclass) for cls_name, cls in classes: if (issubclass(cls, client_extension.NeutronClientExtension) and hasattr(cls, 'shell_command')): cmd = cls.shell_command if hasattr(cls, 'versions'): if version not in cls.versions: continue try: self.command_manager.add_command(cmd, cls) self.commands[version][cmd] = cls except TypeError: pass def run(self, argv): """Equivalent to the main program for the application. 
:param argv: input arguments and options :paramtype argv: list of str """ try: index = 0 command_pos = -1 help_pos = -1 help_command_pos = -1 for arg in argv: if arg == 'bash-completion' and help_command_pos == -1: self._bash_completion() return 0 if arg in self.commands[self.api_version]: if command_pos == -1: command_pos = index elif arg in ('-h', '--help'): if help_pos == -1: help_pos = index elif arg == 'help': if help_command_pos == -1: help_command_pos = index index = index + 1 if command_pos > -1 and help_pos > command_pos: argv = ['help', argv[command_pos]] if help_command_pos > -1 and command_pos == -1: argv[help_command_pos] = '--help' self.options, remainder = self.parser.parse_known_args(argv) self.configure_logging() self.interactive_mode = not remainder self.initialize_app(remainder) except Exception as err: if self.options.verbose_level >= self.DEBUG_LEVEL: self.log.exception(err) raise else: self.log.error(err) return 1 if self.interactive_mode: _argv = [sys.argv[0]] sys.argv = _argv return self.interact() return self.run_subcommand(remainder) def run_subcommand(self, argv): subcommand = self.command_manager.find_command(argv) cmd_factory, cmd_name, sub_argv = subcommand cmd = cmd_factory(self, self.options) try: self.prepare_to_run_command(cmd) full_name = (cmd_name if self.interactive_mode else ' '.join([self.NAME, cmd_name]) ) cmd_parser = cmd.get_parser(full_name) return run_command(cmd, cmd_parser, sub_argv) except Exception as e: if self.options.verbose_level >= self.DEBUG_LEVEL: self.log.exception("%s", e) raise self.log.error("%s", e) return 1 def authenticate_user(self): """Make sure the user has provided all of the authentication info we need. """ if self.options.os_auth_strategy == 'keystone': if self.options.os_token or self.options.os_url: # Token flow auth takes priority if not self.options.os_token: raise exc.CommandError( _("You must provide a token via" " either --os-token or env[OS_TOKEN]" " when providing a service URL")) if not self.options.os_url: raise exc.CommandError( _("You must provide a service URL via" " either --os-url or env[OS_URL]" " when providing a token")) else: # Validate password flow auth project_info = (self.options.os_tenant_name or self.options.os_tenant_id or (self.options.os_project_name and (self.options.os_project_domain_name or self.options.os_project_domain_id)) or self.options.os_project_id) if (not self.options.os_username and not self.options.os_user_id): raise exc.CommandError( _("You must provide a username or user ID via" " --os-username, env[OS_USERNAME] or" " --os-user-id, env[OS_USER_ID]")) if not self.options.os_password: # No password, If we've got a tty, try prompting for it if hasattr(sys.stdin, 'isatty') and sys.stdin.isatty(): # Check for Ctl-D try: self.options.os_password = getpass.getpass( 'OS Password: ') except EOFError: pass # No password because we didn't have a tty or the # user Ctl-D when prompted. if not self.options.os_password: raise exc.CommandError( _("You must provide a password via" " either --os-password or env[OS_PASSWORD]")) if (not project_info): # tenent is deprecated in Keystone v3. Use the latest # terminology instead. 
                    raise exc.CommandError(
                        _("You must provide a project_id or project_name ("
                          "with project_domain_name or project_domain_id) "
                          "via "
                          "  --os-project-id (env[OS_PROJECT_ID])"
                          "  --os-project-name (env[OS_PROJECT_NAME]),"
                          "  --os-project-domain-id "
                          "(env[OS_PROJECT_DOMAIN_ID])"
                          "  --os-project-domain-name "
                          "(env[OS_PROJECT_DOMAIN_NAME])"))

            if not self.options.os_auth_url:
                raise exc.CommandError(
                    _("You must provide an auth url via"
                      " either --os-auth-url or via env[OS_AUTH_URL]"))
            auth_session = self._get_keystone_session()
            auth = auth_session.auth
        else:  # not keystone
            if not self.options.os_url:
                raise exc.CommandError(
                    _("You must provide a service URL via"
                      " either --os-url or env[OS_URL]"))
            auth_session = None
            auth = None

        self.client_manager = clientmanager.ClientManager(
            token=self.options.os_token,
            url=self.options.os_url,
            auth_url=self.options.os_auth_url,
            tenant_name=self.options.os_tenant_name,
            tenant_id=self.options.os_tenant_id,
            username=self.options.os_username,
            user_id=self.options.os_user_id,
            password=self.options.os_password,
            region_name=self.options.os_region_name,
            api_version=self.api_version,
            auth_strategy=self.options.os_auth_strategy,
            # FIXME (bklei) honor deprecated service_type and
            # endpoint type until they are removed
            service_type=self.options.os_service_type or
            self.options.service_type,
            endpoint_type=self.options.os_endpoint_type or
            self.options.endpoint_type,
            insecure=self.options.insecure,
            ca_cert=self.options.os_cacert,
            timeout=self.options.http_timeout,
            retries=self.options.retries,
            raise_errors=False,
            session=auth_session,
            auth=auth,
            log_credentials=True)
        return

    def initialize_app(self, argv):
        """Global app init bits:

        * set up API versions
        * validate authentication info
        """
        super(NeutronShell, self).initialize_app(argv)

        self.api_version = {'network': self.api_version}

        # If the user is not asking for help, make sure they
        # have given us auth.
        cmd_name = None
        if argv:
            cmd_info = self.command_manager.find_command(argv)
            cmd_factory, cmd_name, sub_argv = cmd_info
        if self.interactive_mode or cmd_name != 'help':
            self.authenticate_user()

    def configure_logging(self):
        """Create logging handlers for any log output."""
        root_logger = logging.getLogger('')

        # Set up logging to a file
        root_logger.setLevel(logging.DEBUG)

        # Send higher-level messages to the console via stderr
        console = logging.StreamHandler(self.stderr)
        console_level = {self.WARNING_LEVEL: logging.WARNING,
                         self.INFO_LEVEL: logging.INFO,
                         self.DEBUG_LEVEL: logging.DEBUG,
                         }.get(self.options.verbose_level, logging.DEBUG)
        # The default log level is INFO, in this situation, set the
        # log level of the console to WARNING, to avoid displaying
        # useless messages.
This equals using "--quiet" if console_level == logging.INFO: console.setLevel(logging.WARNING) else: console.setLevel(console_level) if logging.DEBUG == console_level: formatter = logging.Formatter(self.DEBUG_MESSAGE_FORMAT) else: formatter = logging.Formatter(self.CONSOLE_MESSAGE_FORMAT) logging.getLogger('iso8601.iso8601').setLevel(logging.WARNING) logging.getLogger('urllib3.connectionpool').setLevel(logging.WARNING) console.setFormatter(formatter) root_logger.addHandler(console) return def get_v2_auth(self, v2_auth_url): return v2_auth.Password( v2_auth_url, username=self.options.os_username, password=self.options.os_password, tenant_id=self.options.os_tenant_id, tenant_name=self.options.os_tenant_name) def get_v3_auth(self, v3_auth_url): project_id = self.options.os_project_id or self.options.os_tenant_id project_name = (self.options.os_project_name or self.options.os_tenant_name) return v3_auth.Password( v3_auth_url, username=self.options.os_username, password=self.options.os_password, user_id=self.options.os_user_id, user_domain_name=self.options.os_user_domain_name, user_domain_id=self.options.os_user_domain_id, project_id=project_id, project_name=project_name, project_domain_name=self.options.os_project_domain_name, project_domain_id=self.options.os_project_domain_id ) def _discover_auth_versions(self, session, auth_url): # discover the API versions the server is supporting base on the # given URL try: ks_discover = discover.Discover(session=session, auth_url=auth_url) return (ks_discover.url_for('2.0'), ks_discover.url_for('3.0')) except ks_exc.ClientException: # Identity service may not support discover API version. # Lets try to figure out the API version from the original URL. url_parts = urlparse.urlparse(auth_url) (scheme, netloc, path, params, query, fragment) = url_parts path = path.lower() if path.startswith('/v3'): return (None, auth_url) elif path.startswith('/v2'): return (auth_url, None) else: # not enough information to determine the auth version msg = _('Unable to determine the Keystone version ' 'to authenticate with using the given ' 'auth_url. Identity service may not support API ' 'version discovery. Please provide a versioned ' 'auth_url instead.') raise exc.CommandError(msg) def _get_keystone_session(self): # first create a Keystone session cacert = self.options.os_cacert or None cert = self.options.os_cert or None key = self.options.os_key or None insecure = self.options.insecure or False ks_session = session.Session.construct(dict(cacert=cacert, cert=cert, key=key, insecure=insecure)) # discover the supported keystone versions using the given url (v2_auth_url, v3_auth_url) = self._discover_auth_versions( session=ks_session, auth_url=self.options.os_auth_url) # Determine which authentication plugin to use. First inspect the # auth_url to see the supported version. If both v3 and v2 are # supported, then use the highest version if possible. 
user_domain_name = self.options.os_user_domain_name or None user_domain_id = self.options.os_user_domain_id or None project_domain_name = self.options.os_project_domain_name or None project_domain_id = self.options.os_project_domain_id or None domain_info = (user_domain_name or user_domain_id or project_domain_name or project_domain_id) if (v2_auth_url and not domain_info) or not v3_auth_url: ks_session.auth = self.get_v2_auth(v2_auth_url) else: ks_session.auth = self.get_v3_auth(v3_auth_url) return ks_session def main(argv=sys.argv[1:]): try: return NeutronShell(NEUTRON_API_VERSION).run( list(map(encodeutils.safe_decode, argv))) except KeyboardInterrupt: print("... terminating neutron client", file=sys.stderr) return 130 except exc.NeutronClientException: return 1 except Exception as e: print(e) return 1 if __name__ == "__main__": sys.exit(main(sys.argv[1:]))
apache-2.0
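A standalone sketch of the fallback used by `_discover_auth_versions` above when Keystone does not support version discovery: guess the API version from the auth_url path alone. Only the standard library is used; the function name and test URLs are illustrative, not part of python-neutronclient.

try:
    from urllib import parse as urlparse  # Python 3
except ImportError:
    import urlparse  # Python 2


def guess_auth_urls(auth_url):
    # Mirror the path inspection above: /v3 -> v3 only, /v2 -> v2 only.
    path = urlparse.urlparse(auth_url).path.lower()
    if path.startswith('/v3'):
        return (None, auth_url)
    elif path.startswith('/v2'):
        return (auth_url, None)
    raise ValueError('unversioned auth_url: %s' % auth_url)


assert guess_auth_urls('http://keystone:5000/v2.0') == ('http://keystone:5000/v2.0', None)
assert guess_auth_urls('http://keystone:5000/v3') == (None, 'http://keystone:5000/v3')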
h2oai/h2o-dev
h2o-py/tests/testdir_hdfs/pyunit_NOFEATURE_INTERNAL_HDFS_import_folder_orc_milsongs_air.py
4
1334
from __future__ import print_function import sys sys.path.insert(1,"../../") import h2o import time from tests import pyunit_utils #---------------------------------------------------------------------- # This test is used to show what happens if we mix datasets but keep # the file format to orc. We expect it to throw an error. #---------------------------------------------------------------------- def hdfs_orc_parser(): # Check if we are running inside the H2O network by seeing if we can touch # the namenode. hadoop_namenode_is_accessible = pyunit_utils.hadoop_namenode_is_accessible() if hadoop_namenode_is_accessible: hdfs_name_node = pyunit_utils.hadoop_namenode() if pyunit_utils.cannaryHDFSTest(hdfs_name_node, "/datasets/orc_parser/orc/orc_split_elim.orc"): print("Your hive-exec version is too old. Orc parser test {0} is " "skipped.".format("pyunit_INTERNAL_HDFS_import_folder_orc.py")) pass else: mix_folder = "/datasets/orc_milsongs_air" url_csv1 = "hdfs://{0}{1}".format(hdfs_name_node, mix_folder) multi_file_mixed = h2o.import_file(url_csv1) else: raise EnvironmentError if __name__ == "__main__": pyunit_utils.standalone_test(hdfs_orc_parser) else: hdfs_orc_parser()
apache-2.0
telwertowski/QGIS
scripts/qgis_fixes/fix_qfiledialog.py
69
1597
# -*- coding: utf-8 -*- """ Migrate QFileDialog methods from PyQt4 to PyQt5 """ # Author: Médéric Ribreux <mederic.ribreux@medspx.fr> # Adapted from fix_pyqt # and http://python3porting.com/fixers.html # Local imports from lib2to3.fixer_base import BaseFix class FixQfiledialog(BaseFix): PATTERN = """ power< 'QFileDialog' trailer< '.' filter=('getOpenFileNameAndFilter'|'getOpenFileNamesAndFilter'|'getSaveFileNameAndFilter') > any > | expr_stmt< filename=any '=' power< any trailer< '(' power< 'QFileDialog' trailer< '.' method=('getOpenFileName'|'getOpenFileNames'|'getSaveFileName') > any > ')' > > > | expr_stmt< filename=any '=' power< 'QFileDialog' trailer< '.' method=('getOpenFileName'|'getOpenFileNames'|'getSaveFileName') > any > > """ def transform(self, node, results): # First case: getOpen/SaveFileName # We need to add __ variable because in PyQt5 # getOpen/SaveFileName returns a tuple if 'filename' in results: node = results['filename'] # count number of leaves (result variables) nbLeaves = sum(1 for i in node.leaves()) # If we have less than two args, # we add __ special variable if nbLeaves < 3: fileName = node.value node.value = u'{}, __'.format(fileName) node.changed() # Rename *AndFilter methods if 'filter' in results: method = results['filter'][0] method.value = method.value.replace(u'AndFilter', u'') method.changed()
gpl-2.0
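A quick sketch of what the fixer above does to a call site, driven through lib2to3's RefactoringTool; the dotted module path and the source line are assumptions for illustration.

from lib2to3.refactor import RefactoringTool

# Assumes the fixer is importable as qgis_fixes.fix_qfiledialog.
tool = RefactoringTool(['qgis_fixes.fix_qfiledialog'])

src = "fileName = QFileDialog.getOpenFileName(None, 'Open', '/home')\n"
print(tool.refactor_string(src, '<demo>'), end='')
# PyQt5's getOpenFileName returns a (filename, selected_filter) tuple,
# so the fixer adds a throwaway variable:
#   fileName, __ = QFileDialog.getOpenFileName(None, 'Open', '/home')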
ssh-odoo/scrapy
scrapy/log.py
149
2007
""" This module is kept to provide a helpful warning about its removal. """ import logging import warnings from twisted.python.failure import Failure from scrapy.exceptions import ScrapyDeprecationWarning from scrapy.utils.log import failure_to_exc_info logger = logging.getLogger(__name__) warnings.warn("Module `scrapy.log` has been deprecated, Scrapy now relies on " "the builtin Python library for logging. Read the updated " "logging entry in the documentation to learn more.", ScrapyDeprecationWarning, stacklevel=2) # Imports and level_names variable kept for backwards-compatibility DEBUG = logging.DEBUG INFO = logging.INFO WARNING = logging.WARNING ERROR = logging.ERROR CRITICAL = logging.CRITICAL SILENT = CRITICAL + 1 level_names = { logging.DEBUG: "DEBUG", logging.INFO: "INFO", logging.WARNING: "WARNING", logging.ERROR: "ERROR", logging.CRITICAL: "CRITICAL", SILENT: "SILENT", } def msg(message=None, _level=logging.INFO, **kw): warnings.warn('log.msg has been deprecated, create a python logger and ' 'log through it instead', ScrapyDeprecationWarning, stacklevel=2) level = kw.pop('level', _level) message = kw.pop('format', message) # NOTE: logger.log doesn't handle well passing empty dictionaries with format # arguments because of some weird use-case: # https://hg.python.org/cpython/file/648dcafa7e5f/Lib/logging/__init__.py#l269 logger.log(level, message, *[kw] if kw else []) def err(_stuff=None, _why=None, **kw): warnings.warn('log.err has been deprecated, create a python logger and ' 'use its error method instead', ScrapyDeprecationWarning, stacklevel=2) level = kw.pop('level', logging.ERROR) failure = kw.pop('failure', _stuff) or Failure() message = kw.pop('why', _why) or failure.value logger.log(level, message, *[kw] if kw else [], exc_info=failure_to_exc_info(failure))
bsd-3-clause
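A sketch of the replacement the deprecation warnings above point to: a plain per-module logger instead of scrapy.log.msg and scrapy.log.err.

import logging

logger = logging.getLogger(__name__)

# old: from scrapy import log; log.msg("Spider opened", level=log.INFO)
logger.info("Spider opened")

# old: log.err(failure, "Item pipeline failed")
try:
    1 / 0
except ZeroDivisionError:
    logger.error("Item pipeline failed", exc_info=True)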
with-git/tensorflow
tensorflow/contrib/slim/python/slim/nets/overfeat.py
164
5562
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Contains the model definition for the OverFeat network. The definition for the network was obtained from: OverFeat: Integrated Recognition, Localization and Detection using Convolutional Networks Pierre Sermanet, David Eigen, Xiang Zhang, Michael Mathieu, Rob Fergus and Yann LeCun, 2014 http://arxiv.org/abs/1312.6229 Usage: with slim.arg_scope(overfeat.overfeat_arg_scope()): outputs, end_points = overfeat.overfeat(inputs) @@overfeat """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from tensorflow.contrib import layers from tensorflow.contrib.framework.python.ops import arg_scope from tensorflow.contrib.layers.python.layers import layers as layers_lib from tensorflow.contrib.layers.python.layers import regularizers from tensorflow.contrib.layers.python.layers import utils from tensorflow.python.ops import array_ops from tensorflow.python.ops import init_ops from tensorflow.python.ops import nn_ops from tensorflow.python.ops import variable_scope trunc_normal = lambda stddev: init_ops.truncated_normal_initializer(0.0, stddev) def overfeat_arg_scope(weight_decay=0.0005): with arg_scope( [layers.conv2d, layers_lib.fully_connected], activation_fn=nn_ops.relu, weights_regularizer=regularizers.l2_regularizer(weight_decay), biases_initializer=init_ops.zeros_initializer()): with arg_scope([layers.conv2d], padding='SAME'): with arg_scope([layers_lib.max_pool2d], padding='VALID') as arg_sc: return arg_sc def overfeat(inputs, num_classes=1000, is_training=True, dropout_keep_prob=0.5, spatial_squeeze=True, scope='overfeat'): """Contains the model definition for the OverFeat network. The definition for the network was obtained from: OverFeat: Integrated Recognition, Localization and Detection using Convolutional Networks Pierre Sermanet, David Eigen, Xiang Zhang, Michael Mathieu, Rob Fergus and Yann LeCun, 2014 http://arxiv.org/abs/1312.6229 Note: All the fully_connected layers have been transformed to conv2d layers. To use in classification mode, resize input to 231x231. To use in fully convolutional mode, set spatial_squeeze to false. Args: inputs: a tensor of size [batch_size, height, width, channels]. num_classes: number of predicted classes. is_training: whether or not the model is being trained. dropout_keep_prob: the probability that activations are kept in the dropout layers during training. spatial_squeeze: whether or not should squeeze the spatial dimensions of the outputs. Useful to remove unnecessary dimensions for classification. scope: Optional scope for the variables. Returns: the last op containing the log predictions and end_points dict. 
""" with variable_scope.variable_scope(scope, 'overfeat', [inputs]) as sc: end_points_collection = sc.name + '_end_points' # Collect outputs for conv2d, fully_connected and max_pool2d with arg_scope( [layers.conv2d, layers_lib.fully_connected, layers_lib.max_pool2d], outputs_collections=end_points_collection): net = layers.conv2d( inputs, 64, [11, 11], 4, padding='VALID', scope='conv1') net = layers_lib.max_pool2d(net, [2, 2], scope='pool1') net = layers.conv2d(net, 256, [5, 5], padding='VALID', scope='conv2') net = layers_lib.max_pool2d(net, [2, 2], scope='pool2') net = layers.conv2d(net, 512, [3, 3], scope='conv3') net = layers.conv2d(net, 1024, [3, 3], scope='conv4') net = layers.conv2d(net, 1024, [3, 3], scope='conv5') net = layers_lib.max_pool2d(net, [2, 2], scope='pool5') with arg_scope( [layers.conv2d], weights_initializer=trunc_normal(0.005), biases_initializer=init_ops.constant_initializer(0.1)): # Use conv2d instead of fully_connected layers. net = layers.conv2d(net, 3072, [6, 6], padding='VALID', scope='fc6') net = layers_lib.dropout( net, dropout_keep_prob, is_training=is_training, scope='dropout6') net = layers.conv2d(net, 4096, [1, 1], scope='fc7') net = layers_lib.dropout( net, dropout_keep_prob, is_training=is_training, scope='dropout7') net = layers.conv2d( net, num_classes, [1, 1], activation_fn=None, normalizer_fn=None, biases_initializer=init_ops.zeros_initializer(), scope='fc8') # Convert end_points_collection into a end_point dict. end_points = utils.convert_collection_to_dict(end_points_collection) if spatial_squeeze: net = array_ops.squeeze(net, [1, 2], name='fc8/squeezed') end_points[sc.name + '/fc8'] = net return net, end_points
apache-2.0
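A hedged usage sketch for the definition above, assuming the TF 1.x contrib layout this file ships in; the placeholder name and shapes are illustrative.

import tensorflow as tf
from tensorflow.contrib import slim
from tensorflow.contrib.slim.python.slim.nets import overfeat

# Classification mode expects 231x231 inputs, per the docstring.
images = tf.placeholder(tf.float32, [None, 231, 231, 3])
with slim.arg_scope(overfeat.overfeat_arg_scope()):
    # spatial_squeeze=False would instead keep logits as an
    # [N, H, W, num_classes] map for fully convolutional use.
    logits, end_points = overfeat.overfeat(images, num_classes=1000)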
romanchyla/pylucene-trunk
samples/ThreadIndexFiles.py
2
1350
# ==================================================================== # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ==================================================================== # This sample illustrates how to use a thread with PyLucene import sys, os, threading from datetime import datetime from lucene import StandardAnalyzer, VERSION, initVM, Version from IndexFiles import IndexFiles if __name__ == '__main__': if len(sys.argv) < 2: print IndexFiles.__doc__ sys.exit(1) env=initVM() print 'lucene', VERSION def fn(): env.attachCurrentThread() start = datetime.now() IndexFiles(sys.argv[1], "index", StandardAnalyzer(Version.LUCENE_CURRENT)) end = datetime.now() print end - start threading.Thread(target=fn).start()
apache-2.0
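The detail the sample above demonstrates: every Python thread that calls into the JVM must attach itself first. A minimal sketch, assuming the same PyLucene-era API:

import threading
from lucene import initVM

env = initVM()

def worker():
    env.attachCurrentThread()  # required before any lucene.* call on this thread
    # ... create analyzers, writers, etc. here ...

threading.Thread(target=worker).start()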
StarbuckBG/BTCGPU
contrib/zmq/zmq_sub3.4.py
44
3274
#!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""
ZMQ example using python3's asyncio

Bitcoin should be started with the command line arguments:
    bitcoind -testnet -daemon \
            -zmqpubhashblock=tcp://127.0.0.1:28332 \
            -zmqpubrawtx=tcp://127.0.0.1:28332 \
            -zmqpubhashtx=tcp://127.0.0.1:28332 \
            -zmqpubrawblock=tcp://127.0.0.1:28332

We use the asyncio library here. `self.handle()` installs itself as a
future at the end of the function. Since it never returns with the event
loop having an empty stack of futures, this creates an infinite loop. An
alternative is to wrap the contents of `handle` inside `while True`.

The `@asyncio.coroutine` decorator and the `yield from` syntax found here
was introduced in python 3.4 and has been deprecated in favor of the `async`
and `await` keywords respectively.

A blocking example using python 2.7 can be obtained from the git history:
https://github.com/bitcoin/bitcoin/blob/37a7fe9e440b83e2364d5498931253937abe9294/contrib/zmq/zmq_sub.py
"""

import binascii
import asyncio
import zmq
import zmq.asyncio
import signal
import struct
import sys

if not (sys.version_info.major >= 3 and sys.version_info.minor >= 4):
    print("This example only works with Python 3.4 and greater")
    exit(1)

port = 28332

class ZMQHandler():
    def __init__(self):
        self.loop = zmq.asyncio.install()
        self.zmqContext = zmq.asyncio.Context()

        self.zmqSubSocket = self.zmqContext.socket(zmq.SUB)
        self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashblock")
        self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "hashtx")
        self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawblock")
        self.zmqSubSocket.setsockopt_string(zmq.SUBSCRIBE, "rawtx")
        self.zmqSubSocket.connect("tcp://127.0.0.1:%i" % port)

    @asyncio.coroutine
    def handle(self):
        msg = yield from self.zmqSubSocket.recv_multipart()
        topic = msg[0]
        body = msg[1]
        sequence = "Unknown"
        if len(msg[-1]) == 4:
            msgSequence = struct.unpack('<I', msg[-1])[-1]
            sequence = str(msgSequence)
        if topic == b"hashblock":
            print('- HASH BLOCK ('+sequence+') -')
            print(binascii.hexlify(body))
        elif topic == b"hashtx":
            print('- HASH TX ('+sequence+') -')
            print(binascii.hexlify(body))
        elif topic == b"rawblock":
            print('- RAW BLOCK HEADER ('+sequence+') -')
            print(binascii.hexlify(body[:80]))
        elif topic == b"rawtx":
            print('- RAW TX ('+sequence+') -')
            print(binascii.hexlify(body))
        # schedule ourselves to receive the next message
        asyncio.ensure_future(self.handle())

    def start(self):
        self.loop.add_signal_handler(signal.SIGINT, self.stop)
        self.loop.create_task(self.handle())
        self.loop.run_forever()

    def stop(self):
        self.loop.stop()
        self.zmqContext.destroy()

daemon = ZMQHandler()
daemon.start()
mit
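The docstring above notes that @asyncio.coroutine and `yield from` are deprecated; a sketch of the same receive loop in the modern syntax (Python 3.5+ and pyzmq's asyncio support assumed):

import asyncio

async def handle(sock):
    # sock is a zmq.asyncio SUB socket, as built in ZMQHandler.__init__.
    msg = await sock.recv_multipart()
    topic, body = msg[0], msg[1]
    print(topic, len(body))
    # re-arm, exactly like the decorated version above
    asyncio.ensure_future(handle(sock))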
robhudson/kuma
vendor/packages/nose/tools/trivial.py
97
1184
"""Tools so trivial that tracebacks should not descend into them We define the ``__unittest`` symbol in their module namespace so unittest will skip them when printing tracebacks, just as it does for their corresponding methods in ``unittest`` proper. """ import re import unittest __all__ = ['ok_', 'eq_'] # Use the same flag as unittest itself to prevent descent into these functions: __unittest = 1 def ok_(expr, msg=None): """Shorthand for assert. Saves 3 whole characters! """ if not expr: raise AssertionError(msg) def eq_(a, b, msg=None): """Shorthand for 'assert a == b, "%r != %r" % (a, b) """ if not a == b: raise AssertionError(msg or "%r != %r" % (a, b)) # # Expose assert* from unittest.TestCase # - give them pep8 style names # caps = re.compile('([A-Z])') def pep8(name): return caps.sub(lambda m: '_' + m.groups()[0].lower(), name) class Dummy(unittest.TestCase): def nop(): pass _t = Dummy('nop') for at in [ at for at in dir(_t) if at.startswith('assert') and not '_' in at ]: pepd = pep8(at) vars()[pepd] = getattr(_t, at) __all__.append(pepd) del Dummy del _t del pep8
mpl-2.0
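Usage sketch for the helpers above: ok_ and eq_ are defined here, while the pep8-named asserts are generated from unittest.TestCase at import time.

from nose.tools import ok_, eq_, assert_equal, assert_raises

ok_(1 + 1 == 2, "arithmetic is broken")
eq_(2 + 2, 4)
assert_equal('a'.upper(), 'A')          # wraps TestCase.assertEqual
with assert_raises(ZeroDivisionError):  # wraps TestCase.assertRaises
    1 / 0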
hackatbrown/2015.hackatbrown.org
hack-at-brown-2015/requests/packages/urllib3/connection.py
89
3386
# urllib3/connection.py # Copyright 2008-2013 Andrey Petrov and contributors (see CONTRIBUTORS.txt) # # This module is part of urllib3 and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php import socket from socket import timeout as SocketTimeout try: # Python 3 from http.client import HTTPConnection, HTTPException except ImportError: from httplib import HTTPConnection, HTTPException class DummyConnection(object): "Used to detect a failed ConnectionCls import." pass try: # Compiled with SSL? ssl = None HTTPSConnection = DummyConnection class BaseSSLError(BaseException): pass try: # Python 3 from http.client import HTTPSConnection except ImportError: from httplib import HTTPSConnection import ssl BaseSSLError = ssl.SSLError except (ImportError, AttributeError): # Platform-specific: No SSL. pass from .exceptions import ( ConnectTimeoutError, ) from .packages.ssl_match_hostname import match_hostname from .util import ( assert_fingerprint, resolve_cert_reqs, resolve_ssl_version, ssl_wrap_socket, ) class VerifiedHTTPSConnection(HTTPSConnection): """ Based on httplib.HTTPSConnection but wraps the socket with SSL certification. """ cert_reqs = None ca_certs = None ssl_version = None def set_cert(self, key_file=None, cert_file=None, cert_reqs=None, ca_certs=None, assert_hostname=None, assert_fingerprint=None): self.key_file = key_file self.cert_file = cert_file self.cert_reqs = cert_reqs self.ca_certs = ca_certs self.assert_hostname = assert_hostname self.assert_fingerprint = assert_fingerprint def connect(self): # Add certificate verification try: sock = socket.create_connection( address=(self.host, self.port), timeout=self.timeout, ) except SocketTimeout: raise ConnectTimeoutError( self, "Connection to %s timed out. (connect timeout=%s)" % (self.host, self.timeout)) resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs) resolved_ssl_version = resolve_ssl_version(self.ssl_version) if self._tunnel_host: self.sock = sock # Calls self._set_hostport(), so self.host is # self._tunnel_host below. self._tunnel() # Wrap socket using verification with the root certs in # trusted_root_certs self.sock = ssl_wrap_socket(sock, self.key_file, self.cert_file, cert_reqs=resolved_cert_reqs, ca_certs=self.ca_certs, server_hostname=self.host, ssl_version=resolved_ssl_version) if resolved_cert_reqs != ssl.CERT_NONE: if self.assert_fingerprint: assert_fingerprint(self.sock.getpeercert(binary_form=True), self.assert_fingerprint) elif self.assert_hostname is not False: match_hostname(self.sock.getpeercert(), self.assert_hostname or self.host) if ssl: HTTPSConnection = VerifiedHTTPSConnection
mit
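A hedged sketch of driving VerifiedHTTPSConnection directly; in practice urllib3's connection pools construct it for you, and the CA bundle path is illustrative.

from urllib3.connection import VerifiedHTTPSConnection

conn = VerifiedHTTPSConnection('example.com', 443)
conn.set_cert(cert_reqs='CERT_REQUIRED',
              ca_certs='/etc/ssl/certs/ca-certificates.crt')
conn.connect()  # raises on connect timeout, bad cert, or hostname mismatch
conn.request('GET', '/')
print(conn.getresponse().status)
conn.close()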
quoclieu/codebrew17-starving
env/lib/python3.5/site-packages/werkzeug/script.py
270
11365
# -*- coding: utf-8 -*- r''' werkzeug.script ~~~~~~~~~~~~~~~ .. admonition:: Deprecated Functionality ``werkzeug.script`` is deprecated without replacement functionality. Python's command line support improved greatly with :mod:`argparse` and a bunch of alternative modules. Most of the time you have recurring tasks while writing an application such as starting up an interactive python interpreter with some prefilled imports, starting the development server, initializing the database or something similar. For that purpose werkzeug provides the `werkzeug.script` module which helps you writing such scripts. Basic Usage ----------- The following snippet is roughly the same in every werkzeug script:: #!/usr/bin/env python # -*- coding: utf-8 -*- from werkzeug import script # actions go here if __name__ == '__main__': script.run() Starting this script now does nothing because no actions are defined. An action is a function in the same module starting with ``"action_"`` which takes a number of arguments where every argument has a default. The type of the default value specifies the type of the argument. Arguments can then be passed by position or using ``--name=value`` from the shell. Because a runserver and shell command is pretty common there are two factory functions that create such commands:: def make_app(): from yourapplication import YourApplication return YourApplication(...) action_runserver = script.make_runserver(make_app, use_reloader=True) action_shell = script.make_shell(lambda: {'app': make_app()}) Using The Scripts ----------------- The script from above can be used like this from the shell now: .. sourcecode:: text $ ./manage.py --help $ ./manage.py runserver localhost 8080 --debugger --no-reloader $ ./manage.py runserver -p 4000 $ ./manage.py shell As you can see it's possible to pass parameters as positional arguments or as named parameters, pretty much like Python function calls. :copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details. :license: BSD, see LICENSE for more details. ''' from __future__ import print_function import sys import inspect import getopt from os.path import basename from werkzeug._compat import iteritems argument_types = { bool: 'boolean', str: 'string', int: 'integer', float: 'float' } converters = { 'boolean': lambda x: x.lower() in ('1', 'true', 'yes', 'on'), 'string': str, 'integer': int, 'float': float } def run(namespace=None, action_prefix='action_', args=None): """Run the script. Participating actions are looked up in the caller's namespace if no namespace is given, otherwise in the dict provided. Only items that start with action_prefix are processed as actions. If you want to use all items in the namespace provided as actions set action_prefix to an empty string. :param namespace: An optional dict where the functions are looked up in. By default the local namespace of the caller is used. :param action_prefix: The prefix for the functions. Everything else is ignored. :param args: the arguments for the function. If not specified :data:`sys.argv` without the first argument is used. 
""" if namespace is None: namespace = sys._getframe(1).f_locals actions = find_actions(namespace, action_prefix) if args is None: args = sys.argv[1:] if not args or args[0] in ('-h', '--help'): return print_usage(actions) elif args[0] not in actions: fail('Unknown action \'%s\'' % args[0]) arguments = {} types = {} key_to_arg = {} long_options = [] formatstring = '' func, doc, arg_def = actions[args.pop(0)] for idx, (arg, shortcut, default, option_type) in enumerate(arg_def): real_arg = arg.replace('-', '_') if shortcut: formatstring += shortcut if not isinstance(default, bool): formatstring += ':' key_to_arg['-' + shortcut] = real_arg long_options.append(isinstance(default, bool) and arg or arg + '=') key_to_arg['--' + arg] = real_arg key_to_arg[idx] = real_arg types[real_arg] = option_type arguments[real_arg] = default try: optlist, posargs = getopt.gnu_getopt(args, formatstring, long_options) except getopt.GetoptError as e: fail(str(e)) specified_arguments = set() for key, value in enumerate(posargs): try: arg = key_to_arg[key] except IndexError: fail('Too many parameters') specified_arguments.add(arg) try: arguments[arg] = converters[types[arg]](value) except ValueError: fail('Invalid value for argument %s (%s): %s' % (key, arg, value)) for key, value in optlist: arg = key_to_arg[key] if arg in specified_arguments: fail('Argument \'%s\' is specified twice' % arg) if types[arg] == 'boolean': if arg.startswith('no_'): value = 'no' else: value = 'yes' try: arguments[arg] = converters[types[arg]](value) except ValueError: fail('Invalid value for \'%s\': %s' % (key, value)) newargs = {} for k, v in iteritems(arguments): newargs[k.startswith('no_') and k[3:] or k] = v arguments = newargs return func(**arguments) def fail(message, code=-1): """Fail with an error.""" print('Error: %s' % message, file=sys.stderr) sys.exit(code) def find_actions(namespace, action_prefix): """Find all the actions in the namespace.""" actions = {} for key, value in iteritems(namespace): if key.startswith(action_prefix): actions[key[len(action_prefix):]] = analyse_action(value) return actions def print_usage(actions): """Print the usage information. 
(Help screen)""" actions = sorted(iteritems(actions)) print('usage: %s <action> [<options>]' % basename(sys.argv[0])) print(' %s --help' % basename(sys.argv[0])) print() print('actions:') for name, (func, doc, arguments) in actions: print(' %s:' % name) for line in doc.splitlines(): print(' %s' % line) if arguments: print() for arg, shortcut, default, argtype in arguments: if isinstance(default, bool): print(' %s' % ( (shortcut and '-%s, ' % shortcut or '') + '--' + arg )) else: print(' %-30s%-10s%s' % ( (shortcut and '-%s, ' % shortcut or '') + '--' + arg, argtype, default )) print() def analyse_action(func): """Analyse a function.""" description = inspect.getdoc(func) or 'undocumented action' arguments = [] args, varargs, kwargs, defaults = inspect.getargspec(func) if varargs or kwargs: raise TypeError('variable length arguments for action not allowed.') if len(args) != len(defaults or ()): raise TypeError('not all arguments have proper definitions') for idx, (arg, definition) in enumerate(zip(args, defaults or ())): if arg.startswith('_'): raise TypeError('arguments may not start with an underscore') if not isinstance(definition, tuple): shortcut = None default = definition else: shortcut, default = definition argument_type = argument_types[type(default)] if isinstance(default, bool) and default is True: arg = 'no-' + arg arguments.append((arg.replace('_', '-'), shortcut, default, argument_type)) return func, description, arguments def make_shell(init_func=None, banner=None, use_ipython=True): """Returns an action callback that spawns a new interactive python shell. :param init_func: an optional initialization function that is called before the shell is started. The return value of this function is the initial namespace. :param banner: the banner that is displayed before the shell. If not specified a generic banner is used instead. :param use_ipython: if set to `True` ipython is used if available. """ if banner is None: banner = 'Interactive Werkzeug Shell' if init_func is None: init_func = dict def action(ipython=use_ipython): """Start a new interactive python session.""" namespace = init_func() if ipython: try: try: from IPython.frontend.terminal.embed import InteractiveShellEmbed sh = InteractiveShellEmbed(banner1=banner) except ImportError: from IPython.Shell import IPShellEmbed sh = IPShellEmbed(banner=banner) except ImportError: pass else: sh(global_ns={}, local_ns=namespace) return from code import interact interact(banner, local=namespace) return action def make_runserver(app_factory, hostname='localhost', port=5000, use_reloader=False, use_debugger=False, use_evalex=True, threaded=False, processes=1, static_files=None, extra_files=None, ssl_context=None): """Returns an action callback that spawns a new development server. .. versionadded:: 0.5 `static_files` and `extra_files` was added. ..versionadded:: 0.6.1 `ssl_context` was added. :param app_factory: a function that returns a new WSGI application. :param hostname: the default hostname the server should listen on. :param port: the default port of the server. :param use_reloader: the default setting for the reloader. :param use_evalex: the default setting for the evalex flag of the debugger. :param threaded: the default threading setting. :param processes: the default number of processes to start. :param static_files: optional dict of static files. :param extra_files: optional list of extra files to track for reloading. :param ssl_context: optional SSL context for running server in HTTPS mode. 
""" def action(hostname=('h', hostname), port=('p', port), reloader=use_reloader, debugger=use_debugger, evalex=use_evalex, threaded=threaded, processes=processes): """Start a new development server.""" from werkzeug.serving import run_simple app = app_factory() run_simple(hostname, port, app, use_reloader=reloader, use_debugger=debugger, use_evalex=evalex, extra_files=extra_files, reloader_interval=1, threaded=threaded, processes=processes, static_files=static_files, ssl_context=ssl_context) return action
mit
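A sketch of a custom action for the module above, following its own conventions: a plain default fixes the argument type, and a (shortcut, default) tuple adds a short option. The action name is illustrative.

from werkzeug import script

def action_initdb(user=('u', 'admin'), dry_run=False):
    """Initialize the database."""
    print('initdb as %s%s' % (user, ' (dry run)' if dry_run else ''))

if __name__ == '__main__':
    script.run()

# Shell usage:
#   $ ./manage.py initdb -u admin2
#   $ ./manage.py initdb --user=admin2 --dry-run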
54lihaoxin/leetcode_python
src/ConvertSortedListToBinarySearchTree/solution.py
1
2264
# Convert Sorted List to Binary Search Tree
#
# Given a singly linked list where elements are sorted in ascending order, convert it to a height balanced BST.

debug = True
debug = False

from CommonClasses import *    # hxl: comment out this line for submission

# hxl: It's easy to create an array to hold the list and then build a tree from it, but it's boring.
#      The following solution doesn't make use of an extra array.

class Solution:

    def __init__(self):
        self.remainingList = None

    # @param head, a list node
    # @return a tree node
    def sortedListToBST(self, head):
        if head == None:
            return None

        self.remainingList = head
        length = self.getListLength(head)
        return self.binaryMakeTree(length)

    def getListLength(self, head):
        count = 0
        while head != None:
            head = head.next
            count += 1
        return count

    def binaryMakeTree(self, length):
        if length < 1:
            return None

        if length == 1:
            root = TreeNode(self.remainingList.val)
            self.remainingList = self.remainingList.next
            return root
        elif length == 2:
            left = TreeNode(self.remainingList.val)
            self.remainingList = self.remainingList.next
            root = TreeNode(self.remainingList.val)
            self.remainingList = self.remainingList.next
            root.left = left
            return root
        elif length == 3:
            left = TreeNode(self.remainingList.val)
            self.remainingList = self.remainingList.next
            root = TreeNode(self.remainingList.val)
            root.left = left
            self.remainingList = self.remainingList.next
            root.right = TreeNode(self.remainingList.val)
            self.remainingList = self.remainingList.next
            return root
        else:
            midPoint = length // 2  # integer division, valid on Python 2 and 3
            left = self.binaryMakeTree(midPoint)
            root = self.binaryMakeTree(1)
            root.left = left
            root.right = self.binaryMakeTree(length - midPoint - 1)
            return root
apache-2.0
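A usage sketch for the solution above, with minimal stand-ins for the ListNode/TreeNode helpers that CommonClasses is assumed to provide:

class ListNode:
    def __init__(self, x):
        self.val, self.next = x, None

class TreeNode:
    def __init__(self, x):
        self.val, self.left, self.right = x, None, None

head = None
for v in (5, 4, 3, 2, 1):  # builds the sorted list 1->2->3->4->5
    node = ListNode(v)
    node.next, head = head, node

root = Solution().sortedListToBST(head)
print(root.val)        # 3: the middle element becomes the root
print(root.left.val)   # 2
print(root.right.val)  # 5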
ctrlaltdylan/CouchPotato
library/sqlalchemy/orm/identity.py
17
8475
# identity.py # Copyright (C) the SQLAlchemy authors and contributors # # This module is part of SQLAlchemy and is released under # the MIT License: http://www.opensource.org/licenses/mit-license.php import weakref from sqlalchemy import util as base_util from sqlalchemy.orm import attributes class IdentityMap(dict): def __init__(self): self._mutable_attrs = set() self._modified = set() self._wr = weakref.ref(self) def replace(self, state): raise NotImplementedError() def add(self, state): raise NotImplementedError() def remove(self, state): raise NotImplementedError() def update(self, dict): raise NotImplementedError("IdentityMap uses add() to insert data") def clear(self): raise NotImplementedError("IdentityMap uses remove() to remove data") def _manage_incoming_state(self, state): state._instance_dict = self._wr if state.modified: self._modified.add(state) if state.manager.mutable_attributes: self._mutable_attrs.add(state) def _manage_removed_state(self, state): del state._instance_dict self._mutable_attrs.discard(state) self._modified.discard(state) def _dirty_states(self): return self._modified.union(s for s in self._mutable_attrs.copy() if s.modified) def check_modified(self): """return True if any InstanceStates present have been marked as 'modified'.""" if self._modified: return True else: for state in self._mutable_attrs.copy(): if state.modified: return True return False def has_key(self, key): return key in self def popitem(self): raise NotImplementedError("IdentityMap uses remove() to remove data") def pop(self, key, *args): raise NotImplementedError("IdentityMap uses remove() to remove data") def setdefault(self, key, default=None): raise NotImplementedError("IdentityMap uses add() to insert data") def copy(self): raise NotImplementedError() def __setitem__(self, key, value): raise NotImplementedError("IdentityMap uses add() to insert data") def __delitem__(self, key): raise NotImplementedError("IdentityMap uses remove() to remove data") class WeakInstanceDict(IdentityMap): def __init__(self): IdentityMap.__init__(self) self._remove_mutex = base_util.threading.Lock() def __getitem__(self, key): state = dict.__getitem__(self, key) o = state.obj() if o is None: o = state._is_really_none() if o is None: raise KeyError, key return o def __contains__(self, key): try: if dict.__contains__(self, key): state = dict.__getitem__(self, key) o = state.obj() if o is None: o = state._is_really_none() else: return False except KeyError: return False else: return o is not None def contains_state(self, state): return dict.get(self, state.key) is state def replace(self, state): if dict.__contains__(self, state.key): existing = dict.__getitem__(self, state.key) if existing is not state: self._manage_removed_state(existing) else: return dict.__setitem__(self, state.key, state) self._manage_incoming_state(state) def add(self, state): if state.key in self: if dict.__getitem__(self, state.key) is not state: raise AssertionError("A conflicting state is already " "present in the identity map for key %r" % (state.key, )) else: dict.__setitem__(self, state.key, state) self._manage_incoming_state(state) def remove_key(self, key): state = dict.__getitem__(self, key) self.remove(state) def remove(self, state): self._remove_mutex.acquire() try: if dict.pop(self, state.key) is not state: raise AssertionError("State %s is not present in this identity map" % state) finally: self._remove_mutex.release() self._manage_removed_state(state) def discard(self, state): if self.contains_state(state): 
dict.__delitem__(self, state.key) self._manage_removed_state(state) def get(self, key, default=None): state = dict.get(self, key, default) if state is default: return default o = state.obj() if o is None: o = state._is_really_none() if o is None: return default return o def items(self): # Py2K return list(self.iteritems()) def iteritems(self): # end Py2K self._remove_mutex.acquire() try: result = [] for state in dict.values(self): value = state.obj() if value is not None: result.append((state.key, value)) return iter(result) finally: self._remove_mutex.release() def values(self): # Py2K return list(self.itervalues()) def itervalues(self): # end Py2K self._remove_mutex.acquire() try: result = [] for state in dict.values(self): value = state.obj() if value is not None: result.append(value) return iter(result) finally: self._remove_mutex.release() def all_states(self): self._remove_mutex.acquire() try: # Py3K # return list(dict.values(self)) # Py2K return dict.values(self) # end Py2K finally: self._remove_mutex.release() def prune(self): return 0 class StrongInstanceDict(IdentityMap): def all_states(self): return [attributes.instance_state(o) for o in self.itervalues()] def contains_state(self, state): return state.key in self and attributes.instance_state(self[state.key]) is state def replace(self, state): if dict.__contains__(self, state.key): existing = dict.__getitem__(self, state.key) existing = attributes.instance_state(existing) if existing is not state: self._manage_removed_state(existing) else: return dict.__setitem__(self, state.key, state.obj()) self._manage_incoming_state(state) def add(self, state): if state.key in self: if attributes.instance_state(dict.__getitem__(self, state.key)) is not state: raise AssertionError("A conflicting state is already present in the identity map for key %r" % (state.key, )) else: dict.__setitem__(self, state.key, state.obj()) self._manage_incoming_state(state) def remove(self, state): if attributes.instance_state(dict.pop(self, state.key)) is not state: raise AssertionError("State %s is not present in this identity map" % state) self._manage_removed_state(state) def discard(self, state): if self.contains_state(state): dict.__delitem__(self, state.key) self._manage_removed_state(state) def remove_key(self, key): state = attributes.instance_state(dict.__getitem__(self, key)) self.remove(state) def prune(self): """prune unreferenced, non-dirty states.""" ref_count = len(self) dirty = [s.obj() for s in self.all_states() if s.modified] # work around http://bugs.python.org/issue6149 keepers = weakref.WeakValueDictionary() keepers.update(self) dict.clear(self) dict.update(self, keepers) self.modified = bool(dirty) return ref_count - len(self)
gpl-3.0
franciscocpg/pop
Import/libtorrent/tools/parse_disk_access.py
57
2523
#! /usr/bin/env python import os, sys, time lines = open(sys.argv[1], 'rb').readlines() # logfile format: # <time(us)> <key>: <value> # example: # 16434 read cache: 17 keys = ['read', 'write', 'head movement', 'seek per read byte', 'seek per written byte', 'read operations per second', 'write operations per second'] colors = ['305030', '503030', '3030f0', '10a010', 'a01010', 'd0d040', 'd040d0'] style = ['dots', 'points', 'lines', 'lines', 'lines', 'lines', 'lines'] axis = ['x1y1', 'x1y1', 'x1y2', 'x1y2', 'x1y2', 'x1y2', 'x1y2'] plot = [True, False, False, False, False, True, False] out = open('disk_access_log.dat', 'w+') time = 1000000 last_pos = 0 last_t = 0 cur_movement = 0 cur_read = 0 cur_write = 0 cur_read_ops = 0 cur_write_ops = 0 for l in lines: try: # strip newline l = l[0:-1].split(' ') t = int(l[0]) k = l[1] n = int(l[2]) except: print l continue read = '-' write = '-' movement = '-' amount_read = '-' amount_write = '-' read_ops = '-' write_ops = '-' if k == 'read': read = '%d' % n cur_read_ops += 1 if k == 'write': write = '%d' % n cur_write_ops += 1 if k == 'read_end': cur_read += n - last_pos if k == 'write_end': cur_write += n - last_pos cur_movement += abs(last_pos - n) last_pos = n if last_t + time <= t: movement = '%d' % cur_movement if cur_read > 0: amount_read = '%d' % (cur_movement / cur_read) if cur_write > 0: amount_write = '%d' % (cur_movement / cur_write) read_ops = '%d' % cur_read_ops write_ops = '%d' % cur_write_ops cur_movement = 0 cur_read = 0 cur_write = 0 last_t = t cur_read_ops = 0 cur_write_ops = 0 print >>out, '%d\t%s\t%s\t%s\t%s\t%s\t%s\t%s' % (t, read, write, movement, amount_read, amount_write, read_ops, write_ops) out.close() out = open('disk_access.gnuplot', 'wb') print >>out, "set term png size 1200,700" print >>out, 'set output "disk_access.png"' print >>out, 'set xrange [*:*]' #print >>out, 'set y2range [0:*]' print >>out, 'set xlabel "time (us)"' print >>out, 'set ylabel "drive offset"' #print >>out, 'set y2label "bytes / %d second(s)"' % (time / 1000) print >>out, "set key box" print >>out, "set tics nomirror" print >>out, "set y2tics auto" print >>out, 'plot', count = 1 for k in keys: count += 1 if not plot[count-2]: continue print >>out, ' "disk_access_log.dat" using 1:%d title "%s" with %s lt rgb "#%s" axis %s,' \ % (count, k, style[count-2], colors[count-2], axis[count-2]), print >>out, 'x=0' out.close() os.system('gnuplot disk_access.gnuplot')
gpl-3.0
VigTech/Vigtech-Services
env/lib/python2.7/site-packages/django/contrib/flatpages/migrations/0001_initial.py
143
1582
# -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): dependencies = [ ('sites', '0001_initial'), ] operations = [ migrations.CreateModel( name='FlatPage', fields=[ ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)), ('url', models.CharField(max_length=100, verbose_name='URL', db_index=True)), ('title', models.CharField(max_length=200, verbose_name='title')), ('content', models.TextField(verbose_name='content', blank=True)), ('enable_comments', models.BooleanField(default=False, verbose_name='enable comments')), ('template_name', models.CharField(help_text="Example: 'flatpages/contact_page.html'. If this isn't provided, the system will use 'flatpages/default.html'.", max_length=70, verbose_name='template name', blank=True)), ('registration_required', models.BooleanField(default=False, help_text='If this is checked, only logged-in users will be able to view the page.', verbose_name='registration required')), ('sites', models.ManyToManyField(to='sites.Site')), ], options={ 'ordering': ('url',), 'db_table': 'django_flatpage', 'verbose_name': 'flat page', 'verbose_name_plural': 'flat pages', }, bases=(models.Model,), ), ]
lgpl-3.0
Oliver2213/NVDAYoutube-dl
addon/globalPlugins/nvdaYoutubeDL/youtube_dl/extractor/telemb.py
177
2964
# coding: utf-8 from __future__ import unicode_literals import re from .common import InfoExtractor from ..utils import remove_start class TeleMBIE(InfoExtractor): _VALID_URL = r'https?://(?:www\.)?telemb\.be/(?P<display_id>.+?)_d_(?P<id>\d+)\.html' _TESTS = [ { 'url': 'http://www.telemb.be/mons-cook-with-danielle-des-cours-de-cuisine-en-anglais-_d_13466.html', 'md5': 'f45ea69878516ba039835794e0f8f783', 'info_dict': { 'id': '13466', 'display_id': 'mons-cook-with-danielle-des-cours-de-cuisine-en-anglais-', 'ext': 'mp4', 'title': 'Mons - Cook with Danielle : des cours de cuisine en anglais ! - Les reportages', 'description': 'md5:bc5225f47b17c309761c856ad4776265', 'thumbnail': 're:^http://.*\.(?:jpg|png)$', } }, { # non-ASCII characters in download URL 'url': 'http://telemb.be/les-reportages-havre-incendie-mortel_d_13514.html', 'md5': '6e9682736e5ccd4eab7f21e855350733', 'info_dict': { 'id': '13514', 'display_id': 'les-reportages-havre-incendie-mortel', 'ext': 'mp4', 'title': 'Havré - Incendie mortel - Les reportages', 'description': 'md5:5e54cb449acb029c2b7734e2d946bd4a', 'thumbnail': 're:^http://.*\.(?:jpg|png)$', } }, ] def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) video_id = mobj.group('id') display_id = mobj.group('display_id') webpage = self._download_webpage(url, display_id) formats = [] for video_url in re.findall(r'file\s*:\s*"([^"]+)"', webpage): fmt = { 'url': video_url, 'format_id': video_url.split(':')[0] } rtmp = re.search(r'^(?P<url>rtmp://[^/]+/(?P<app>.+))/(?P<playpath>mp4:.+)$', video_url) if rtmp: fmt.update({ 'play_path': rtmp.group('playpath'), 'app': rtmp.group('app'), 'player_url': 'http://p.jwpcdn.com/6/10/jwplayer.flash.swf', 'page_url': 'http://www.telemb.be', 'preference': -1, }) formats.append(fmt) self._sort_formats(formats) title = remove_start(self._og_search_title(webpage), 'TéléMB : ') description = self._html_search_regex( r'<meta property="og:description" content="(.+?)" />', webpage, 'description', fatal=False) thumbnail = self._og_search_thumbnail(webpage) return { 'id': video_id, 'display_id': display_id, 'title': title, 'description': description, 'thumbnail': thumbnail, 'formats': formats, }
gpl-2.0
darkelement/Sunflower
application/plugins/file_list/gio_wrapper.py
2
1433
import gio

from plugin_base.provider import Mode


class File:
	"""This is a wrapper class that provides file-like object but
	uses gio.File for actual operations."""

	def __init__(self, path, mode):
		if mode == Mode.READ:
			self._resource = gio.File(path).read()

		elif mode == Mode.WRITE:
			if gio.File(path).query_exists():
				gio.File(path).delete()
			self._resource = gio.File(path).create()

		elif mode == Mode.APPEND:
			self._resource = gio.File(path).append_to()

	def close(self):
		"""Close file"""
		self._resource.close()

	def closed(self):
		"""Return True if file is closed"""
		return self._resource.is_closed()

	def flush(self):
		"""Flush internal buffer"""
		if hasattr(self._resource, 'flush'):
			self._resource.flush()

	def read(self, size=-1):
		"""Read at most _size_ bytes from the file"""
		result = self._resource.read(size)

		# gio returns True for a zero-length read; normalize to a string
		if result is True:
			result = ""

		return result

	def seek(self, offset, whence=0):
		"""Set the file's current position"""
		relative = (1, 0, 2)[whence]

		if self._resource.can_seek():
			self._resource.seek(offset, relative)

	def tell(self):
		"""Return file's current position"""
		return self._resource.tell()

	def truncate(self, size=None):
		"""Truncate the file's size"""
		if size is None:
			size = self.tell()

		if self._resource.can_truncate():
			self._resource.truncate(size)

	def write(self, buff):
		"""Write string to the file"""
		self._resource.write(buff)
gpl-3.0
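A usage sketch for the wrapper above; it only runs inside Sunflower, since it assumes pygtk's gio and the plugin_base.provider.Mode constants (paths are illustrative).

from plugin_base.provider import Mode

src = File('/tmp/input.txt', Mode.READ)
dst = File('/tmp/output.txt', Mode.WRITE)

chunk = src.read(4096)
while chunk:
    dst.write(chunk)
    chunk = src.read(4096)

src.close()
dst.close()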
taigaio/taiga-back
taiga/hooks/bitbucket/api.py
1
2578
# -*- coding: utf-8 -*- # Copyright (C) 2014-present Taiga Agile LLC # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. from django.utils.translation import ugettext_lazy as _ from django.conf import settings from taiga.base import exceptions as exc from taiga.projects.models import Project from taiga.hooks.api import BaseWebhookApiViewSet from . import event_hooks from netaddr import all_matching_cidrs from netaddr.core import AddrFormatError from urllib.parse import parse_qs from ipware.ip import get_ip class BitBucketViewSet(BaseWebhookApiViewSet): event_hook_classes = { "repo:push": event_hooks.PushEventHook, "issue:created": event_hooks.IssuesEventHook, "issue:comment_created": event_hooks.IssueCommentEventHook, } def _validate_signature(self, project, request): secret_key = request.GET.get("key", None) if secret_key is None: return False if not hasattr(project, "modules_config"): return False if project.modules_config.config is None: return False project_secret = project.modules_config.config.get("bitbucket", {}).get("secret", "") if not project_secret: return False bitbucket_config = project.modules_config.config.get("bitbucket", {}) valid_origin_ips = bitbucket_config.get("valid_origin_ips", settings.BITBUCKET_VALID_ORIGIN_IPS) origin_ip = get_ip(request) matching_origin_ip = True if valid_origin_ips: try: matching_origin_ip = len(all_matching_cidrs(origin_ip,valid_origin_ips)) > 0 except(AddrFormatError, ValueError): matching_origin_ip = False if not matching_origin_ip: return False return project_secret == secret_key def _get_event_name(self, request): return request.META.get('HTTP_X_EVENT_KEY', None)
agpl-3.0
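The IP check in _validate_signature above leans on netaddr's all_matching_cidrs, which returns the list of CIDR blocks that contain a given address. A standalone sketch of the same check; the CIDR blocks and addresses below are made-up examples:

from netaddr import all_matching_cidrs
from netaddr.core import AddrFormatError

valid_origin_ips = ["131.103.20.160/27", "165.254.145.0/26"]  # example CIDRs

def ip_is_allowed(origin_ip):
    try:
        return len(all_matching_cidrs(origin_ip, valid_origin_ips)) > 0
    except (AddrFormatError, ValueError):
        # malformed address or CIDR list entry: treat as not allowed
        return False

print(ip_is_allowed("131.103.20.165"))  # True: inside the first /27 block
print(ip_is_allowed("10.0.0.1"))        # False: matches no block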
theefer/xmms2
waftools/cython.py
8
3571
#! /usr/bin/env python
# encoding: utf-8
# Thomas Nagy, 2010

import re
import waflib
import waflib.Logs as _msg
from waflib import Task
from waflib.TaskGen import extension, feature, before_method, after_method

cy_api_pat = re.compile(r'\s*?cdef\s*?(public|api)\w*')
re_cyt = re.compile('import\\s(\\w+)\\s*$', re.M)

@extension('.pyx')
def add_cython_file(self, node):
    """
    Process a *.pyx* file given in the list of source files. No additional
    feature is required::

        def build(bld):
            bld(features='c cshlib pyext', source='main.c foo.pyx', target='app')
    """
    ext = '.c'
    if 'cxx' in self.features:
        self.env.append_unique('CYTHONFLAGS', '--cplus')
        ext = '.cc'

    for inc in getattr(self, 'cython_includes', []):
        d = self.path.find_dir(inc)
        self.env.append_unique('CYTHONFLAGS', '-I' + d.get_src().abspath())
        self.env.append_unique('CYTHONFLAGS', '-I' + d.get_bld().abspath())

    tsk = self.create_task('cython', node, node.change_ext(ext).get_bld())
    self.source += tsk.outputs

class cython(Task.Task):
    run_str = '${CYTHON} ${CYTHONFLAGS} -o ${TGT[0].abspath()} ${SRC}'
    color = 'BLUE'

    vars = ['INCLUDES']
    """
    Rebuild whenever the INCLUDES change. The variables such as CYTHONFLAGS
    will be appended by the metaclass.
    """

    ext_out = ['.h']
    """
    The creation of a .h file is known only after the build has begun, so it
    is not possible to compute a build order just by looking at the task
    inputs/outputs.
    """

    def runnable_status(self):
        """
        Perform a double-check to add the headers created by cython to the
        output nodes. The scanner is executed only when the cython task must
        be executed (optimization).
        """
        ret = super(cython, self).runnable_status()
        if ret == Task.ASK_LATER:
            return ret
        for x in self.generator.bld.raw_deps[self.uid()]:
            if x.startswith('header:'):
                self.outputs.append(self.inputs[0].parent.find_or_declare(x.replace('header:', '')))
        return super(cython, self).runnable_status()

    def scan(self):
        """
        Return the dependent files (.pxd) by looking in the include folders.
        Put the headers to generate in the custom list "bld.raw_deps". To
        inspect the scanner results use::

            $ waf clean build --zones=deps
        """
        txt = self.inputs[0].read()
        mods = []
        for m in re_cyt.finditer(txt):
            mods.append(m.group(1))
        _msg.debug("cython: mods %r" % mods)
        incs = getattr(self.generator, 'cython_includes', [])
        incs = [self.generator.path.find_dir(x) for x in incs]
        incs.append(self.inputs[0].parent)

        found = []
        missing = []
        for x in mods:
            for y in incs:
                k = y.find_resource(x + '.pxd')
                if k:
                    found.append(k)
                    break
            else:
                missing.append(x)
        _msg.debug("cython: found %r" % found)

        # Now the .h created - store them in bld.raw_deps for later use
        has_api = False
        has_public = False
        for l in txt.splitlines():
            if cy_api_pat.match(l):
                if ' api ' in l:
                    has_api = True
                if ' public ' in l:
                    has_public = True
        name = self.inputs[0].name.replace('.pyx', '')
        if has_api:
            missing.append('header:%s_api.h' % name)
        if has_public:
            missing.append('header:%s.h' % name)

        return (found, missing)

def options(ctx):
    ctx.add_option('--cython-flags', action='store', default='',
                   help='space separated list of flags to pass to cython')

def configure(ctx):
    if not ctx.env.CC and not ctx.env.CXX:
        ctx.fatal('Load a C/C++ compiler first')
    if not ctx.env.PYTHON:
        ctx.fatal('Load the python tool first!')
    ctx.find_program('cython', var='CYTHON')
    if ctx.options.cython_flags:
        ctx.env.CYTHONFLAGS = ctx.options.cython_flags
lgpl-2.1
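A minimal wscript sketch for the tool above, based on the usage shown in its own docstring; the target and file names are illustrative, and the tool's configure() requires a C compiler and the python tool to be loaded first:

# wscript -- illustrative sketch following the tool's docstring
def options(opt):
    opt.load('compiler_c python cython')

def configure(conf):
    conf.load('compiler_c python cython')
    conf.check_python_headers()

def build(bld):
    # foo.pyx is translated to C by the cython task, then compiled into a
    # Python extension together with main.c
    bld(features='c cshlib pyext', source='main.c foo.pyx', target='app')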
shivam1111/odoo
addons/base_action_rule/__init__.py
438
1098
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

import base_action_rule
import test_models

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0
nrc/servo
python/servo/testing_commands.py
3
31073
# Copyright 2013 The Servo Project Developers. See the COPYRIGHT
# file at the top-level directory of this distribution.
#
# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
# option. This file may not be copied, modified, or distributed
# except according to those terms.

from __future__ import print_function, unicode_literals

import argparse
import re
import sys
import os
import os.path as path
import copy
from collections import OrderedDict
from time import time

from mach.registrar import Registrar
from mach.decorators import (
    CommandArgument,
    CommandProvider,
    Command,
)

from servo.command_base import CommandBase, call, check_call
from wptrunner import wptcommandline
from update import updatecommandline
from servo_tidy import tidy
from servo_tidy_tests import test_tidy

SCRIPT_PATH = os.path.split(__file__)[0]
PROJECT_TOPLEVEL_PATH = os.path.abspath(os.path.join(SCRIPT_PATH, "..", ".."))
WEB_PLATFORM_TESTS_PATH = os.path.join("tests", "wpt", "web-platform-tests")
SERVO_TESTS_PATH = os.path.join("tests", "wpt", "mozilla", "tests")

TEST_SUITES = OrderedDict([
    ("tidy", {"kwargs": {"all_files": False, "no_progress": False, "self_test": False},
              "include_arg": "include"}),
    ("wpt", {"kwargs": {"release": False},
             "paths": [path.abspath(WEB_PLATFORM_TESTS_PATH),
                       path.abspath(SERVO_TESTS_PATH)],
             "include_arg": "include"}),
    ("css", {"kwargs": {"release": False},
             "paths": [path.abspath(path.join("tests", "wpt", "css-tests"))],
             "include_arg": "include"}),
    ("unit", {"kwargs": {},
              "paths": [path.abspath(path.join("tests", "unit"))],
              "include_arg": "test_name"}),
    ("compiletest", {"kwargs": {"release": False},
                     "paths": [path.abspath(path.join("tests", "compiletest"))],
                     "include_arg": "test_name"})
])

TEST_SUITES_BY_PREFIX = {path: k for k, v in TEST_SUITES.iteritems() if "paths" in v for path in v["paths"]}


def create_parser_wpt():
    parser = wptcommandline.create_parser()
    parser.add_argument('--release', default=False, action="store_true",
                        help="Run with a release build of servo")
    parser.add_argument('--chaos', default=False, action="store_true",
                        help="Run under chaos mode in rr until a failure is captured")
    parser.add_argument('--pref', default=[], action="append", dest="prefs",
                        help="Pass preferences to servo")
    return parser


@CommandProvider
class MachCommands(CommandBase):
    DEFAULT_RENDER_MODE = "cpu"
    HELP_RENDER_MODE = "Value can be 'cpu', 'gpu' or 'both' (default " + DEFAULT_RENDER_MODE + ")"

    def __init__(self, context):
        CommandBase.__init__(self, context)
        if not hasattr(self.context, "built_tests"):
            self.context.built_tests = False

    @Command('test',
             description='Run specified Servo tests',
             category='testing')
    @CommandArgument('params', default=None, nargs="...",
                     help="Optionally select test based on "
                          "test file directory")
    @CommandArgument('--render-mode', '-rm', default=DEFAULT_RENDER_MODE,
                     help="The render mode to be used on all tests. " + HELP_RENDER_MODE)
    @CommandArgument('--release', default=False, action="store_true",
                     help="Run with a release build of servo")
    @CommandArgument('--tidy-all', default=False, action="store_true",
                     help="Check all files, and run the WPT lint in tidy, "
                          "even if unchanged")
    @CommandArgument('--no-progress', default=False, action="store_true",
                     help="Don't show progress for tidy")
    @CommandArgument('--self-test', default=False, action="store_true",
                     help="Run unit tests for tidy")
    @CommandArgument('--all', default=False, action="store_true", dest="all_suites",
                     help="Run all test suites")
    def test(self, params, render_mode=DEFAULT_RENDER_MODE, release=False, tidy_all=False,
             no_progress=False, self_test=False, all_suites=False):
        suites = copy.deepcopy(TEST_SUITES)
        suites["tidy"]["kwargs"] = {"all_files": tidy_all, "no_progress": no_progress,
                                    "self_test": self_test}
        suites["wpt"]["kwargs"] = {"release": release}
        suites["css"]["kwargs"] = {"release": release}
        suites["unit"]["kwargs"] = {}
        suites["compiletest"]["kwargs"] = {"release": release}

        selected_suites = OrderedDict()

        if params is None:
            if all_suites:
                params = suites.keys()
            else:
                print("Specify a test path or suite name, or pass --all to run all test suites.\n\nAvailable suites:")
                for s in suites:
                    print("    %s" % s)
                return 1

        for arg in params:
            found = False
            if arg in suites and arg not in selected_suites:
                selected_suites[arg] = []
                found = True
            else:
                suite = self.suite_for_path(arg)
                if suite is not None:
                    if suite not in selected_suites:
                        selected_suites[suite] = []
                    selected_suites[suite].append(arg)
                    found = True
                    break
            if not found:
                print("%s is not a valid test path or suite name" % arg)
                return 1

        test_start = time()
        for suite, tests in selected_suites.iteritems():
            props = suites[suite]
            kwargs = props.get("kwargs", {})
            if tests:
                kwargs[props["include_arg"]] = tests

            Registrar.dispatch("test-%s" % suite, context=self.context, **kwargs)

        elapsed = time() - test_start

        print("Tests completed in %0.2fs" % elapsed)

    # Helper to determine which test suite owns the path
    def suite_for_path(self, path_arg):
        if os.path.exists(path.abspath(path_arg)):
            abs_path = path.abspath(path_arg)
            for prefix, suite in TEST_SUITES_BY_PREFIX.iteritems():
                if abs_path.startswith(prefix):
                    return suite
        return None

    @Command('test-unit',
             description='Run unit tests',
             category='testing')
    @CommandArgument('--package', '-p', default=None, help="Specific package to test")
    @CommandArgument('test_name', nargs=argparse.REMAINDER,
                     help="Only run tests that match this pattern or file path")
    def test_unit(self, test_name=None, package=None):
        if test_name is None:
            test_name = []

        self.ensure_bootstrapped()

        if package:
            packages = {package}
        else:
            packages = set()

        test_patterns = []
        for test in test_name:
            # add package if 'tests/unit/<package>'
            match = re.search("tests/unit/(\\w+)/?$", test)
            if match:
                packages.add(match.group(1))
            # add package & test if '<package>/<test>', 'tests/unit/<package>/<test>.rs', or similar
            elif re.search("\\w/\\w", test):
                tokens = test.split("/")
                packages.add(tokens[-2])
                test_prefix = tokens[-1]
                if test_prefix.endswith(".rs"):
                    test_prefix = test_prefix[:-3]
                test_prefix += "::"
                test_patterns.append(test_prefix)
            # add test as-is otherwise
            else:
                test_patterns.append(test)

        if not packages:
            packages = set(os.listdir(path.join(self.context.topdir, "tests", "unit")))

        args = ["cargo", "test"]
        for crate in packages:
            args += ["-p", "%s_tests" % crate]
        args += test_patterns

        features = self.servo_features()
        if features:
            args += ["--features", "%s" % ' '.join(features)]

        env = self.build_env()
        env["RUST_BACKTRACE"] = "1"

        result = call(args, env=env, cwd=self.servo_crate())
        if result != 0:
            return result

    @Command('test-compiletest',
             description='Run compiletests',
             category='testing')
    @CommandArgument('--package', '-p', default=None, help="Specific package to test")
    @CommandArgument('test_name', nargs=argparse.REMAINDER,
                     help="Only run tests that match this pattern or file path")
    @CommandArgument('--release', default=False, action="store_true",
                     help="Run with a release build of servo")
    def test_compiletest(self, test_name=None, package=None, release=False):
        if test_name is None:
            test_name = []

        self.ensure_bootstrapped()

        if package:
            packages = {package}
        else:
            packages = set()

        test_patterns = []
        for test in test_name:
            # add package if 'tests/compiletest/<package>'
            match = re.search("tests/compiletest/(\\w+)/?$", test)
            if match:
                packages.add(match.group(1))
            # add package & test if '<package>/<test>', 'tests/compiletest/<package>/<test>.rs', or similar
            elif re.search("\\w/\\w", test):
                tokens = test.split("/")
                packages.add(tokens[-2])
                test_prefix = tokens[-1]
                if test_prefix.endswith(".rs"):
                    test_prefix = test_prefix[:-3]
                test_prefix += "::"
                test_patterns.append(test_prefix)
            # add test as-is otherwise
            else:
                test_patterns.append(test)

        if not packages:
            packages = set(os.listdir(path.join(self.context.topdir, "tests", "compiletest")))

        packages.remove("helper")

        args = ["cargo", "test"]
        for crate in packages:
            args += ["-p", "%s_compiletest" % crate]
        args += test_patterns

        env = self.build_env()
        if release:
            env["BUILD_MODE"] = "release"
            args += ["--release"]
        else:
            env["BUILD_MODE"] = "debug"

        result = call(args, env=env, cwd=self.servo_crate())
        if result != 0:
            return result

    @Command('test-content',
             description='Run the content tests',
             category='testing')
    def test_content(self):
        print("Content tests have been replaced by web-platform-tests under "
              "tests/wpt/mozilla/.")
        return 0

    @Command('test-tidy',
             description='Run the source code tidiness check',
             category='testing')
    @CommandArgument('--all', default=False, action="store_true", dest="all_files",
                     help="Check all files, and run the WPT lint in tidy, "
                          "even if unchanged")
    @CommandArgument('--no-progress', default=False, action="store_true",
                     help="Don't show progress for tidy")
    @CommandArgument('--self-test', default=False, action="store_true",
                     help="Run unit tests for tidy")
    def test_tidy(self, all_files, no_progress, self_test):
        if self_test:
            return test_tidy.do_tests()
        else:
            return tidy.scan(not all_files, not no_progress)

    @Command('test-webidl',
             description='Run the WebIDL parser tests',
             category='testing')
    @CommandArgument('--quiet', '-q', default=False, action="store_true",
                     help="Don't print passing tests.")
    @CommandArgument('tests', default=None, nargs="...",
                     help="Specific tests to run, relative to the tests directory")
    def test_webidl(self, quiet, tests):
        self.ensure_bootstrapped()

        test_file_dir = path.abspath(path.join(PROJECT_TOPLEVEL_PATH, "components", "script",
                                               "dom", "bindings", "codegen", "parser"))
        # For the `import WebIDL` in runtests.py
        sys.path.insert(0, test_file_dir)

        run_file = path.abspath(path.join(test_file_dir, "runtests.py"))
        run_globals = {"__file__": run_file}
        execfile(run_file, run_globals)

        verbose = not quiet
        return run_globals["run_tests"](tests, verbose)

    @Command('test-wpt-failure',
             description='Run the web platform tests',
             category='testing')
    def test_wpt_failure(self):
        self.ensure_bootstrapped()
        return not call([
            "bash",
            path.join("tests", "wpt", "run.sh"),
            "--no-pause-after-test",
            "--include",
            "infrastructure/failing-test.html"
        ], env=self.build_env())

    @Command('test-wpt',
             description='Run the web platform tests',
             category='testing',
             parser=create_parser_wpt)
    def test_wpt(self, **kwargs):
        self.ensure_bootstrapped()
        return self.run_test_list_or_dispatch(kwargs["test_list"], "wpt", self._test_wpt, **kwargs)

    def _test_wpt(self, **kwargs):
        hosts_file_path = path.join(self.context.topdir, 'tests', 'wpt', 'hosts')
        os.environ["hosts_file_path"] = hosts_file_path
        run_file = path.abspath(path.join(self.context.topdir, "tests", "wpt", "run_wpt.py"))
        return self.wptrunner(run_file, **kwargs)

    # Helper to ensure all specified paths are handled, otherwise dispatch to appropriate test suite.
    def run_test_list_or_dispatch(self, requested_paths, correct_suite, correct_function, **kwargs):
        if not requested_paths:
            return correct_function(**kwargs)
        else:
            # Paths specified on command line. Ensure they can be handled, re-dispatch otherwise.
            all_handled = True
            for test_path in requested_paths:
                suite = self.suite_for_path(test_path)
                if suite is not None and correct_suite != suite:
                    all_handled = False
                    print("Warning: %s is not a %s test. Delegating to test-%s." % (test_path, correct_suite, suite))

            if all_handled:
                return correct_function(**kwargs)
            else:
                # Dispatch each test to the correct suite via test()
                Registrar.dispatch("test", context=self.context, params=requested_paths)

    # Helper for test_css and test_wpt:
    def wptrunner(self, run_file, **kwargs):
        os.environ["RUST_BACKTRACE"] = "1"
        kwargs["debug"] = not kwargs["release"]
        if kwargs.pop("chaos"):
            kwargs["debugger"] = "rr"
            kwargs["debugger_args"] = "record --chaos"
            kwargs["repeat_until_unexpected"] = True
            # TODO: Delete rr traces from green test runs?
        prefs = kwargs.pop("prefs")
        if prefs:
            binary_args = []
            for pref in prefs:
                binary_args.append("--pref=" + pref)
            kwargs["binary_args"] = binary_args

        run_globals = {"__file__": run_file}
        execfile(run_file, run_globals)
        return run_globals["run_tests"](**kwargs)

    @Command('update-manifest',
             description='run test-wpt --manifest-update SKIP_TESTS to regenerate MANIFEST.json',
             category='testing',
             parser=create_parser_wpt)
    def update_manifest(self, **kwargs):
        kwargs['test_list'].append(str('SKIP_TESTS'))
        kwargs['manifest_update'] = True
        return self.test_wpt(**kwargs)

    @Command('update-wpt',
             description='Update the web platform tests',
             category='testing',
             parser=updatecommandline.create_parser())
    @CommandArgument('--patch', action='store_true', default=False,
                     help='Create an mq patch or git commit containing the changes')
    def update_wpt(self, patch, **kwargs):
        self.ensure_bootstrapped()
        run_file = path.abspath(path.join("tests", "wpt", "update.py"))
        kwargs["no_patch"] = not patch

        if kwargs["no_patch"] and kwargs["sync"]:
            print("Are you sure you don't want a patch?")
            return 1

        run_globals = {"__file__": run_file}
        execfile(run_file, run_globals)
        return run_globals["update_tests"](**kwargs)

    @Command('test-jquery',
             description='Run the jQuery test suite',
             category='testing')
    @CommandArgument('--release', '-r', action='store_true', help='Run the release build')
    @CommandArgument('--dev', '-d', action='store_true', help='Run the dev build')
    def test_jquery(self, release, dev):
        return self.jquery_test_runner("test", release, dev)

    @Command('test-dromaeo',
             description='Run the Dromaeo test suite',
             category='testing')
    @CommandArgument('tests', default=["recommended"], nargs="...",
                     help="Specific tests to run")
    @CommandArgument('--release', '-r', action='store_true', help='Run the release build')
    @CommandArgument('--dev', '-d', action='store_true', help='Run the dev build')
    def test_dromaeo(self, tests, release, dev):
        return self.dromaeo_test_runner(tests, release, dev)

    @Command('update-jquery',
             description='Update the jQuery test suite expected results',
             category='testing')
    @CommandArgument('--release', '-r', action='store_true', help='Run the release build')
    @CommandArgument('--dev', '-d', action='store_true', help='Run the dev build')
    def update_jquery(self, release, dev):
        return self.jquery_test_runner("update", release, dev)

    @Command('test-css',
             description='Run the web platform tests',
             category='testing',
             parser=create_parser_wpt)
    def test_css(self, **kwargs):
        self.ensure_bootstrapped()
        return self.run_test_list_or_dispatch(kwargs["test_list"], "css", self._test_css, **kwargs)

    def _test_css(self, **kwargs):
        run_file = path.abspath(path.join("tests", "wpt", "run_css.py"))
        return self.wptrunner(run_file, **kwargs)

    @Command('update-css',
             description='Update the web platform tests',
             category='testing',
             parser=updatecommandline.create_parser())
    @CommandArgument('--patch', action='store_true', default=False,
                     help='Create an mq patch or git commit containing the changes')
    def update_css(self, patch, **kwargs):
        self.ensure_bootstrapped()
        run_file = path.abspath(path.join("tests", "wpt", "update_css.py"))
        kwargs["no_patch"] = not patch

        if kwargs["no_patch"] and kwargs["sync"]:
            print("Are you sure you don't want a patch?")
            return 1

        run_globals = {"__file__": run_file}
        execfile(run_file, run_globals)
        return run_globals["update_tests"](**kwargs)

    @Command('compare_dromaeo',
             description='compare outputs of two runs of ./mach test-dromaeo command',
             category='testing')
    @CommandArgument('params', default=None, nargs="...",
                     help=" filepaths of output files of two runs of dromaeo test ")
    def compare_dromaeo(self, params):
        prev_op_filename = params[0]
        cur_op_filename = params[1]
        result = {'Test': [], 'Prev_Time': [], 'Cur_Time': [], 'Difference(%)': []}
        with open(prev_op_filename, 'r') as prev_op, open(cur_op_filename, 'r') as cur_op:
            l1 = prev_op.readline()
            l2 = cur_op.readline()

            while ((l1.find('[dromaeo] Saving...') and l2.find('[dromaeo] Saving...'))):
                l1 = prev_op.readline()
                l2 = cur_op.readline()

            reach = 3
            while (reach > 0):
                l1 = prev_op.readline()
                l2 = cur_op.readline()
                reach -= 1

            while True:
                l1 = prev_op.readline()
                l2 = cur_op.readline()
                if not l1:
                    break
                result['Test'].append(str(l1).split('|')[0].strip())
                result['Prev_Time'].append(float(str(l1).split('|')[1].strip()))
                result['Cur_Time'].append(float(str(l2).split('|')[1].strip()))
                a = float(str(l1).split('|')[1].strip())
                b = float(str(l2).split('|')[1].strip())
                result['Difference(%)'].append(((b - a) / a) * 100)

            width_col1 = max([len(x) for x in result['Test']])
            width_col2 = max([len(str(x)) for x in result['Prev_Time']])
            width_col3 = max([len(str(x)) for x in result['Cur_Time']])
            width_col4 = max([len(str(x)) for x in result['Difference(%)']])

            for p, q, r, s in zip(['Test'], ['First Run'], ['Second Run'], ['Difference(%)']):
                print("\033[1m" + "{}|{}|{}|{}".format(p.ljust(width_col1), q.ljust(width_col2),
                      r.ljust(width_col3), s.ljust(width_col4)) + "\033[0m" + "\n" +
                      "--------------------------------------------------" +
                      "-------------------------------------------------------------------------")

            for a1, b1, c1, d1 in zip(result['Test'], result['Prev_Time'], result['Cur_Time'], result['Difference(%)']):
                if d1 > 0:
                    print("\033[91m" + "{}|{}|{}|{}".format(a1.ljust(width_col1),
                          str(b1).ljust(width_col2), str(c1).ljust(width_col3),
                          str(d1).ljust(width_col4)) + "\033[0m")
                elif d1 < 0:
                    print("\033[92m" + "{}|{}|{}|{}".format(a1.ljust(width_col1),
                          str(b1).ljust(width_col2), str(c1).ljust(width_col3),
                          str(d1).ljust(width_col4)) + "\033[0m")
                else:
                    print("{}|{}|{}|{}".format(a1.ljust(width_col1), str(b1).ljust(width_col2),
                          str(c1).ljust(width_col3), str(d1).ljust(width_col4)))

    def jquery_test_runner(self, cmd, release, dev):
        self.ensure_bootstrapped()
        base_dir = path.abspath(path.join("tests", "jquery"))
        jquery_dir = path.join(base_dir, "jquery")
        run_file = path.join(base_dir, "run_jquery.py")

        # Clone the jQuery repository if it doesn't exist
        if not os.path.isdir(jquery_dir):
            check_call(
                ["git", "clone", "-b", "servo", "--depth", "1", "https://github.com/servo/jquery", jquery_dir])

        # Run pull in case the jQuery repo was updated since last test run
        check_call(
            ["git", "-C", jquery_dir, "pull"])

        # Check that a release servo build exists
        bin_path = path.abspath(self.get_binary_path(release, dev))

        return call([run_file, cmd, bin_path, base_dir])

    def dromaeo_test_runner(self, tests, release, dev):
        self.ensure_bootstrapped()
        base_dir = path.abspath(path.join("tests", "dromaeo"))
        dromaeo_dir = path.join(base_dir, "dromaeo")
        run_file = path.join(base_dir, "run_dromaeo.py")

        # Clone the Dromaeo repository if it doesn't exist
        if not os.path.isdir(dromaeo_dir):
            check_call(
                ["git", "clone", "-b", "servo", "--depth", "1", "https://github.com/notriddle/dromaeo", dromaeo_dir])

        # Run pull in case the Dromaeo repo was updated since last test run
        check_call(
            ["git", "-C", dromaeo_dir, "pull"])

        # Compile test suite
        check_call(
            ["make", "-C", dromaeo_dir, "web"])

        # Check that a release servo build exists
        bin_path = path.abspath(self.get_binary_path(release, dev))

        return check_call(
            [run_file, "|".join(tests), bin_path, base_dir])


def create_parser_create():
    import argparse
    p = argparse.ArgumentParser()
    p.add_argument("--no-editor", action="store_true",
                   help="Don't try to open the test in an editor")
    p.add_argument("-e", "--editor", action="store", help="Editor to use")
    p.add_argument("--no-run", action="store_true",
                   help="Don't try to update the wpt manifest or open the test in a browser")
    p.add_argument('--release', action="store_true",
                   help="Run with a release build of servo")
    p.add_argument("--long-timeout", action="store_true",
                   help="Test should be given a long timeout (typically 60s rather than 10s,"
                        "but varies depending on environment)")
    p.add_argument("--overwrite", action="store_true",
                   help="Allow overwriting an existing test file")
    p.add_argument("-r", "--reftest", action="store_true",
                   help="Create a reftest rather than a testharness (js) test"),
    p.add_argument("-ref", "--reference", dest="ref", help="Path to the reference file")
    p.add_argument("--mismatch", action="store_true",
                   help="Create a mismatch reftest")
    p.add_argument("--wait", action="store_true",
                   help="Create a reftest that waits until takeScreenshot() is called")
    p.add_argument("path", action="store", help="Path to the test file")
    return p


@CommandProvider
class WebPlatformTestsCreator(CommandBase):
    template_prefix = """<!doctype html>
%(documentElement)s<meta charset="utf-8">
"""
    template_long_timeout = "<meta name=timeout content=long>\n"

    template_body_th = """<title></title>
<script src="/resources/testharness.js"></script>
<script src="/resources/testharnessreport.js"></script>
<script>
</script>
"""

    template_body_reftest = """<title></title>
<link rel="%(match)s" href="%(ref)s">
"""

    template_body_reftest_wait = """<script src="/common/reftest-wait.js"></script>
"""

    def make_test_file_url(self, absolute_file_path):
        # Make the path relative to the project top-level directory so that
        # we can more easily find the right test directory.
        file_path = os.path.relpath(absolute_file_path, PROJECT_TOPLEVEL_PATH)

        if file_path.startswith(WEB_PLATFORM_TESTS_PATH):
            url = file_path[len(WEB_PLATFORM_TESTS_PATH):]
        elif file_path.startswith(SERVO_TESTS_PATH):
            url = "/mozilla" + file_path[len(SERVO_TESTS_PATH):]
        else:  # This test file isn't in any known test directory.
            return None

        return url.replace(os.path.sep, "/")

    def make_test_and_reference_urls(self, test_path, reference_path):
        test_path = os.path.normpath(os.path.abspath(test_path))
        test_url = self.make_test_file_url(test_path)
        if test_url is None:
            return (None, None)

        if reference_path is None:
            return (test_url, '')
        reference_path = os.path.normpath(os.path.abspath(reference_path))

        # If the reference is in the same directory, the URL can just be the
        # name of the reference file itself.
        reference_path_parts = os.path.split(reference_path)
        if reference_path_parts[0] == os.path.split(test_path)[0]:
            return (test_url, reference_path_parts[1])
        return (test_url, self.make_test_file_url(reference_path))

    @Command("create-wpt",
             category="testing",
             parser=create_parser_create)
    def run_create(self, **kwargs):
        import subprocess

        test_path = kwargs["path"]
        reference_path = kwargs["ref"]

        if reference_path:
            kwargs["reftest"] = True

        (test_url, reference_url) = self.make_test_and_reference_urls(
            test_path, reference_path)

        if test_url is None:
            print("""Test path %s is not in wpt directories:
tests/wpt/web-platform-tests for tests that may be shared
tests/wpt/mozilla/tests for Servo-only tests""" % test_path)
            return 1

        if reference_url is None:
            print("""Reference path %s is not in wpt directories:
testing/web-platform/tests for tests that may be shared
testing/web-platform/mozilla/tests for Servo-only tests""" % reference_path)
            return 1

        if os.path.exists(test_path) and not kwargs["overwrite"]:
            print("Test path already exists, pass --overwrite to replace")
            return 1

        if kwargs["mismatch"] and not kwargs["reftest"]:
            print("--mismatch only makes sense for a reftest")
            return 1

        if kwargs["wait"] and not kwargs["reftest"]:
            print("--wait only makes sense for a reftest")
            return 1

        args = {"documentElement": "<html class=\"reftest-wait\">\n" if kwargs["wait"] else ""}
        template = self.template_prefix % args
        if kwargs["long_timeout"]:
            template += self.template_long_timeout

        if kwargs["reftest"]:
            args = {"match": "match" if not kwargs["mismatch"] else "mismatch",
                    "ref": reference_url}
            template += self.template_body_reftest % args
            if kwargs["wait"]:
                template += self.template_body_reftest_wait
        else:
            template += self.template_body_th
        with open(test_path, "w") as f:
            f.write(template)

        if kwargs["no_editor"]:
            editor = None
        elif kwargs["editor"]:
            editor = kwargs["editor"]
        elif "VISUAL" in os.environ:
            editor = os.environ["VISUAL"]
        elif "EDITOR" in os.environ:
            editor = os.environ["EDITOR"]
        else:
            editor = None

        if editor:
            proc = subprocess.Popen("%s %s" % (editor, test_path), shell=True)

        if not kwargs["no_run"]:
            p = create_parser_wpt()
            args = ["--manifest-update"]
            if kwargs["release"]:
                args.append("--release")
            args.append(test_path)
            wpt_kwargs = vars(p.parse_args(args))
            self.context.commands.dispatch("test-wpt", self.context, **wpt_kwargs)

        if editor:
            proc.wait()

    @Command('update-net-cookies',
             description='Update the net unit tests with cookie tests from http-state',
             category='testing')
    def update_net_cookies(self):
        cache_dir = path.join(self.config["tools"]["cache-dir"], "tests")
        run_file = path.abspath(path.join(PROJECT_TOPLEVEL_PATH,
                                          "tests", "unit", "net",
                                          "cookie_http_state_utils.py"))
        run_globals = {"__file__": run_file}
        execfile(run_file, run_globals)
        return run_globals["update_test_file"](cache_dir)
mpl-2.0
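The test-unit path parsing above turns 'tests/unit/<package>/<test>.rs' into a cargo package plus a test-name prefix. A standalone sketch of just that mapping, with no mach machinery required; the function name is made up:

import re

def split_unit_test_arg(test):
    """Mirror of the argument parsing in test_unit above (sketch only)."""
    packages, test_patterns = set(), []
    if re.search("tests/unit/(\\w+)/?$", test):
        # a whole package directory: run every test in that crate
        packages.add(re.search("tests/unit/(\\w+)/?$", test).group(1))
    elif re.search("\\w/\\w", test):
        # '<package>/<test>.rs' style: crate plus a 'name::' test prefix
        tokens = test.split("/")
        packages.add(tokens[-2])
        prefix = tokens[-1]
        if prefix.endswith(".rs"):
            prefix = prefix[:-3]
        test_patterns.append(prefix + "::")
    else:
        # bare test name, passed through to cargo as-is
        test_patterns.append(test)
    return packages, test_patterns

print(split_unit_test_arg("tests/unit/net"))  # -> packages {'net'}, no patterns
print(split_unit_test_arg("net/cookie.rs"))   # -> packages {'net'}, ['cookie::']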
ryfeus/lambda-packs
LightGBM_sklearn_scipy_numpy/source/scipy/odr/__init__.py
19
4343
""" ================================================= Orthogonal distance regression (:mod:`scipy.odr`) ================================================= .. currentmodule:: scipy.odr Package Content =============== .. autosummary:: :toctree: generated/ Data -- The data to fit. RealData -- Data with weights as actual std. dev.s and/or covariances. Model -- Stores information about the function to be fit. ODR -- Gathers all info & manages the main fitting routine. Output -- Result from the fit. odr -- Low-level function for ODR. OdrWarning -- Warning about potential problems when running ODR OdrError -- Error exception. OdrStop -- Stop exception. odr_error -- Same as OdrError (for backwards compatibility) odr_stop -- Same as OdrStop (for backwards compatibility) Prebuilt models: .. autosummary:: :toctree: generated/ polynomial .. data:: exponential .. data:: multilinear .. data:: unilinear .. data:: quadratic .. data:: polynomial Usage information ================= Introduction ------------ Why Orthogonal Distance Regression (ODR)? Sometimes one has measurement errors in the explanatory (a.k.a., "independent") variable(s), not just the response (a.k.a., "dependent") variable(s). Ordinary Least Squares (OLS) fitting procedures treat the data for explanatory variables as fixed, i.e., not subject to error of any kind. Furthermore, OLS procedures require that the response variables be an explicit function of the explanatory variables; sometimes making the equation explicit is impractical and/or introduces errors. ODR can handle both of these cases with ease, and can even reduce to the OLS case if that is sufficient for the problem. ODRPACK is a FORTRAN-77 library for performing ODR with possibly non-linear fitting functions. It uses a modified trust-region Levenberg-Marquardt-type algorithm [1]_ to estimate the function parameters. The fitting functions are provided by Python functions operating on NumPy arrays. The required derivatives may be provided by Python functions as well, or may be estimated numerically. ODRPACK can do explicit or implicit ODR fits, or it can do OLS. Input and output variables may be multi-dimensional. Weights can be provided to account for different variances of the observations, and even covariances between dimensions of the variables. The `scipy.odr` package offers an object-oriented interface to ODRPACK, in addition to the low-level `odr` function. Additional background information about ODRPACK can be found in the `ODRPACK User's Guide <https://docs.scipy.org/doc/external/odrpack_guide.pdf>`_, reading which is recommended. Basic usage ----------- 1. Define the function you want to fit against.:: def f(B, x): '''Linear function y = m*x + b''' # B is a vector of the parameters. # x is an array of the current x values. # x is in the same format as the x passed to Data or RealData. # # Return an array in the same format as y passed to Data or RealData. return B[0]*x + B[1] 2. Create a Model.:: linear = Model(f) 3. Create a Data or RealData instance.:: mydata = Data(x, y, wd=1./power(sx,2), we=1./power(sy,2)) or, when the actual covariances are known:: mydata = RealData(x, y, sx=sx, sy=sy) 4. Instantiate ODR with your data, model and initial parameter estimate.:: myodr = ODR(mydata, linear, beta0=[1., 2.]) 5. Run the fit.:: myoutput = myodr.run() 6. Examine output.:: myoutput.pprint() References ---------- .. [1] P. T. Boggs and J. E. 
Rogers, "Orthogonal Distance Regression," in "Statistical analysis of measurement error models and applications: proceedings of the AMS-IMS-SIAM joint summer research conference held June 10-16, 1989," Contemporary Mathematics, vol. 112, pg. 186, 1990. """ # version: 0.7 # author: Robert Kern <robert.kern@gmail.com> # date: 2006-09-21 from __future__ import division, print_function, absolute_import from .odrpack import * from .models import * from . import add_newdocs __all__ = [s for s in dir() if not s.startswith('_')] from scipy._lib._testutils import PytestTester test = PytestTester(__name__) del PytestTester
mit
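A runnable end-to-end version of the "Basic usage" steps from the docstring above, fitting the linear model to synthetic data; the data values are made up:

import numpy as np
from scipy.odr import Data, Model, ODR

def f(B, x):
    '''Linear function y = m*x + b'''
    return B[0] * x + B[1]

linear = Model(f)

x = np.array([0.0, 1.0, 2.0, 3.0, 4.0])
y = np.array([0.1, 2.1, 3.9, 6.2, 7.8])          # roughly y = 2x
# scalar weights standing in for 1/sx**2 and 1/sy**2
mydata = Data(x, y, wd=1. / np.power(0.1, 2), we=1. / np.power(0.2, 2))

myodr = ODR(mydata, linear, beta0=[1., 2.])
myoutput = myodr.run()
myoutput.pprint()                                # beta[0] ~ 2, beta[1] ~ 0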
supersven/intellij-community
python/testData/highlighting/declarations.py
61
1081
# bg is always black.
# effect is white
# func decl: red bold
# class decl: blue bold
# predefined decl: green bold

def <info descr="null" type="INFORMATION" foreground="0xff0000" background="0x000000" effectcolor="0xffffff" effecttype="BOXED" fonttype="1">foo</info>():
    pass

class <info descr="null" type="INFORMATION" foreground="0x0000ff" background="0x000000" effectcolor="0xffffff" effecttype="BOXED" fonttype="1">Moo</info>:
    def <info descr="null" type="INFORMATION" foreground="0x00ff00" background="0x000000" effectcolor="0xffffff" effecttype="BOXED" fonttype="1">__init__</info>(<info descr="null">self</info>):
        pass

    def <info descr="null" type="INFORMATION" foreground="0xff0000" background="0x000000" effectcolor="0xffffff" effecttype="BOXED" fonttype="1">doodle</info>(<info descr="null">self</info>):
        pass

    def <info descr="null" type="INFORMATION" foreground="0xff0000" background="0x000000" effectcolor="0xffffff" effecttype="BOXED" fonttype="1">__made_up__</info>(<info descr="null">self</info>):
        return <info type="INFORMATION">None</info>
apache-2.0
pepeantena4040/MiSitioWeb
lib/bb/ui/toasterui.py
2
14114
#
# BitBake ToasterUI Implementation
# based on (No)TTY UI Implementation by Richard Purdie
#
# Handling output to TTYs or files (no TTY)
#
# Copyright (C) 2006-2012 Richard Purdie
# Copyright (C) 2013      Intel Corporation
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 2 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.

from __future__ import division

# sys must be imported before the guarded bb import below, since the
# exception handler uses sys.exit()
import sys

try:
    import bb
except RuntimeError as exc:
    sys.exit(str(exc))

from bb.ui import uihelper
from bb.ui.buildinfohelper import BuildInfoHelper

import bb.msg
import copy
import fcntl
import logging
import os
import progressbar
import signal
import struct
import time
import xmlrpclib

featureSet = [bb.cooker.CookerFeatures.HOB_EXTRA_CACHES,
              bb.cooker.CookerFeatures.SEND_DEPENDS_TREE,
              bb.cooker.CookerFeatures.BASEDATASTORE_TRACKING,
              bb.cooker.CookerFeatures.SEND_SANITYEVENTS]

logger = logging.getLogger("ToasterLogger")
interactive = sys.stdout.isatty()

def _log_settings_from_server(server):
    # Get values of variables which control our output
    includelogs, error = server.runCommand(["getVariable", "BBINCLUDELOGS"])
    if error:
        logger.error("Unable to get the value of BBINCLUDELOGS variable: %s" % error)
        raise BaseException(error)
    loglines, error = server.runCommand(["getVariable", "BBINCLUDELOGS_LINES"])
    if error:
        logger.error("Unable to get the value of BBINCLUDELOGS_LINES variable: %s" % error)
        raise BaseException(error)
    consolelogfile, error = server.runCommand(["getVariable", "BB_CONSOLELOG"])
    if error:
        logger.error("Unable to get the value of BB_CONSOLELOG variable: %s" % error)
        raise BaseException(error)
    return includelogs, loglines, consolelogfile

def main(server, eventHandler, params):
    helper = uihelper.BBUIHelper()

    console = logging.StreamHandler(sys.stdout)
    format_str = "%(levelname)s: %(message)s"
    format = bb.msg.BBLogFormatter(format_str)
    bb.msg.addDefaultlogFilter(console)
    console.setFormatter(format)
    logger.addHandler(console)

    includelogs, loglines, consolelogfile = _log_settings_from_server(server)

    # verify and warn
    build_history_enabled = True
    inheritlist, error = server.runCommand(["getVariable", "INHERIT"])
    if not "buildhistory" in inheritlist.split(" "):
        logger.warn("buildhistory is not enabled. Please enable INHERIT += \"buildhistory\" to see image details.")
        build_history_enabled = False

    if not params.observe_only:
        logger.error("ToasterUI can only work in observer mode")
        return 1

    main.shutdown = 0
    interrupted = False
    return_value = 0
    errors = 0
    warnings = 0
    taskfailures = []
    first = True

    buildinfohelper = BuildInfoHelper(server, build_history_enabled)

    if buildinfohelper.brbe is not None and consolelogfile:
        # if we are under managed mode we have no other UI and we need to write our own file
        bb.utils.mkdirhier(os.path.dirname(consolelogfile))
        conlogformat = bb.msg.BBLogFormatter(format_str)
        consolelog = logging.FileHandler(consolelogfile)
        bb.msg.addDefaultlogFilter(consolelog)
        consolelog.setFormatter(conlogformat)
        logger.addHandler(consolelog)

    while True:
        try:
            event = eventHandler.waitEvent(0.25)
            if first:
                first = False
                logger.info("ToasterUI waiting for events")

            if event is None:
                if main.shutdown > 0:
                    break
                continue

            helper.eventHandler(event)

            if isinstance(event, bb.event.BuildStarted):
                buildinfohelper.store_started_build(event)

            if isinstance(event, (bb.build.TaskStarted, bb.build.TaskSucceeded, bb.build.TaskFailedSilent)):
                buildinfohelper.update_and_store_task(event)
                logger.warn("Logfile for task %s" % event.logfile)
                continue

            if isinstance(event, bb.build.TaskBase):
                logger.info(event._message)

            if isinstance(event, bb.event.LogExecTTY):
                logger.warn(event.msg)
                continue

            if isinstance(event, logging.LogRecord):
                if event.levelno == -1:
                    event.levelno = format.ERROR

                buildinfohelper.store_log_event(event)
                if event.levelno >= format.ERROR:
                    errors = errors + 1
                elif event.levelno == format.WARNING:
                    warnings = warnings + 1
                # For "normal" logging conditions, don't show note logs from tasks
                # but do show them if the user has changed the default log level to
                # include verbose/debug messages
                if event.taskpid != 0 and event.levelno <= format.NOTE:
                    continue

                logger.handle(event)
                continue

            if isinstance(event, bb.build.TaskFailed):
                buildinfohelper.update_and_store_task(event)
                logfile = event.logfile
                if logfile and os.path.exists(logfile):
                    bb.error("Logfile of failure stored in: %s" % logfile)
                continue

            # these events are unprocessed now, but may be used in the future to log
            # timing and error informations from the parsing phase in Toaster
            if isinstance(event, (bb.event.SanityCheckPassed, bb.event.SanityCheck)):
                continue
            if isinstance(event, bb.event.ParseStarted):
                continue
            if isinstance(event, bb.event.ParseProgress):
                continue
            if isinstance(event, bb.event.ParseCompleted):
                continue
            if isinstance(event, bb.event.CacheLoadStarted):
                continue
            if isinstance(event, bb.event.CacheLoadProgress):
                continue
            if isinstance(event, bb.event.CacheLoadCompleted):
                continue
            if isinstance(event, bb.event.MultipleProviders):
                logger.info("multiple providers are available for %s%s (%s)",
                            event._is_runtime and "runtime " or "",
                            event._item,
                            ", ".join(event._candidates))
                logger.info("consider defining a PREFERRED_PROVIDER entry to match %s", event._item)
                continue

            if isinstance(event, bb.event.NoProvider):
                errors = errors + 1
                if event._runtime:
                    r = "R"
                else:
                    r = ""

                if event._dependees:
                    text = "Nothing %sPROVIDES '%s' (but %s %sDEPENDS on or otherwise requires it)" % (r, event._item, ", ".join(event._dependees), r)
                else:
                    text = "Nothing %sPROVIDES '%s'" % (r, event._item)

                logger.error(text)
                if event._reasons:
                    for reason in event._reasons:
                        logger.error("%s", reason)
                        text += reason
                buildinfohelper.store_log_error(text)
                continue

            if isinstance(event, bb.event.ConfigParsed):
                continue
            if isinstance(event, bb.event.RecipeParsed):
                continue

            # end of saved events

            if isinstance(event, (bb.runqueue.sceneQueueTaskStarted, bb.runqueue.runQueueTaskStarted, bb.runqueue.runQueueTaskSkipped)):
                buildinfohelper.store_started_task(event)
                continue

            if isinstance(event, bb.runqueue.runQueueTaskCompleted):
                buildinfohelper.update_and_store_task(event)
                continue

            if isinstance(event, bb.runqueue.runQueueTaskFailed):
                buildinfohelper.update_and_store_task(event)
                taskfailures.append(event.taskstring)
                logger.error("Task %s (%s) failed with exit code '%s'",
                             event.taskid, event.taskstring, event.exitcode)
                continue

            if isinstance(event, (bb.runqueue.sceneQueueTaskCompleted, bb.runqueue.sceneQueueTaskFailed)):
                buildinfohelper.update_and_store_task(event)
                continue

            if isinstance(event, (bb.event.TreeDataPreparationStarted, bb.event.TreeDataPreparationCompleted)):
                continue

            if isinstance(event, (bb.event.BuildCompleted, bb.command.CommandFailed)):
                errorcode = 0
                if (isinstance(event, bb.command.CommandFailed)):
                    errors += 1
                    errorcode = 1
                    logger.error("Command execution failed: %s", event.error)

                # update the build info helper on BuildCompleted, not on CommandXXX
                buildinfohelper.update_build_information(event, errors, warnings, taskfailures)
                buildinfohelper.close(errorcode)
                # mark the log output; controllers may kill the toasterUI after seeing this log
                logger.info("ToasterUI build done 1, brbe: %s" % buildinfohelper.brbe)

                # we start a new build info
                if buildinfohelper.brbe is not None:
                    logger.debug(1, "ToasterUI under BuildEnvironment management - exiting after the build")
                    server.terminateServer()
                else:
                    logger.debug(1, "ToasterUI prepared for new build")
                    errors = 0
                    warnings = 0
                    taskfailures = []
                    buildinfohelper = BuildInfoHelper(server, build_history_enabled)

                logger.info("ToasterUI build done 2")
                continue

            if isinstance(event, (bb.command.CommandCompleted,
                                  bb.command.CommandFailed,
                                  bb.command.CommandExit)):
                errorcode = 0
                continue

            if isinstance(event, bb.event.MetadataEvent):
                if event.type == "SinglePackageInfo":
                    buildinfohelper.store_build_package_information(event)
                elif event.type == "LayerInfo":
                    buildinfohelper.store_layer_info(event)
                elif event.type == "BuildStatsList":
                    buildinfohelper.store_tasks_stats(event)
                elif event.type == "ImagePkgList":
                    buildinfohelper.store_target_package_data(event)
                elif event.type == "MissedSstate":
                    buildinfohelper.store_missed_state_tasks(event)
                elif event.type == "ImageFileSize":
                    buildinfohelper.update_target_image_file(event)
                elif event.type == "ArtifactFileSize":
                    buildinfohelper.update_artifact_image_file(event)
                elif event.type == "LicenseManifestPath":
                    buildinfohelper.store_license_manifest_path(event)
                else:
                    logger.error("Unprocessed MetadataEvent %s " % str(event))
                continue

            if isinstance(event, bb.cooker.CookerExit):
                # exit when the server exits
                break

            # ignore
            if isinstance(event, (bb.event.BuildBase,
                                  bb.event.StampUpdate,
                                  bb.event.RecipePreFinalise,
                                  bb.runqueue.runQueueEvent,
                                  bb.runqueue.runQueueExitWait,
                                  bb.event.OperationProgress,
                                  bb.command.CommandFailed,
                                  bb.command.CommandExit,
                                  bb.command.CommandCompleted)):
                continue

            if isinstance(event, bb.event.DepTreeGenerated):
                buildinfohelper.store_dependency_information(event)
                continue

            logger.error("Unknown event: %s", event)
            return_value += 1

        except EnvironmentError as ioerror:
            # ignore interrupted io
            if ioerror.args[0] == 4:
                pass
        except KeyboardInterrupt:
            main.shutdown = 1
            pass
        except Exception as e:
            # print errors to log
            import traceback
            from pprint import pformat
            exception_data = traceback.format_exc()
            logger.error("%s\n%s" % (e, exception_data))

            exc_type, exc_value, tb = sys.exc_info()
            if tb is not None:
                curr = tb
                while curr is not None:
                    logger.warn("Error data dump %s\n%s\n" % (traceback.format_tb(curr, 1), pformat(curr.tb_frame.f_locals)))
                    curr = curr.tb_next

            # save them to database, if possible; if it fails, we already logged to console.
            try:
                buildinfohelper.store_log_exception("%s\n%s" % (str(e), exception_data))
            except Exception as ce:
                logger.error("CRITICAL - Failed to save toaster exception to the database: %s" % str(ce))

            # make sure we return with an error
            return_value += 1
            pass

    if interrupted:
        if return_value == 0:
            return_value += 1

    logger.warn("Return value is %d", return_value)
    return return_value
gpl-2.0
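The main() loop above is a classic isinstance-dispatch over a blocking event queue: poll with a timeout, branch on event type, exit on CookerExit. A stripped-down sketch of the same shape; the event classes and the wait_event callable are stand-ins, not real BitBake types:

# Stand-in event classes; the real loop dispatches on bb.event/bb.build/
# bb.runqueue types. Sketch only.
class BuildStarted(object): pass
class TaskFailed(object): pass
class CookerExit(object): pass

def event_loop(wait_event):
    """wait_event mimics eventHandler.waitEvent(timeout): event or None."""
    while True:
        event = wait_event(0.25)
        if event is None:
            continue                     # timeout: poll again
        if isinstance(event, BuildStarted):
            print("build started")
            continue
        if isinstance(event, TaskFailed):
            print("task failed")
            continue
        if isinstance(event, CookerExit):
            break                        # exit when the server exits
        print("unknown event: %r" % (event,))

events = iter([BuildStarted(), TaskFailed(), CookerExit()])
event_loop(lambda timeout: next(events, None))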
onitake/ansible
test/units/module_utils/facts/base.py
47
2309
# base unit test classes for ansible/module_utils/facts/ tests
# -*- coding: utf-8 -*-
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible.  If not, see <http://www.gnu.org/licenses/>.
#

# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type

from units.compat import unittest
from units.compat.mock import Mock


class BaseFactsTest(unittest.TestCase):
    # just a base class, not an actual test
    __test__ = False

    gather_subset = ['all']
    valid_subsets = None
    fact_namespace = None
    collector_class = None

    # a dict ansible_facts. Some fact collectors depend on facts gathered by
    # other collectors (like 'ansible_architecture' or 'ansible_system') which
    # can be passed via the collected_facts arg to collect()
    collected_facts = None

    def _mock_module(self):
        mock_module = Mock()
        mock_module.params = {'gather_subset': self.gather_subset,
                              'gather_timeout': 5,
                              'filter': '*'}
        mock_module.get_bin_path = Mock(return_value=None)
        return mock_module

    def test_collect(self):
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect(module=module, collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        return facts_dict

    def test_collect_with_namespace(self):
        module = self._mock_module()
        fact_collector = self.collector_class()
        facts_dict = fact_collector.collect_with_namespace(module=module,
                                                           collected_facts=self.collected_facts)
        self.assertIsInstance(facts_dict, dict)
        return facts_dict
gpl-3.0
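A sketch of how BaseFactsTest above is meant to be subclassed for a concrete collector. The collector import path and subset names here are assumptions about the surrounding test suite, not taken from the record:

# Hypothetical subclass; PlatformFactCollector and the subset/namespace
# values are assumed, not confirmed by the record above.
from ansible.module_utils.facts.system.platform import PlatformFactCollector

class TestPlatformFacts(BaseFactsTest):
    __test__ = True                  # re-enable test collection for this subclass
    gather_subset = ['!all', 'platform']
    valid_subsets = ['platform']
    fact_namespace = 'ansible_platform'
    collector_class = PlatformFactCollector

The inherited test_collect and test_collect_with_namespace then exercise the collector with the mocked module, so each concrete test class only has to fill in the class attributes.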
guaix-ucm/pyemir
emirdrp/recipes/image/naming.py
3
1640
#
# Copyright 2011-2014 Universidad Complutense de Madrid
#
# This file is part of PyEmir
#
# PyEmir is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyEmir is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PyEmir.  If not, see <http://www.gnu.org/licenses/>.
#

'''Naming intermediate images'''


def name_redimensioned_frames(label, step, ext='.fits'):
    dn = '%s_r%s' % (label, ext)
    mn = '%s_mr%s' % (label, ext)
    return dn, mn


def name_object_mask(label, step, ext='.fits'):
    return '%s_mro_i%01d%s' % (label, step, ext)


def name_skybackground(label, step, ext='.fits'):
    dn = '%s_sky_i%01d%s' % (label, step, ext)
    return dn


def name_skybackgroundmask(label, step, ext='.fits'):
    dn = '%s_skymask_i%01d%s' % (label, step, ext)
    return dn


def name_skysub_proc(label, step, ext='.fits'):
    dn = '%s_rfs_i%01d%s' % (label, step, ext)
    return dn


def name_skyflat(label, step, ext='.fits'):
    dn = 'superflat_%s_i%01d%s' % (label, step, ext)
    return dn


def name_skyflat_proc(label, step, ext='.fits'):
    dn = '%s_rf_i%01d%s' % (label, step, ext)
    return dn


def name_segmask(step, ext='.fits'):
    return "check_i%01d%s" % (step, ext)
gpl-3.0
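The helpers above are pure string formatting, so their behaviour is easy to show directly; the label is made up:

print(name_redimensioned_frames('obj01', 1))  # ('obj01_r.fits', 'obj01_mr.fits')
print(name_object_mask('obj01', 2))           # obj01_mro_i2.fits
print(name_skybackground('obj01', 2))         # obj01_sky_i2.fits
print(name_skyflat('obj01', 3))               # superflat_obj01_i3.fits
print(name_segmask(3))                        # check_i3.fits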
tinchoss/Python_Android
python/src/Lib/plat-mac/lib-scriptpackages/Finder/Legacy_suite.py
73
7687
"""Suite Legacy suite: Operations formerly handled by the Finder, but now automatically delegated to other applications Level 1, version 1 Generated from /System/Library/CoreServices/Finder.app AETE/AEUT resource version 0/144, language 0, script 0 """ import aetools import MacOS _code = 'fleg' class Legacy_suite_Events: def restart(self, _no_object=None, _attributes={}, **_arguments): """restart: Restart the computer Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'fndr' _subcode = 'rest' if _arguments: raise TypeError, 'No optional args expected' if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def shut_down(self, _no_object=None, _attributes={}, **_arguments): """shut down: Shut Down the computer Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'fndr' _subcode = 'shut' if _arguments: raise TypeError, 'No optional args expected' if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] def sleep(self, _no_object=None, _attributes={}, **_arguments): """sleep: Put the computer to sleep Keyword argument _attributes: AppleEvent attribute dictionary """ _code = 'fndr' _subcode = 'slep' if _arguments: raise TypeError, 'No optional args expected' if _no_object is not None: raise TypeError, 'No direct arg expected' _reply, _arguments, _attributes = self.send(_code, _subcode, _arguments, _attributes) if _arguments.get('errn', 0): raise aetools.Error, aetools.decodeerror(_arguments) # XXXX Optionally decode result if _arguments.has_key('----'): return _arguments['----'] class application(aetools.ComponentItem): """application - The Finder """ want = 'capp' class _Prop_desktop_picture(aetools.NProperty): """desktop picture - the desktop picture of the main monitor """ which = 'dpic' want = 'file' desktop_picture = _Prop_desktop_picture() class application_process(aetools.ComponentItem): """application process - A process launched from an application file """ want = 'pcap' class _Prop__3c_Inheritance_3e_(aetools.NProperty): """<Inheritance> - inherits some of its properties from the process class """ which = 'c@#^' want = 'prcs' class _Prop_application_file(aetools.NProperty): """application file - the application file from which this process was launched """ which = 'appf' want = 'appf' application_processes = application_process class desk_accessory_process(aetools.ComponentItem): """desk accessory process - A process launched from a desk accessory file """ want = 'pcda' class _Prop_desk_accessory_file(aetools.NProperty): """desk accessory file - the desk accessory file from which this process was launched """ which = 'dafi' want = 'obj ' desk_accessory_processes = desk_accessory_process class process(aetools.ComponentItem): """process - A process running on this computer """ want = 'prcs' class _Prop_accepts_high_level_events(aetools.NProperty): """accepts high level events - Is the process high-level event aware (accepts open application, open document, print document, and quit)? 
""" which = 'isab' want = 'bool' class _Prop_accepts_remote_events(aetools.NProperty): """accepts remote events - Does the process accept remote events? """ which = 'revt' want = 'bool' class _Prop_creator_type(aetools.NProperty): """creator type - the OSType of the creator of the process (the signature) """ which = 'fcrt' want = 'type' class _Prop_file(aetools.NProperty): """file - the file from which the process was launched """ which = 'file' want = 'obj ' class _Prop_file_type(aetools.NProperty): """file type - the OSType of the file type of the process """ which = 'asty' want = 'type' class _Prop_frontmost(aetools.NProperty): """frontmost - Is the process the frontmost process? """ which = 'pisf' want = 'bool' class _Prop_has_scripting_terminology(aetools.NProperty): """has scripting terminology - Does the process have a scripting terminology, i.e., can it be scripted? """ which = 'hscr' want = 'bool' class _Prop_name(aetools.NProperty): """name - the name of the process """ which = 'pnam' want = 'itxt' class _Prop_partition_space_used(aetools.NProperty): """partition space used - the number of bytes currently used in the process' partition """ which = 'pusd' want = 'long' class _Prop_total_partition_size(aetools.NProperty): """total partition size - the size of the partition with which the process was launched """ which = 'appt' want = 'long' class _Prop_visible(aetools.NProperty): """visible - Is the process' layer visible? """ which = 'pvis' want = 'bool' processes = process application._superclassnames = [] application._privpropdict = { 'desktop_picture' : _Prop_desktop_picture, } application._privelemdict = { } application_process._superclassnames = ['process'] application_process._privpropdict = { '_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_, 'application_file' : _Prop_application_file, } application_process._privelemdict = { } desk_accessory_process._superclassnames = ['process'] desk_accessory_process._privpropdict = { '_3c_Inheritance_3e_' : _Prop__3c_Inheritance_3e_, 'desk_accessory_file' : _Prop_desk_accessory_file, } desk_accessory_process._privelemdict = { } process._superclassnames = [] process._privpropdict = { 'accepts_high_level_events' : _Prop_accepts_high_level_events, 'accepts_remote_events' : _Prop_accepts_remote_events, 'creator_type' : _Prop_creator_type, 'file' : _Prop_file, 'file_type' : _Prop_file_type, 'frontmost' : _Prop_frontmost, 'has_scripting_terminology' : _Prop_has_scripting_terminology, 'name' : _Prop_name, 'partition_space_used' : _Prop_partition_space_used, 'total_partition_size' : _Prop_total_partition_size, 'visible' : _Prop_visible, } process._privelemdict = { } # # Indices of types declared in this module # _classdeclarations = { 'capp' : application, 'pcap' : application_process, 'pcda' : desk_accessory_process, 'prcs' : process, } _propdeclarations = { 'appf' : _Prop_application_file, 'appt' : _Prop_total_partition_size, 'asty' : _Prop_file_type, 'c@#^' : _Prop__3c_Inheritance_3e_, 'dafi' : _Prop_desk_accessory_file, 'dpic' : _Prop_desktop_picture, 'fcrt' : _Prop_creator_type, 'file' : _Prop_file, 'hscr' : _Prop_has_scripting_terminology, 'isab' : _Prop_accepts_high_level_events, 'pisf' : _Prop_frontmost, 'pnam' : _Prop_name, 'pusd' : _Prop_partition_space_used, 'pvis' : _Prop_visible, 'revt' : _Prop_accepts_remote_events, } _compdeclarations = { } _enumdeclarations = { }
apache-2.0
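The event methods above only become usable when the suite is mixed into an aetools.TalkTo subclass, which supplies the send() method they call. A hedged Python 2 sketch of that wiring; 'MACS' is the Finder's standard creator signature, but the class layout here is illustrative rather than the generated package's exact code:

# Illustrative only: mirrors how the generated Finder package composes
# its suites with aetools.TalkTo.
import aetools
from Legacy_suite import Legacy_suite_Events

class Finder(Legacy_suite_Events, aetools.TalkTo):
    _signature = 'MACS'     # the Finder's creator signature

finder = Finder('MACS')     # open a connection to the Finder
finder.sleep()              # sends the 'fndr'/'slep' Apple Event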
iftekeriba/softlayer-python
SoftLayer/CLI/mq/topic_subscribe.py
2
1737
"""Create a subscription on a topic.""" # :license: MIT, see LICENSE for more details. import SoftLayer from SoftLayer.CLI import environment from SoftLayer.CLI import mq import click @click.command() @click.argument('account-id') @click.argument('topic-name') @click.option('--datacenter', help="Datacenter, E.G.: dal05") @click.option('--network', type=click.Choice(['public', 'private']), help="Network type") @click.option('--sub-type', type=click.Choice(['http', 'queue']), help="Type of endpoint") @click.option('--queue-name', help="Queue name. Required if --type is queue") @click.option('--http-method', help="HTTP Method to use if --type is http") @click.option('--http-url', help="HTTP/HTTPS URL to use. Required if --type is http") @click.option('--http-body', help="HTTP Body template to use if --type is http") @environment.pass_env def cli(env, account_id, topic_name, datacenter, network, sub_type, queue_name, http_method, http_url, http_body): """Create a subscription on a topic.""" manager = SoftLayer.MessagingManager(env.client) mq_client = manager.get_connection(account_id, datacenter=datacenter, network=network) if sub_type == 'queue': subscription = mq_client.create_subscription(topic_name, 'queue', queue_name=queue_name) elif sub_type == 'http': subscription = mq_client.create_subscription( topic_name, 'http', method=http_method, url=http_url, body=http_body, ) env.fout(mq.subscription_table(subscription))
mit
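A minimal sketch of the endpoint-branching logic from the command above, extracted so it can be exercised without a SoftLayer session; `mq_client` is assumed to be any object exposing the same create_subscription signature used in cli():

def build_subscription(mq_client, topic_name, sub_type,
                       queue_name=None, http_method=None,
                       http_url=None, http_body=None):
    # Mirrors the branching in cli(): 'queue' endpoints need a queue name,
    # 'http' endpoints need at least a URL.
    if sub_type == 'queue':
        return mq_client.create_subscription(topic_name, 'queue',
                                             queue_name=queue_name)
    if sub_type == 'http':
        return mq_client.create_subscription(topic_name, 'http',
                                             method=http_method,
                                             url=http_url, body=http_body)
    raise ValueError('sub_type must be "queue" or "http"')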
Hurence/log-island
logisland-components/logisland-processors/logisland-processor-scripting/src/main/resources/nltk/classify/megam.py
7
6384
# Natural Language Toolkit: Interface to Megam Classifier # # Copyright (C) 2001-2016 NLTK Project # Author: Edward Loper <edloper@gmail.com> # URL: <http://nltk.org/> # For license information, see LICENSE.TXT """ A set of functions used to interface with the external megam_ maxent optimization package. Before megam can be used, you should tell NLTK where it can find the megam binary, using the ``config_megam()`` function. Typical usage: >>> from nltk.classify import megam >>> megam.config_megam() # pass path to megam if not found in PATH # doctest: +SKIP [Found megam: ...] Use with MaxentClassifier. Example below, see MaxentClassifier documentation for details. nltk.classify.MaxentClassifier.train(corpus, 'megam') .. _megam: http://www.umiacs.umd.edu/~hal/megam/index.html """ from __future__ import print_function import subprocess from nltk import compat from nltk.internals import find_binary try: import numpy except ImportError: numpy = None ###################################################################### #{ Configuration ###################################################################### _megam_bin = None def config_megam(bin=None): """ Configure NLTK's interface to the ``megam`` maxent optimization package. :param bin: The full path to the ``megam`` binary. If not specified, then nltk will search the system for a ``megam`` binary; and if one is not found, it will raise a ``LookupError`` exception. :type bin: str """ global _megam_bin _megam_bin = find_binary( 'megam', bin, env_vars=['MEGAM'], binary_names=['megam.opt', 'megam', 'megam_686', 'megam_i686.opt'], url='http://www.umiacs.umd.edu/~hal/megam/index.html') ###################################################################### #{ Megam Interface Functions ###################################################################### def write_megam_file(train_toks, encoding, stream, bernoulli=True, explicit=True): """ Generate an input file for ``megam`` based on the given corpus of classified tokens. :type train_toks: list(tuple(dict, str)) :param train_toks: Training data, represented as a list of pairs, the first member of which is a feature dictionary, and the second of which is a classification label. :type encoding: MaxentFeatureEncodingI :param encoding: A feature encoding, used to convert featuresets into feature vectors. May optionally implement a cost() method in order to assign different costs to different class predictions. :type stream: stream :param stream: The stream to which the megam input file should be written. :param bernoulli: If true, then use the 'bernoulli' format. I.e., all joint features have binary values, and are listed iff they are true. Otherwise, list feature values explicitly. If ``bernoulli=False``, then you must call ``megam`` with the ``-fvals`` option. :param explicit: If true, then use the 'explicit' format. I.e., list the features that would fire for any of the possible labels, for each token. If ``explicit=True``, then you must call ``megam`` with the ``-explicit`` option. """ # Look up the set of labels. labels = encoding.labels() labelnum = dict((label, i) for (i, label) in enumerate(labels)) # Write the file, which contains one line per instance. for featureset, label in train_toks: # First, the instance number (or, in the weighted multiclass case, the cost of each label). 
        if hasattr(encoding, 'cost'):
            stream.write(':'.join(str(encoding.cost(featureset, label, l))
                                  for l in labels))
        else:
            stream.write('%d' % labelnum[label])

        # For implicit file formats, just list the features that fire
        # for this instance's actual label.
        if not explicit:
            _write_megam_features(encoding.encode(featureset, label),
                                  stream, bernoulli)

        # For explicit formats, list the features that would fire for
        # any of the possible labels.
        else:
            for l in labels:
                stream.write(' #')
                _write_megam_features(encoding.encode(featureset, l),
                                      stream, bernoulli)

        # End of the instance.
        stream.write('\n')

def parse_megam_weights(s, features_count, explicit=True):
    """
    Given the stdout output generated by ``megam`` when training a
    model, return a ``numpy`` array containing the corresponding weight
    vector.  This function does not currently handle bias features.
    """
    if numpy is None:
        raise ValueError('This function requires that numpy be installed')
    assert explicit, 'non-explicit not supported yet'
    lines = s.strip().split('\n')
    weights = numpy.zeros(features_count, 'd')
    for line in lines:
        if line.strip():
            fid, weight = line.split()
            weights[int(fid)] = float(weight)
    return weights

def _write_megam_features(vector, stream, bernoulli):
    if not vector:
        raise ValueError('MEGAM classifier requires the use of an '
                         'always-on feature.')
    for (fid, fval) in vector:
        if bernoulli:
            if fval == 1:
                stream.write(' %s' % fid)
            elif fval != 0:
                raise ValueError('If bernoulli=True, then all '
                                 'features must be binary.')
        else:
            stream.write(' %s %s' % (fid, fval))

def call_megam(args):
    """
    Call the ``megam`` binary with the given arguments.
    """
    if isinstance(args, compat.string_types):
        raise TypeError('args should be a list of strings')
    if _megam_bin is None:
        config_megam()

    # Call megam via a subprocess
    cmd = [_megam_bin] + args
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE)
    (stdout, stderr) = p.communicate()

    # Check the return code.
    if p.returncode != 0:
        print()
        print(stderr)
        raise OSError('megam command failed!')

    if isinstance(stdout, compat.string_types):
        return stdout
    else:
        return stdout.decode('utf-8')
apache-2.0
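To see the megam input format that write_megam_file (defined in the module above) produces, here is a small sketch with a hypothetical stub encoding; the real code expects an nltk MaxentFeatureEncodingI. In the default explicit/bernoulli mode, each instance line is the label index followed by '#'-separated feature lists, one per candidate label:

import io

class StubEncoding(object):
    """Hypothetical two-label encoding: feature 0 fires for 'pos', 1 for 'neg'."""
    def labels(self):
        return ['pos', 'neg']
    def encode(self, featureset, label):
        return [(0, 1)] if label == 'pos' else [(1, 1)]

stream = io.StringIO()
train_toks = [({'word': 'great'}, 'pos'), ({'word': 'awful'}, 'neg')]
write_megam_file(train_toks, StubEncoding(), stream)
print(stream.getvalue())
# 0 # 0 # 1
# 1 # 0 # 1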
SmoothKat/android_platform_external_chromium
chrome/browser/resources/file_manager/bin/squashdir.py
66
1735
#!/usr/bin/python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.

import glob
import os
import shutil
import stat
import sys

def usage():
  print("""Usage: squashdir.py <dest-dir> <source-dir> ...

Basic tool to copy a directory hierarchy into a flat space.

This crawls an arbitrarily deep hierarchy of files and directories, and
copies each file into the destination directory.  The destination file name
will include the relative path to the source file, with '^^' inserted
between each directory name.

The resulting directory can then be imported into the file manager test
harness, which will reconstitute the directory structure.

This is used to work around the fact that the FileList and File objects
presented by <input type=file multiple> do not allow users to recurse a
selected directory, nor do they provide information about directory
structure.
""")

def status(msg):
  sys.stderr.write(msg + '\n')

def scan_path(dest, src, path):
  abs_src = os.path.join(src, path)
  statinfo = os.stat(abs_src)
  basename = os.path.basename(path)

  if not stat.S_ISDIR(statinfo.st_mode):
    newname = os.path.join(dest, path.replace('/', '^^'))
    status(newname)
    shutil.copyfile(abs_src, newname)
  else:
    for child_path in glob.glob(abs_src + '/*'):
      scan_path(dest, src, child_path[len(src) + 1:])

if __name__ == '__main__':
  if len(sys.argv) < 3 or sys.argv[1][0] == '-':
    usage()
    # A bare `return` at module level is a SyntaxError; exit instead.
    sys.exit(1)

  dest = sys.argv[1]
  for src in sys.argv[2:]:
    abs_src = os.path.abspath(src)
    path = os.path.basename(abs_src)
    abs_src = os.path.dirname(abs_src)
    scan_path(dest, abs_src, path)
bsd-3-clause
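The '^^' flattening above is a plain path rewrite, so round-tripping is straightforward; a quick illustration:

path = 'photos/2011/cat.jpg'
flat = path.replace('/', '^^')   # what scan_path() does to build newname
print(flat)                      # photos^^2011^^cat.jpg
print(flat.split('^^'))          # ['photos', '2011', 'cat.jpg'] -- the test
                                 # harness can rebuild the tree from this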
XiaodunServerGroup/xiaodun-platform
common/lib/xmodule/xmodule/tests/xml/test_policy.py
248
1262
""" Tests that policy json files import correctly when loading XML """ from nose.tools import assert_equals, assert_raises # pylint: disable=no-name-in-module from xmodule.tests.xml.factories import CourseFactory from xmodule.tests.xml import XModuleXmlImportTest class TestPolicy(XModuleXmlImportTest): """ Tests that policy json files import correctly when loading xml """ def test_no_attribute_mapping(self): # Policy files are json, and thus the values aren't passed through 'deserialize_field' # Therefor, the string 'null' is passed unchanged to the Float field, which will trigger # a ValueError with assert_raises(ValueError): course = self.process_xml(CourseFactory.build(policy={'days_early_for_beta': 'null'})) # Trigger the exception by looking at the imported data course.days_early_for_beta # pylint: disable=pointless-statement def test_course_policy(self): course = self.process_xml(CourseFactory.build(policy={'days_early_for_beta': None})) assert_equals(None, course.days_early_for_beta) course = self.process_xml(CourseFactory.build(policy={'days_early_for_beta': 9})) assert_equals(9, course.days_early_for_beta)
agpl-3.0
mbdriscoll/indigo
indigo/backends/np.py
1
5341
from ctypes import * import numpy as np import scipy.sparse as spp from numpy.ctypeslib import ndpointer from indigo.backends.backend import Backend class NumpyBackend(Backend): def __init__(self, device_id=0): super(NumpyBackend, self).__init__() # ----------------------------------------------------------------------- # Arrays # ----------------------------------------------------------------------- class dndarray(Backend.dndarray): def _copy_from(self, arr): self._arr.flat[:] = arr.flat def _copy_to(self, arr): arr.flat[:] = self._arr.flat def _copy(self, d_arr): dst = self._arr.reshape(-1, order='F') src = d_arr._arr.reshape(-1, order='F') dst.flat[:] = src.flat def _malloc(self, shape, dtype): return np.ndarray(shape, dtype, order='F') def _free(self): del self._arr def _zero(self): self._arr[:] = 0 def __getitem__(self, slc): d = self._arr.reshape(self.shape, order='F')[slc] ld = self._leading_dim return self._backend.dndarray( self._backend, d.shape, d.dtype, ld=ld, own=False, data=d) @staticmethod def from_param(obj): if not isinstance(obj, NUMPY.dndarray): raise ArgumentError('{} is not a dndarray'.format( type(obj) )) return obj._arr.ctypes.get_as_parameter() # ----------------------------------------------------------------------- # BLAS Routines # ----------------------------------------------------------------------- def axpby(self, beta, y, alpha, x): """ y = beta*y + alpha*x """ assert isinstance(x, self.dndarray) assert isinstance(y, self.dndarray) x = x._arr.reshape(y._arr.shape, order='F') y._arr[:] = beta * y._arr + alpha * x def dot(self, x, y): """ returns x^T * y """ assert isinstance(x, self.dndarray) assert isinstance(y, self.dndarray) return np.vdot( x._arr, y._arr ).real def norm2(self, x): """ returns ||x||_2""" assert isinstance(x, self.dndarray) return np.linalg.norm(x._arr)**2 def scale(self, x, alpha): """ x *= alpha """ assert isinstance(x, self.dndarray) x._arr *= alpha def cgemm(self, y, M, x, alpha=1, beta=0, forward=True, left=True): y, M, x = y._arr, M._arr, x._arr if not forward: M = np.conj(M.T) if left: x = x.reshape((M.shape[1],-1), order='F') y = y.reshape((M.shape[0],-1), order='F') y[:] = alpha * (M @ x) + beta * y else: x = x.reshape((-1,M.shape[0]), order='F') y = y.reshape((-1,M.shape[1]), order='F') y[:] = alpha * (x @ M) + beta * y def csymm(self, y, M, x, alpha, beta, left=True): return self.cgemm(y, M, x, alpha, beta, forward=True, left=left) # ----------------------------------------------------------------------- # OneMM Routines # ----------------------------------------------------------------------- def onemm(self, y, x, alpha, beta): y._arr[:] = beta * y._arr + alpha * \ np.broadcast_to(x._arr.sum(axis=0, keepdims=True), y.shape) # ----------------------------------------------------------------------- # FFT Routines # ----------------------------------------------------------------------- def fftn(self, y, x): X = x._arr.reshape( x.shape, order='F' ) Y = y._arr.reshape( y.shape, order='F' ) ndim = X.ndim-1 axes = tuple(range(ndim)) Y[:] = np.fft.fftn(X, axes=axes) def ifftn(self, y, x): X = x._arr.reshape( x.shape, order='F' ) Y = y._arr.reshape( y.shape, order='F' ) ndim = X.ndim-1 axes = tuple(range(ndim)) scale = np.prod( X.shape[:ndim] ) Y[:] = np.fft.ifftn(X, axes=axes) * scale # ----------------------------------------------------------------------- # CSRMM Routine # ----------------------------------------------------------------------- def ccsrmm(self, y, A_shape, A_indx, A_ptr, A_vals, x, alpha, beta, adjoint=False, 
exwrite=False): A = spp.csr_matrix((A_vals._arr, A_indx._arr, A_ptr._arr), shape=A_shape) X = x._arr.reshape( x.shape, order='F') Y = y._arr.reshape( y.shape, order='F') if adjoint: Y[:] = alpha * (A.H @ X) + beta * Y else: Y[:] = alpha * (A @ X) + beta * Y def cdiamm(self, y, shape, offsets, data, x, alpha=1.0, beta=0.0, adjoint=True): A = spp.dia_matrix((data._arr.T, offsets._arr), shape=shape) X = x._arr.reshape( x.shape, order='F' ) Y = y._arr.reshape( y.shape, order='F' ) if adjoint: Y[:] = alpha * (A.H @ X) + beta * Y else: Y[:] = alpha * (A @ X) + beta * Y # ----------------------------------------------------------------------- # Misc Routines # ----------------------------------------------------------------------- @staticmethod def max(val, arr): mr = np.maximum(arr._arr.real, val) mi = np.maximum(arr._arr.imag, val) arr._arr[:] = mr + 1j * mi
bsd-3-clause
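The BLAS-style helpers in the backend above are thin wrappers over numpy broadcasting; for instance axpby computes y = beta*y + alpha*x in place. A standalone sketch of the same update, without the dndarray wrapper:

import numpy as np

x = np.array([1.0, 2.0, 3.0])
y = np.array([10.0, 20.0, 30.0])
alpha, beta = 2.0, 0.5

y[:] = beta * y + alpha * x   # in-place, like NumpyBackend.axpby
print(y)                      # [ 7. 14. 21.]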
davits/ycmd
ycmd/completers/general/general_completer_store.py
4
3211
# Copyright (C) 2013-2020 ycmd contributors # # This file is part of ycmd. # # ycmd is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # ycmd is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with ycmd. If not, see <http://www.gnu.org/licenses/>. from ycmd.completers.completer import Completer from ycmd.completers.all.identifier_completer import IdentifierCompleter from ycmd.completers.general.filename_completer import FilenameCompleter from ycmd.completers.general.ultisnips_completer import UltiSnipsCompleter class GeneralCompleterStore( Completer ): """ Holds a list of completers that can be used in all filetypes. It overrides all Completer API methods so that specific calls to GeneralCompleterStore are passed to all general completers. """ def __init__( self, user_options ): super().__init__( user_options ) self._identifier_completer = IdentifierCompleter( user_options ) self._filename_completer = FilenameCompleter( user_options ) self._ultisnips_completer = UltiSnipsCompleter( user_options ) self._non_filename_completers = [ self._identifier_completer ] if user_options.get( 'use_ultisnips_completer', True ): self._non_filename_completers.append( self._ultisnips_completer ) self._all_completers = [ self._identifier_completer, self._filename_completer, self._ultisnips_completer ] def SupportedFiletypes( self ): return set() def GetIdentifierCompleter( self ): return self._identifier_completer def ComputeCandidates( self, request_data ): candidates = self._filename_completer.ComputeCandidates( request_data ) if candidates: return candidates for completer in self._non_filename_completers: candidates += completer.ComputeCandidates( request_data ) return candidates def OnFileReadyToParse( self, request_data ): for completer in self._all_completers: completer.OnFileReadyToParse( request_data ) def OnBufferVisit( self, request_data ): for completer in self._all_completers: completer.OnBufferVisit( request_data ) def OnBufferUnload( self, request_data ): for completer in self._all_completers: completer.OnBufferUnload( request_data ) def OnInsertLeave( self, request_data ): for completer in self._all_completers: completer.OnInsertLeave( request_data ) def OnCurrentIdentifierFinished( self, request_data ): for completer in self._all_completers: completer.OnCurrentIdentifierFinished( request_data ) def GettingCompletions( self ): for completer in self._all_completers: completer.GettingCompletions() def Shutdown( self ): for completer in self._all_completers: completer.Shutdown()
gpl-3.0
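GeneralCompleterStore's pattern is plain fan-out with one short-circuit: the filename completer is tried alone first, and only if it yields nothing are the others consulted. A minimal sketch of the same shape, independent of the real ycmd Completer API:

class FanOutStore(object):
    """Sketch of the GeneralCompleterStore shape, not the real ycmd API."""
    def __init__(self, first, rest):
        self._first = first   # tried alone first (like the filename completer)
        self._rest = rest

    def compute_candidates(self, request_data):
        candidates = self._first.compute_candidates(request_data)
        if candidates:
            return candidates  # short-circuit, as in ComputeCandidates()
        for completer in self._rest:
            candidates += completer.compute_candidates(request_data)
        return candidates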
lizardsystem/lizard-wms
lizard_wms/migrations/0034_auto__add_field_wmssource_get_feature_type.py
2
6967
# -*- coding: utf-8 -*- import datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding field 'WMSSource.get_feature_type' db.add_column('lizard_wms_wmssource', 'get_feature_type', self.gf('django.db.models.fields.CharField')(default=u'gml', max_length=100), keep_default=False) def backwards(self, orm): # Deleting field 'WMSSource.get_feature_type' db.delete_column('lizard_wms_wmssource', 'get_feature_type') models = { 'lizard_maptree.category': { 'Meta': {'ordering': "('index', 'name')", 'object_name': 'Category'}, 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'index': ('django.db.models.fields.IntegerField', [], {'default': '1000'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '80'}), 'parent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_maptree.Category']", 'null': 'True', 'blank': 'True'}), 'slug': ('django.db.models.fields.SlugField', [], {'unique': 'True', 'max_length': '20'}) }, 'lizard_security.dataset': { 'Meta': {'ordering': "['name']", 'object_name': 'DataSet'}, 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '80', 'blank': 'True'}) }, 'lizard_wms.featureline': { 'Meta': {'ordering': "(u'order_using', u'description', u'name')", 'object_name': 'FeatureLine'}, 'description': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'in_hover': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'order_using': ('django.db.models.fields.IntegerField', [], {'default': '1000'}), 'render_as': ('django.db.models.fields.CharField', [], {'default': "u'T'", 'max_length': '1'}), 'use_as_id': ('django.db.models.fields.BooleanField', [], {'default': 'False'}), 'visible': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'wms_layer': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_wms.WMSSource']"}) }, 'lizard_wms.filterpage': { 'Meta': {'ordering': "(u'wms_source',)", 'object_name': 'FilterPage'}, 'available_filters': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['lizard_wms.FeatureLine']", 'symmetrical': 'False', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'slug': ('django.db.models.fields.SlugField', [], {'max_length': '50'}), 'wms_source': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "u'filter_pages'", 'to': "orm['lizard_wms.WMSSource']"}) }, 'lizard_wms.wmsconnection': { 'Meta': {'object_name': 'WMSConnection'}, 'category': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lizard_maptree.Category']", 'null': 'True', 'blank': 'True'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'options': ('django.db.models.fields.TextField', [], {'default': 'u\'{"buffer": 0, "isBaseLayer": false, "opacity": 0.5}\''}), 'params': ('django.db.models.fields.TextField', [], {'default': 'u\'{"height": "256", "width": "256", "styles": "", 
"format": "image/png", "tiled": "true", "transparent": "true"}\''}), 'slug': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'title': ('django.db.models.fields.CharField', [], {'max_length': '100'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}), 'version': ('django.db.models.fields.CharField', [], {'default': "u'1.3.0'", 'max_length': '20'}), 'xml': ('django.db.models.fields.TextField', [], {'default': "u''", 'blank': 'True'}) }, 'lizard_wms.wmssource': { 'Meta': {'ordering': "(u'index', u'display_name')", 'object_name': 'WMSSource'}, '_params': ('jsonfield.fields.JSONField', [], {'default': 'u\'{"height": "256", "width": "256", "styles": "", "format": "image/png", "tiled": "true", "transparent": "true"}\'', 'null': 'True', 'blank': 'True'}), 'bbox': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}), 'category': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'to': "orm['lizard_maptree.Category']", 'null': 'True', 'blank': 'True'}), 'connection': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_wms.WMSConnection']", 'null': 'True', 'blank': 'True'}), 'data_set': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['lizard_security.DataSet']", 'null': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'display_name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'enable_search': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'get_feature_type': ('django.db.models.fields.CharField', [], {'default': "u'gml'", 'max_length': '100'}), 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'index': ('django.db.models.fields.IntegerField', [], {'default': '1000'}), 'layer_name': ('django.db.models.fields.TextField', [], {}), 'legend_url': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}), 'metadata': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}), 'options': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}), 'show_legend': ('django.db.models.fields.BooleanField', [], {'default': 'True'}), 'timepositions': ('django.db.models.fields.CharField', [], {'max_length': '2048', 'null': 'True', 'blank': 'True'}), 'url': ('django.db.models.fields.URLField', [], {'max_length': '200'}) } } complete_apps = ['lizard_wms']
gpl-3.0
carryLabs/carrydb
deps/rocksdb-4.9/coverage/parse_gcov_output.py
101
4208
import optparse
import re
import sys

from optparse import OptionParser

# the gcov report follows a certain pattern. Each file will have two lines
# of report, from which we can extract the file name, total lines and coverage
# percentage.
def parse_gcov_report(gcov_input):
    per_file_coverage = {}
    total_coverage = None
    current_file = None

    for line in gcov_input:
        line = line.strip()

        # --First line of the coverage report (with file name in it)?
        match_obj = re.match("^File '(.*)'$", line)
        if match_obj:
            # fetch the file name from the first line of the report.
            current_file = match_obj.group(1)
            continue

        # -- Second line of the file report (with coverage percentage)
        match_obj = re.match("^Lines executed:(.*)% of (.*)", line)

        if match_obj:
            coverage = float(match_obj.group(1))
            lines = int(match_obj.group(2))

            if current_file is not None:
                per_file_coverage[current_file] = (coverage, lines)
                current_file = None
            else:
                # If current_file is not set, we reach the last line of report,
                # which contains the summarized coverage percentage.
                total_coverage = (coverage, lines)
            continue

        # If the line's pattern doesn't fall into the above categories, we
        # can simply ignore it, since it's either an empty line or gcov
        # didn't find executable lines for the given file.
        current_file = None

    return per_file_coverage, total_coverage

def get_option_parser():
    usage = "Parse the gcov output and generate a more human-readable code " +\
            "coverage report."
    parser = OptionParser(usage)

    parser.add_option(
        "--interested-files", "-i",
        dest="filenames",
        help="Comma separated files names. if specified, we will display " +
             "the coverage report only for interested source files. " +
             "Otherwise we will display the coverage report for all " +
             "source files."
    )
    return parser

def display_file_coverage(per_file_coverage, total_coverage):
    # To print out auto-adjustable column, we need to know the longest
    # length of file names.
    max_file_name_length = max(
        len(fname) for fname in per_file_coverage.keys()
    )

    # -- Print header
    # size of separator is determined by 3 column sizes:
    # file name, coverage percentage and lines.
    header_template = \
        "%" + str(max_file_name_length) + "s\t%s\t%s"
    separator = "-" * (max_file_name_length + 10 + 20)

    print header_template % ("Filename", "Coverage", "Lines")
    print separator

    # -- Print body
    # template for printing coverage report for each file.
    record_template = "%" + str(max_file_name_length) + "s\t%5.2f%%\t%10d"

    for fname, coverage_info in per_file_coverage.items():
        coverage, lines = coverage_info
        print record_template % (fname, coverage, lines)

    # -- Print footer
    if total_coverage:
        print separator
        print record_template % ("Total", total_coverage[0], total_coverage[1])

def report_coverage():
    parser = get_option_parser()
    (options, args) = parser.parse_args()

    interested_files = set()
    if options.filenames is not None:
        interested_files = set(f.strip() for f in options.filenames.split(','))

    # To make things simple, right now we only read the gcov report from stdin,
    # but parse_gcov_report() now actually consumes the stream it is handed
    # (previously its gcov_input parameter was ignored in favor of sys.stdin).
    per_file_coverage, total_coverage = parse_gcov_report(sys.stdin)

    # Check if we need to display coverage info for interested files.
    if len(interested_files):
        per_file_coverage = dict(
            (fname, per_file_coverage[fname]) for fname in interested_files
            if fname in per_file_coverage
        )
        # If we're only interested in several files, it makes no sense to
        # report the total_coverage
        total_coverage = None

    if not len(per_file_coverage):
        print >> sys.stderr, "Cannot find coverage info for the given files."
        return

    display_file_coverage(per_file_coverage, total_coverage)

if __name__ == "__main__":
    report_coverage()
bsd-3-clause
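The parser keys off exactly two gcov summary lines per file; a short sketch of those same regexes against a sample report line pair (the file name here is made up):

import re

line1 = "File 'util/coding.cc'"
line2 = "Lines executed:90.00% of 200"

print(re.match("^File '(.*)'$", line1).group(1))      # util/coding.cc
m = re.match("^Lines executed:(.*)% of (.*)", line2)
print(float(m.group(1)), int(m.group(2)))             # 90.0 200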
vineodd/PIMSim
GEM5Simulation/gem5/ext/ply/example/hedit/hedit.py
165
1093
# -----------------------------------------------------------------------------
# hedit.py
#
# Parsing of Fortran H Edit descriptions (Contributed by Pearu Peterson)
#
# These tokens can't be easily tokenized because they are of the following
# form:
#
#   nHc1...cn
#
# where n is a positive integer and c1 ... cn are characters.
#
# This example shows how to modify the state of the lexer to parse
# such tokens
# -----------------------------------------------------------------------------

import sys
sys.path.insert(0,"../..")

tokens = (
    'H_EDIT_DESCRIPTOR',
    )

# Tokens
t_ignore = " \t\n"

def t_H_EDIT_DESCRIPTOR(t):
    r"\d+H.*"                      # This grabs all of the remaining text
    i = t.value.index('H')
    n = eval(t.value[:i])

    # Adjust the tokenizing position
    t.lexer.lexpos -= len(t.value) - (i+1+n)

    t.value = t.value[i+1:i+1+n]
    return t

def t_error(t):
    print("Illegal character '%s'" % t.value[0])
    t.lexer.skip(1)

# Build the lexer
import ply.lex as lex
lex.lex()
lex.runmain()
gpl-3.0
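The lexer rule works because the H edit descriptor carries its own length: nH is followed by exactly n literal characters. The slicing arithmetic from t_H_EDIT_DESCRIPTOR, shown standalone (using int() where the rule uses eval()):

value = '5Hhello world'        # the rule r"\d+H.*" over-matches to end of line
i = value.index('H')           # 1
n = int(value[:i])             # 5 characters belong to the descriptor
print(value[i+1:i+1+n])        # 'hello'; ' world' is pushed back to the lexer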
QRAAT/QRAAT
node/python/qa_rmg.py
1
1878
#!/usr/bin/env python2
#
# Copyright 2004 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING.  If not, write to
# the Free Software Foundation, Inc., 59 Temple Place - Suite 330,
# Boston, MA 02111-1307, USA.
#
# edited by Todd Borrowman ECE-UIUC 01/18/08~02/2010

from gnuradio import gr, gr_unittest, blks2
import rmg, sys

tx_name = "test"
test_data = "unit_test.tdat"

class qa_rmg (gr_unittest.TestCase):

    def setUp (self):
        self.fg = gr.top_block ()

    def tearDown (self):
        self.fg = None

    def test_001_detect (self):
        src_file = gr.file_source(gr.sizeof_gr_complex, test_data)
        #src_file = gr.file_source(gr.sizeof_gr_complex, "data_backup20070423/20070208164806.tdat")
        #src_file = gr.file_source(gr.sizeof_gr_complex, "20080911161545.tdat")
        di = gr.deinterleave(gr.sizeof_gr_complex)
        self.fg.connect(src_file, di)

        pd = rmg.detect(4,8000,160,480,1.1,"results", tx_name, 1.1,1.5)
        pd.enable()

        self.fg.connect((di,0),(pd,0))
        self.fg.connect((di,1),(pd,1))
        self.fg.connect((di,2),(pd,2))
        self.fg.connect((di,3),(pd,3))
        self.fg.run()

        #self.assertFloatTuplesAlmostEqual ([1],[1],1)

if __name__ == '__main__':
    gr_unittest.main ()
gpl-3.0
lnielsen/invenio
invenio/modules/upgrader/upgrades/invenio_2013_10_18_crcLIBRARY_type.py
4
1334
# -*- coding: utf-8 -*- ## ## This file is part of Invenio. ## Copyright (C) 2013 CERN. ## ## Invenio is free software; you can redistribute it and/or ## modify it under the terms of the GNU General Public License as ## published by the Free Software Foundation; either version 2 of the ## License, or (at your option) any later version. ## ## Invenio is distributed in the hope that it will be useful, but ## WITHOUT ANY WARRANTY; without even the implied warranty of ## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU ## General Public License for more details. ## ## You should have received a copy of the GNU General Public License ## along with Invenio; if not, write to the Free Software Foundation, Inc., ## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA. from invenio.legacy.dbquery import run_sql depends_on = ['invenio_release_1_1_0'] def info(): return "crcLIBRARY.type is now mandatory" def do_upgrade(): run_sql("UPDATE crcLIBRARY SET type='main' WHERE type IS NULL") create_statement = run_sql('SHOW CREATE TABLE crcLIBRARY')[0][1] if '`type` varchar(30) NOT NULL' not in create_statement: run_sql("ALTER TABLE crcLIBRARY CHANGE type type varchar(30) NOT NULL default 'main'") def estimate(): return 1 def pre_upgrade(): pass def post_upgrade(): pass
gpl-2.0
chengchingwen/moth_prediction
get_ft.py
1
1143
import sqlite3 as sql import pandas as pd import datetime as d import make_db as m date = "%d-%d-%d" datef = "%Y-%m-%d" year_end = "%Y-12-31" year_start = "%Y-01-01" query = "select * from `%d` where Time between '%s' and '%s'" def nd(s): return d.datetime.strptime(s, "%Y-%m-%d").month def get_ft_t(year, month, day, place ,delta=10 ): today = d.datetime(year, month ,day)-d.timedelta(1) start_date = today - d.timedelta(delta-1) db = sql.connect(place) if today.year == start_date.year: table = pd.read_sql(query % (year, start_date.strftime(datef),today.strftime(datef)),db) else: table1 = pd.read_sql(query % (start_date.year, start_date.strftime(datef), start_date.strftime(year_end)), db) table2 = pd.read_sql(query % (today.year, today.strftime(year_start),today.strftime(datef)),db) table = pd.concat([table1, table2]) if len(table): table.index = range(delta) return table def get_ft(row): time = d.datetime.strptime(row["date"],"%Y-%m-%d") return get_ft_t(time.year,time.month,time.day,m.db_path % m.place[row["ID"]],delta=20)
artistic-2.0
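get_ft_t()'s only subtle part is the lookback window and the year-boundary split (weather tables are keyed per year, so a window spanning New Year needs two queries). The window arithmetic alone, as a sketch:

import datetime as d

year, month, day, delta = 2016, 1, 3, 10
today = d.datetime(year, month, day) - d.timedelta(1)   # 2016-01-02
start_date = today - d.timedelta(delta - 1)             # 2015-12-24
# The window spans two years, so the function issues one query per
# year's table and concatenates the results.
print(today.year == start_date.year)                    # False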
yun3195/Z5S_NX503A_KitKat_kernel
tools/perf/scripts/python/failed-syscalls-by-pid.py
11180
2058
# failed system call counts, by pid
# (c) 2010, Tom Zanussi <tzanussi@gmail.com>
# Licensed under the terms of the GNU GPL License version 2
#
# Displays system-wide failed system call totals, broken down by pid.
# If a [comm] arg is specified, only syscalls called by [comm] are displayed.

import os
import sys

sys.path.append(os.environ['PERF_EXEC_PATH'] + \
        '/scripts/python/Perf-Trace-Util/lib/Perf/Trace')

from perf_trace_context import *
from Core import *
from Util import *

usage = "perf script -s failed-syscalls-by-pid.py [comm|pid]\n";

for_comm = None
for_pid = None

if len(sys.argv) > 2:
        sys.exit(usage)

if len(sys.argv) > 1:
        try:
                for_pid = int(sys.argv[1])
        except:
                for_comm = sys.argv[1]

syscalls = autodict()

def trace_begin():
        print "Press control+C to stop and show the summary"

def trace_end():
        print_error_totals()

def raw_syscalls__sys_exit(event_name, context, common_cpu,
        common_secs, common_nsecs, common_pid, common_comm,
        id, ret):
        if (for_comm and common_comm != for_comm) or \
           (for_pid and common_pid != for_pid ):
                return

        if ret < 0:
                try:
                        syscalls[common_comm][common_pid][id][ret] += 1
                except TypeError:
                        syscalls[common_comm][common_pid][id][ret] = 1

def print_error_totals():
    if for_comm is not None:
            print "\nsyscall errors for %s:\n\n" % (for_comm),
    else:
            print "\nsyscall errors:\n\n",

    print "%-30s  %10s\n" % ("comm [pid]", "count"),
    print "%-30s  %10s\n" % ("------------------------------", \
                                 "----------"),

    comm_keys = syscalls.keys()
    for comm in comm_keys:
            pid_keys = syscalls[comm].keys()
            for pid in pid_keys:
                    print "\n%s [%d]\n" % (comm, pid),
                    id_keys = syscalls[comm][pid].keys()
                    for id in id_keys:
                            print "  syscall: %-16s\n" % syscall_name(id),
                            ret_keys = syscalls[comm][pid][id].keys()
                            for ret, val in sorted(syscalls[comm][pid][id].iteritems(), key = lambda(k, v): (v, k), reverse = True):
                                    print "    err = %-20s  %10d\n" % (strerror(ret), val),
gpl-2.0
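The autodict-plus-TypeError idiom above is the Perf-Trace-Util way of getting an autovivifying counter; the same bookkeeping with only the standard library, as a sketch:

from collections import defaultdict

counts = defaultdict(int)   # keyed by (comm, pid, syscall id, errno)

def record(comm, pid, sid, ret):
    if ret < 0:             # only failed syscalls, as in sys_exit above
        counts[(comm, pid, sid, ret)] += 1

record('bash', 4242, 2, -2)   # e.g. open() returning -ENOENT
record('bash', 4242, 2, -2)
print(counts[('bash', 4242, 2, -2)])   # 2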
vmanoria/bluemix-hue-filebrowser
hue-3.8.1-bluemix/desktop/core/ext-py/pycrypto-2.6.1/lib/Crypto/Util/py21compat.py
125
2779
# -*- coding: utf-8 -*- # # Util/py21compat.py : Compatibility code for Python 2.1 # # Written in 2008 by Dwayne C. Litzenberger <dlitz@dlitz.net> # # =================================================================== # The contents of this file are dedicated to the public domain. To # the extent that dedication to the public domain is not available, # everyone is granted a worldwide, perpetual, royalty-free, # non-exclusive license to exercise all rights associated with the # contents of this file for any purpose whatsoever. # No rights are reserved. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, # EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF # MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND # NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS # BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN # ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN # CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE # SOFTWARE. # =================================================================== """Compatibility code for Python 2.1 Currently, this just defines: - True and False - object - isinstance """ __revision__ = "$Id$" __all__ = [] import sys import __builtin__ # 'True' and 'False' aren't defined in Python 2.1. Define them. try: True, False except NameError: (True, False) = (1, 0) __all__ += ['True', 'False'] # New-style classes were introduced in Python 2.2. Defining "object" in Python # 2.1 lets us use new-style classes in versions of Python that support them, # while still maintaining backward compatibility with old-style classes try: object except NameError: class object: pass __all__ += ['object'] # Starting with Python 2.2, isinstance allows a tuple for the second argument. # Also, builtins like "tuple", "list", "str", "unicode", "int", and "long" # became first-class types, rather than functions. We want to support # constructs like: # isinstance(x, (int, long)) # So we hack it for Python 2.1. try: isinstance(5, (int, long)) except TypeError: __all__ += ['isinstance'] _builtin_type_map = { tuple: type(()), list: type([]), str: type(""), unicode: type(u""), int: type(0), long: type(0L), } def isinstance(obj, t): if not __builtin__.isinstance(t, type(())): # t is not a tuple return __builtin__.isinstance(obj, _builtin_type_map.get(t, t)) else: # t is a tuple for typ in t: if __builtin__.isinstance(obj, _builtin_type_map.get(typ, typ)): return True return False # vim:set ts=4 sw=4 sts=4 expandtab:
gpl-2.0
peterfpeterson/mantid
Testing/SystemTests/tests/framework/MagnetismReflectometryReductionTest.py
3
25066
# Mantid Repository : https://github.com/mantidproject/mantid # # Copyright &copy; 2018 ISIS Rutherford Appleton Laboratory UKRI, # NScD Oak Ridge National Laboratory, European Spallation Source, # Institut Laue - Langevin & CSNS, Institute of High Energy Physics, CAS # SPDX - License - Identifier: GPL - 3.0 + #pylint: disable=no-init,attribute-defined-outside-init import systemtesting from mantid import * from mantid.simpleapi import * import math class MagnetismReflectometryReductionTest(systemtesting.MantidSystemTest): def runTest(self): wsg = MRFilterCrossSections(Filename="REF_M_24949") MagnetismReflectometryReduction(InputWorkspace=wsg[0], NormalizationRunNumber=24945, SignalPeakPixelRange=[125, 129], SubtractSignalBackground=True, SignalBackgroundPixelRange=[15, 105], ApplyNormalization=True, NormPeakPixelRange=[201, 205], SubtractNormBackground=True, NormBackgroundPixelRange=[10,127], CutLowResDataAxis=True, LowResDataAxisPixelRange=[91, 161], CutLowResNormAxis=True, LowResNormAxisPixelRange=[86, 174], CutTimeAxis=True, UseWLTimeAxis=False, QMin=0.005, QStep=-0.01, TimeAxisStep=40, TimeAxisRange=[25000, 54000], SpecularPixel=126.9, ConstantQBinning=False, OutputWorkspace="r_24949") def validate(self): # Be more tolerant with the output, mainly because of the errors. # The following tolerance check the errors up to the third digit. self.disableChecking.append('Instrument') self.disableChecking.append('Sample') self.disableChecking.append('SpectraMap') self.disableChecking.append('Axes') return "r_24949", 'MagnetismReflectometryReductionTest.nxs' class MagnetismReflectometryReductionConstQTest(systemtesting.MantidSystemTest): def runTest(self): wsg = MRFilterCrossSections(Filename="REF_M_24949") MagnetismReflectometryReduction(InputWorkspace=wsg[0], NormalizationRunNumber=24945, SignalPeakPixelRange=[125, 129], SubtractSignalBackground=True, SignalBackgroundPixelRange=[15, 105], ApplyNormalization=True, NormPeakPixelRange=[201, 205], SubtractNormBackground=True, NormBackgroundPixelRange=[10,127], CutLowResDataAxis=True, LowResDataAxisPixelRange=[91, 161], CutLowResNormAxis=True, LowResNormAxisPixelRange=[86, 174], CutTimeAxis=True, UseWLTimeAxis=False, QMin=0.005, QStep=-0.01, TimeAxisStep=40, TimeAxisRange=[25000, 54000], SpecularPixel=126.9, ConstantQBinning=True, OutputWorkspace="r_24949") def validate(self): refl = mtd["r_24949"].dataY(0) return math.fabs(refl[1] - 0.648596877775159) < 0.002 class MagnetismReflectometryReductionSkipRebinTest(systemtesting.MantidSystemTest): def runTest(self): wsg = MRFilterCrossSections(Filename="REF_M_24949") MagnetismReflectometryReduction(InputWorkspace=wsg[0], NormalizationRunNumber=24945, SignalPeakPixelRange=[125, 129], SubtractSignalBackground=True, SignalBackgroundPixelRange=[15, 105], ApplyNormalization=True, NormPeakPixelRange=[201, 205], SubtractNormBackground=True, NormBackgroundPixelRange=[10,127], CutLowResDataAxis=True, LowResDataAxisPixelRange=[91, 161], CutLowResNormAxis=True, LowResNormAxisPixelRange=[86, 174], CutTimeAxis=True, UseWLTimeAxis=False, FinalRebin=False, QMin=0.005, QStep=-0.01, TimeAxisStep=40, TimeAxisRange=[25000, 54000], SpecularPixel=126.9, ConstantQBinning=False, OutputWorkspace="r_24949") def validate(self): q_values = mtd["r_24949"].dataX(0) return math.fabs(q_values[0] - 0.005) > 0.001 class MagnetismReflectometryReductionConstQWLCutTest(systemtesting.MantidSystemTest): def runTest(self): wsg = MRFilterCrossSections(Filename="REF_M_24949") MagnetismReflectometryReduction(InputWorkspace=wsg[0], 
NormalizationRunNumber=24945, SignalPeakPixelRange=[125, 129], SubtractSignalBackground=True, SignalBackgroundPixelRange=[15, 105], ApplyNormalization=True, NormPeakPixelRange=[201, 205], SubtractNormBackground=True, NormBackgroundPixelRange=[10,127], CutLowResDataAxis=True, LowResDataAxisPixelRange=[91, 161], CutLowResNormAxis=True, LowResNormAxisPixelRange=[86, 174], CutTimeAxis=True, UseWLTimeAxis=True, QMin=0.005, QStep=-0.01, TimeAxisStep=0.007, TimeAxisRange=[4.5, 10.5], SpecularPixel=126.9, ConstantQBinning=True, OutputWorkspace="r_24949") def validate(self): refl = mtd["r_24949"].dataY(0) return math.fabs(refl[1] - 0.648596877775159) < 0.002 class MRFilterCrossSectionsTest(systemtesting.MantidSystemTest): """ Test data loading and cross-section extraction """ def runTest(self): wsg = MRFilterCrossSections(Filename="REF_M_24949") MagnetismReflectometryReduction(InputWorkspace=str(wsg[0]), NormalizationRunNumber=24945, SignalPeakPixelRange=[125, 129], SubtractSignalBackground=True, SignalBackgroundPixelRange=[15, 105], ApplyNormalization=True, NormPeakPixelRange=[201, 205], SubtractNormBackground=True, NormBackgroundPixelRange=[10,127], CutLowResDataAxis=True, LowResDataAxisPixelRange=[91, 161], CutLowResNormAxis=True, LowResNormAxisPixelRange=[86, 174], CutTimeAxis=True, UseWLTimeAxis=False, QMin=0.005, QStep=-0.01, TimeAxisStep=40, TimeAxisRange=[25000, 54000], SpecularPixel=126.9, ConstantQBinning=False, OutputWorkspace="r_24949") def validate(self): # Be more tolerant with the output, mainly because of the errors. # The following tolerance check the errors up to the third digit. self.disableChecking.append('Instrument') self.disableChecking.append('Sample') self.disableChecking.append('SpectraMap') self.disableChecking.append('Axes') return "r_24949", 'MagnetismReflectometryReductionTest.nxs' class MRFilterCrossSectionsWithWorkspaceTest(systemtesting.MantidSystemTest): """ Test data loading and cross-section extraction """ def runTest(self): ws_input = LoadEventNexus(Filename="REF_M_24949", NXentryName="entry-Off_Off", OutputWorkspace="r_24949") # Since we are using a older data file for testing, add the # polarizer/analyzer info. This will also test the edge case where # there is no analyzer or polarizer, which should just be the # same as a simple load. AddSampleLog(Workspace=ws_input, LogName='polarizer', LogText="0", LogType='Number Series', LogUnit='') AddSampleLog(Workspace=ws_input, LogName='analyzer', LogText="0", LogType='Number Series', LogUnit='') wsg = MRFilterCrossSections(InputWorkspace=ws_input) MagnetismReflectometryReduction(InputWorkspace=wsg[0], NormalizationRunNumber=24945, SignalPeakPixelRange=[125, 129], SubtractSignalBackground=True, SignalBackgroundPixelRange=[15, 105], ApplyNormalization=True, NormPeakPixelRange=[201, 205], SubtractNormBackground=True, NormBackgroundPixelRange=[10,127], CutLowResDataAxis=True, LowResDataAxisPixelRange=[91, 161], CutLowResNormAxis=True, LowResNormAxisPixelRange=[86, 174], CutTimeAxis=True, UseWLTimeAxis=False, QMin=0.005, QStep=-0.01, TimeAxisStep=40, TimeAxisRange=[25000, 54000], SpecularPixel=126.9, ConstantQBinning=False, OutputWorkspace="r_24949") def validate(self): # Be more tolerant with the output, mainly because of the errors. # The following tolerance check the errors up to the third digit. 
self.disableChecking.append('Instrument') self.disableChecking.append('Sample') self.disableChecking.append('SpectraMap') self.disableChecking.append('Axes') return "r_24949", 'MagnetismReflectometryReductionTest.nxs' class MRNormaWorkspaceTest(systemtesting.MantidSystemTest): """ Test data loading and cross-section extraction """ def runTest(self): wsg = MRFilterCrossSections(Filename="REF_M_24949") ws_norm = LoadEventNexus(Filename="REF_M_24945", NXentryName="entry-Off_Off", OutputWorkspace="r_24945") MagnetismReflectometryReduction(InputWorkspace=wsg[0], NormalizationWorkspace=ws_norm, SignalPeakPixelRange=[125, 129], SubtractSignalBackground=True, SignalBackgroundPixelRange=[15, 105], ApplyNormalization=True, NormPeakPixelRange=[201, 205], SubtractNormBackground=True, NormBackgroundPixelRange=[10,127], CutLowResDataAxis=True, LowResDataAxisPixelRange=[91, 161], CutLowResNormAxis=True, LowResNormAxisPixelRange=[86, 174], CutTimeAxis=True, UseWLTimeAxis=False, QMin=0.005, QStep=-0.01, TimeAxisStep=40, TimeAxisRange=[25000, 54000], SpecularPixel=126.9, ConstantQBinning=False, OutputWorkspace="r_24949") def validate(self): # Be more tolerant with the output, mainly because of the errors. # The following tolerance check the errors up to the third digit. self.disableChecking.append('Instrument') self.disableChecking.append('Sample') self.disableChecking.append('SpectraMap') self.disableChecking.append('Axes') return "r_24949", 'MagnetismReflectometryReductionTest.nxs' class MRDIRPIXTest(systemtesting.MantidSystemTest): """ Test data loading and cross-section extraction """ def runTest(self): wsg = MRFilterCrossSections(Filename="REF_M_24949") ws_norm = LoadEventNexus(Filename="REF_M_24945", NXentryName="entry-Off_Off", OutputWorkspace="r_24945") #sc_angle = MRGetTheta(Workspace=wsg[0]) # The logs have DANGLE0 = 4.50514 and DIRPIX = 204 # Scatt angle = 0 # 131.9: 0.00989410349765 MagnetismReflectometryReduction(InputWorkspace=wsg[0], NormalizationWorkspace=ws_norm, SignalPeakPixelRange=[125, 129], SubtractSignalBackground=True, SignalBackgroundPixelRange=[15, 105], ApplyNormalization=True, NormPeakPixelRange=[201, 205], SubtractNormBackground=True, NormBackgroundPixelRange=[10,127], CutLowResDataAxis=True, LowResDataAxisPixelRange=[91, 161], CutLowResNormAxis=True, LowResNormAxisPixelRange=[86, 174], CutTimeAxis=True, UseWLTimeAxis=False, QMin=0.005, QStep=-0.01, TimeAxisStep=40, TimeAxisRange=[25000, 54000], SpecularPixel=136.9, UseSANGLE=False, DirectPixelOverwrite=214, ConstantQBinning=False, OutputWorkspace="r_24949") def validate(self): # Be more tolerant with the output, mainly because of the errors. # The following tolerance check the errors up to the third digit. 
self.disableChecking.append('Instrument') self.disableChecking.append('Sample') self.disableChecking.append('SpectraMap') return "r_24949", 'MagnetismReflectometryReductionTest.nxs' class MRDANGLE0Test(systemtesting.MantidSystemTest): """ Test data loading and cross-section extraction """ def runTest(self): wsg = MRFilterCrossSections(Filename="REF_M_24949") ws_norm = LoadEventNexus(Filename="REF_M_24945", NXentryName="entry-Off_Off", OutputWorkspace="r_24945") theta = MRGetTheta(Workspace=wsg[0], UseSANGLE=False, SpecularPixel=127.9) theta0 = MRGetTheta(Workspace=wsg[0], UseSANGLE=False, SpecularPixel=126.9) dangle0 = wsg[0].getRun()['DANGLE0'].getStatistics().mean dangle0 += (theta-theta0)*2.0*180./math.pi MagnetismReflectometryReduction(InputWorkspace=wsg[0], NormalizationWorkspace=ws_norm, SignalPeakPixelRange=[125, 129], SubtractSignalBackground=True, SignalBackgroundPixelRange=[15, 105], ApplyNormalization=True, NormPeakPixelRange=[201, 205], SubtractNormBackground=True, NormBackgroundPixelRange=[10,127], CutLowResDataAxis=True, LowResDataAxisPixelRange=[91, 161], CutLowResNormAxis=True, LowResNormAxisPixelRange=[86, 174], CutTimeAxis=True, UseWLTimeAxis=False, QMin=0.005, QStep=-0.01, TimeAxisStep=40, TimeAxisRange=[25000, 54000], SpecularPixel=127.9, UseSANGLE=False, DAngle0Overwrite=dangle0, ConstantQBinning=False, OutputWorkspace="r_24949") def validate(self): # Be more tolerant with the output, mainly because of the errors. # The following tolerance check the errors up to the third digit. self.disableChecking.append('Instrument') self.disableChecking.append('Sample') self.disableChecking.append('SpectraMap') return "r_24949", 'MagnetismReflectometryReductionTest.nxs' class MROutputTest(systemtesting.MantidSystemTest): """ Test the MR output algorithm """ def runTest(self): wsg = MRFilterCrossSections(Filename="REF_M_24949") ws_norm = LoadEventNexus(Filename="REF_M_24945", NXentryName="entry-Off_Off", OutputWorkspace="r_24945") MagnetismReflectometryReduction(InputWorkspace=wsg[0], NormalizationWorkspace=ws_norm, SignalPeakPixelRange=[125, 129], SubtractSignalBackground=True, SignalBackgroundPixelRange=[15, 105], ApplyNormalization=True, NormPeakPixelRange=[201, 205], SubtractNormBackground=True, NormBackgroundPixelRange=[10,127], CutLowResDataAxis=True, LowResDataAxisPixelRange=[91, 161], CutLowResNormAxis=True, LowResNormAxisPixelRange=[86, 174], CutTimeAxis=True, UseWLTimeAxis=False, QMin=0.005, QStep=-0.01, TimeAxisStep=40, TimeAxisRange=[25000, 54000], SpecularPixel=126.9, ConstantQBinning=False, OutputWorkspace="r_24949") def validate(self): # Be more tolerant with the output, mainly because of the errors. # The following tolerance check the errors up to the third digit. 
self.disableChecking.append('Instrument') self.disableChecking.append('Sample') self.disableChecking.append('SpectraMap') self.disableChecking.append('Axes') return "r_24949", 'MagnetismReflectometryReductionTest.nxs' class MRInspectionTest(systemtesting.MantidSystemTest): def runTest(self): nxs_data = LoadEventNexus(Filename="REF_M_24949", NXentryName="entry-Off_Off", OutputWorkspace="r_24949") MRInspectData(Workspace=nxs_data) def validate(self): # Simple test to verify that we flagged the data correctly return mtd["r_24949"].getRun().getProperty("is_direct_beam").value == "False" class MRInspectionOverwritesTest(systemtesting.MantidSystemTest): def runTest(self): nxs_data = LoadEventNexus(Filename="REF_M_24949", NXentryName="entry-Off_Off", OutputWorkspace="r_24949") MRInspectData(Workspace=nxs_data, DirectPixelOverwrite=208.0, DAngle0Overwrite=5.0) def validate(self): # Simple test to verify that we flagged the data correctly return mtd["r_24949"].getRun().getProperty("is_direct_beam").value == "False" class MRGetThetaTest(systemtesting.MantidSystemTest): """ Test that the MRGetTheta algorithm produces correct results """ def runTest(self): nxs_data = LoadEventNexus(Filename="REF_M_24949", NXentryName="entry-Off_Off", OutputWorkspace="r_24949") self.assertAlmostEqual(MRGetTheta(Workspace=nxs_data, UseSANGLE=True), 0.606127/180.0*math.pi) self.assertAlmostEqual(MRGetTheta(Workspace=nxs_data, UseSANGLE=True, AngleOffset=math.pi), 180.606127/180.0*math.pi) self.assertAlmostEqual(MRGetTheta(Workspace=nxs_data, SpecularPixel=126.1), 0.61249193272/180.0*math.pi) # In the present case, DANGLE = DANGLE0, so we expect 0 if nothing else is passed self.assertAlmostEqual(MRGetTheta(Workspace=nxs_data), 0.0) # The logs have DANGLE0 = 4.50514 and DIRPIX = 204 # Setting DIRPIX without setting a specular pixel shouldn't change anything self.assertAlmostEqual(MRGetTheta(Workspace=nxs_data, DirectPixelOverwrite=145), 0.0) # Setting DIRPIX and the specular pixel with move things # Move everything by 4 pixels and we should get the same answer (which depends only on the difference of the two) self.assertAlmostEqual(MRGetTheta(Workspace=nxs_data, DirectPixelOverwrite=208, SpecularPixel=130.1), 0.61249193272/180.0*math.pi) dangle0 = nxs_data.getRun()['DANGLE0'].value[0] self.assertAlmostEqual(MRGetTheta(Workspace=nxs_data, DAngle0Overwrite=dangle0+180.0), math.pi/2.0) def validate(self): return True
gpl-3.0
haroldl/homeworklog
django/contrib/comments/feeds.py
310
1439
from django.conf import settings from django.contrib.syndication.views import Feed from django.contrib.sites.models import Site from django.contrib import comments from django.utils.translation import ugettext as _ class LatestCommentFeed(Feed): """Feed of latest comments on the current site.""" def title(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return _("%(site_name)s comments") % dict(site_name=self._site.name) def link(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return "http://%s/" % (self._site.domain) def description(self): if not hasattr(self, '_site'): self._site = Site.objects.get_current() return _("Latest comments on %(site_name)s") % dict(site_name=self._site.name) def items(self): qs = comments.get_model().objects.filter( site__pk = settings.SITE_ID, is_public = True, is_removed = False, ) if getattr(settings, 'COMMENTS_BANNED_USERS_GROUP', None): where = ['user_id NOT IN (SELECT user_id FROM auth_user_groups WHERE group_id = %s)'] params = [settings.COMMENTS_BANNED_USERS_GROUP] qs = qs.extra(where=where, params=params) return qs.order_by('-submit_date')[:40] def item_pubdate(self, item): return item.submit_date
bsd-3-clause
colliell/Aerolyzer
aerolyzer/image_restriction_functions.py
1
4922
'''
Image Restriction Function File
Description: This file contains all functions for verifying image restrictions.
'''
import os
import re
import cv2
import yaml
from datetime import datetime
import exifread
import numpy as np

class imgRestFuncs(object):
    'Class containing all image restriction functions'

    def __init__(self):
        self.criteria = self._import_yaml(os.getcwd() + "/../../Aerolyzer/aerolyzer/config/image_restriction_conf.yaml")

    '''
    Purpose:        The purpose of this function is to determine whether or not the
                    device the image was taken on is an accepted mobile device.
    Inputs:         string device
    Outputs:        None
    Returns:        Boolean
    Assumptions:    N/A
    '''
    def is_device(self, device):
        if device in self.criteria['acceptedMobileDevices']:
            return True
        else:
            return False

    '''
    Purpose:        The purpose of this function is to determine whether or not the
                    image was altered from its original form, i.e. do the
                    modification and creation dates coincide.
    Inputs:         datetime created, datetime modified
    Outputs:        None
    Returns:        Boolean
    Assumptions:    N/A
    '''
    def is_edited(self, created, modified):
        if (created == modified):
            return True
        else:
            return False

    '''
    Purpose:        The purpose of this function is to determine whether or not the
                    image contains a direct landscape with sky and view.
    Inputs:         tuple lists of lists of rgb values (red, green, blue)
    Outputs:        None
    Returns:        Boolean
    Assumptions:    N/A
    '''
    def is_landscape(self, img):
        # Create a mask covering the top half of the image
        mask = np.zeros(img.shape[:2], np.uint8)
        mask[0:(img.shape[0] // 2), 0:img.shape[1]] = 255
        masked_img = cv2.bitwise_and(img, img, mask=mask)

        # Create histograms with 16 bins in range 0-255
        hist_blue = cv2.calcHist([img], [0], mask, [16], [0, 255])
        hist_green = cv2.calcHist([img], [1], mask, [16], [0, 255])
        hist_red = cv2.calcHist([img], [2], mask, [16], [0, 255])

        return self._is_sky(hist_blue, hist_green, hist_red)

    '''
    Purpose:        The purpose of this function is to determine whether or not the
                    size of the image is less than or equal to the configured maximum.
    Inputs:         int fileSize
    Outputs:        None
    Returns:        Boolean
    Assumptions:    N/A
    '''
    def is_size(self, fileSize):
        if(fileSize > self.criteria['imgMaxSizeNumber']):
            return False
        else:
            return True

    '''
    Purpose:        The purpose of this function is to determine whether or not the
                    image is an accepted file type.
    Inputs:         string fileType
    Outputs:        None
    Returns:        Boolean
    Assumptions:    N/A
    '''
    def is_type(self, fileType):
        if fileType in self.criteria['acceptedFileTypes']:
            return True
        else:
            return False

    '''
    Purpose:        The purpose of this function is to determine whether or not the
                    image resolution falls within the accepted minimum and maximum.
    Inputs:         int imageWidth, int imageLength
    Outputs:        None
    Returns:        Boolean
    Assumptions:    N/A
    '''
    def is_res(self, imageWidth, imageLength):
        if (imageWidth >= self.criteria['imgWidthMin']) and (imageLength >= self.criteria['imgLengthMin']):
            if (imageWidth <= self.criteria['imgWidthMax']) and (imageLength <= self.criteria['imgLengthMax']):
                return True
            else:
                return False
        else:
            # Previously this branch fell through and returned None instead
            # of False when the image was below the minimum resolution.
            return False

    '''
    Purpose:        The purpose of this function is to determine whether or not the
                    I and II quadrants of the image have rgb values indicative of a sky.
    Inputs:         list of lists red, list of lists green, list of lists blue
                    Note: Each inner list contains rgb for each pixel in a horizontal row
    Outputs:        None
    Returns:        Boolean
    Assumptions:    N/A
    '''
    def _is_sky(self, red, green, blue):
        maxIndexRed = np.argmin(red)
        maxIndexBlue = np.argmin(blue)
        maxIndexGreen = np.argmin(green)
        # insert code to determine if range of max values is accepted as a sky
        return True

    '''
    Purpose:        The purpose of this function is to import the contents of the
                    configuration file.
    Inputs:         string confFile
    Outputs:        None
    Returns:        reference to configuration file
    Assumptions:    N/A
    '''
    def _import_yaml(self, confFile):
        with open(confFile, 'r') as f:
            doc = yaml.load(f)
            f.close()
        return doc
apache-2.0
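is_landscape() restricts its histograms to the top half of the frame via a cv2 mask; the same selection can be illustrated with plain numpy slicing (16 bins over 0-255, matching the calcHist arguments above):

import numpy as np

img = np.random.randint(0, 256, (480, 640, 3), dtype=np.uint8)  # stand-in image
top_half = img[:img.shape[0] // 2]          # the rows the mask keeps
hist_blue, _ = np.histogram(top_half[..., 0], bins=16, range=(0, 255))
print(hist_blue.shape)                      # (16,)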
tcpcloud/openvstorage
webapps/api/backend/views/mgmtcenters.py
1
4478
# Copyright 2014 Open vStorage NV # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. """ MgmtCenter module """ from rest_framework import viewsets from rest_framework.permissions import IsAuthenticated from ovs.dal.lists.mgmtcenterlist import MgmtCenterList from ovs.dal.hybrids.mgmtcenter import MgmtCenter from backend.serializers.serializers import FullSerializer from rest_framework.exceptions import NotAcceptable from rest_framework.response import Response from rest_framework import status from backend.decorators import required_roles, load, return_object, return_list, log from ovs.log.logHandler import LogHandler from ovs.lib.mgmtcenter import MgmtCenterController from celery.exceptions import TimeoutError logger = LogHandler.get('api', 'mgmtcenters') class MgmtCenterViewSet(viewsets.ViewSet): """ Information about mgmtCenters """ permission_classes = (IsAuthenticated,) prefix = r'mgmtcenters' base_name = 'mgmtcenters' @log() @required_roles(['read']) @return_list(MgmtCenter) @load() def list(self): """ Overview of all mgmtCenters """ return MgmtCenterList.get_mgmtcenters() @log() @required_roles(['read']) @return_object(MgmtCenter) @load(MgmtCenter) def retrieve(self, mgmtcenter): """ Load information about a given mgmtCenter """ return mgmtcenter @log() @required_roles(['read', 'write', 'manage']) @load(MgmtCenter) def destroy(self, mgmtcenter): """ Deletes a Management center """ mgmtcenter.delete(abandon=['pmachines']) return Response(status=status.HTTP_204_NO_CONTENT) @log() @required_roles(['read', 'write', 'manage']) @load() def create(self, request): """ Creates a Management Center """ serializer = FullSerializer(MgmtCenter, instance=MgmtCenter(), data=request.DATA, allow_passwords=True) if serializer.is_valid(): mgmt_center = serializer.object duplicate = MgmtCenterList.get_by_ip(mgmt_center.ip) if duplicate is None: mgmt_center.save() try: task_id = MgmtCenterController.test_connection.apply_async(kwargs = {'mgmt_center_guid': mgmt_center.guid}).id task = MgmtCenterController.test_connection.AsyncResult(task_id) except: mgmt_center.delete() raise try: is_mgmt_center = task.get(timeout = 60, propagate = True) except TimeoutError: mgmt_center.delete() logger.error('Timed out waiting for test_connection') raise NotAcceptable('Timed out waiting for test_connection') except Exception as ex: # propagate reraises the exception raised in the task mgmt_center.delete() logger.error('Task exception %s' % ex) raise NotAcceptable('Task exception') if is_mgmt_center is True: return Response(serializer.data, status=status.HTTP_201_CREATED) elif is_mgmt_center is None: mgmt_center.delete() raise NotAcceptable('The given information is invalid.') elif is_mgmt_center is False: mgmt_center.delete() raise NotAcceptable('The given information is not for a Management center.') else: mgmt_center.delete() raise NotAcceptable('Unexpected result %s' % is_mgmt_center) else: raise NotAcceptable('A Management Center with this ip already exists.') else: return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
apache-2.0
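The create() handler above follows a save, verify asynchronously, roll back on failure shape: persist the record, fire off a connection-test task, and delete the record again on timeout or a negative answer. A minimal celery-free sketch of that flow, using a thread pool as a stand-in for the task queue (all names here are illustrative, not part of the original module):

import concurrent.futures

def test_connection(guid):
    # stand-in for MgmtCenterController.test_connection
    return True

def create_with_verification():
    record_saved = True  # stand-in for mgmt_center.save()
    with concurrent.futures.ThreadPoolExecutor(max_workers=1) as pool:
        future = pool.submit(test_connection, 'fake-guid')
        try:
            is_mgmt_center = future.result(timeout=60)
        except Exception:
            record_saved = False  # stand-in for mgmt_center.delete()
            raise
    if is_mgmt_center is not True:
        record_saved = False  # roll back on a negative answer too
    return record_saved

print(create_with_verification())  # True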
pratikmallya/hue
desktop/core/ext-py/Django-1.6.10/tests/one_to_one_regress/models.py
53
1430
from __future__ import unicode_literals

from django.db import models
from django.utils.encoding import python_2_unicode_compatible


@python_2_unicode_compatible
class Place(models.Model):
    name = models.CharField(max_length=50)
    address = models.CharField(max_length=80)

    def __str__(self):
        return "%s the place" % self.name


@python_2_unicode_compatible
class Restaurant(models.Model):
    place = models.OneToOneField(Place)
    serves_hot_dogs = models.BooleanField(default=False)
    serves_pizza = models.BooleanField(default=False)

    def __str__(self):
        return "%s the restaurant" % self.place.name


@python_2_unicode_compatible
class Bar(models.Model):
    place = models.OneToOneField(Place)
    serves_cocktails = models.BooleanField(default=True)

    def __str__(self):
        return "%s the bar" % self.place.name


class UndergroundBar(models.Model):
    place = models.OneToOneField(Place, null=True)
    serves_cocktails = models.BooleanField(default=True)


@python_2_unicode_compatible
class Favorites(models.Model):
    name = models.CharField(max_length=50)
    restaurants = models.ManyToManyField(Restaurant)

    def __str__(self):
        return "Favorites for %s" % self.name


class Target(models.Model):
    pass


class Pointer(models.Model):
    other = models.OneToOneField(Target, primary_key=True)


class Pointer2(models.Model):
    other = models.OneToOneField(Target)
apache-2.0
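A short usage sketch of the one-to-one relations these regression models exercise. It assumes a configured Django project in which the module is importable; the import path is illustrative:

# Assumes Django settings are configured and migrations applied.
from one_to_one_regress.models import Place, Restaurant

p = Place.objects.create(name="Demon Dogs", address="944 W. Fullerton")
r = Restaurant.objects.create(place=p, serves_hot_dogs=True, serves_pizza=False)

# Forward access goes through the OneToOneField...
assert r.place == p
# ...and the reverse accessor is the lowercased related model name.
assert p.restaurant == r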
yangming85/lettuce
tests/integration/lib/Django-1.3/django/contrib/gis/db/backends/oracle/models.py
310
2184
""" The GeometryColumns and SpatialRefSys models for the Oracle spatial backend. It should be noted that Oracle Spatial does not have database tables named according to the OGC standard, so the closest analogs are used. For example, the `USER_SDO_GEOM_METADATA` is used for the GeometryColumns model and the `SDO_COORD_REF_SYS` is used for the SpatialRefSys model. """ from django.contrib.gis.db import models from django.contrib.gis.db.models.fields import GeometryField from django.contrib.gis.db.backends.base import SpatialRefSysMixin class GeometryColumns(models.Model): "Maps to the Oracle USER_SDO_GEOM_METADATA table." table_name = models.CharField(max_length=32) column_name = models.CharField(max_length=1024) srid = models.IntegerField(primary_key=True) # TODO: Add support for `diminfo` column (type MDSYS.SDO_DIM_ARRAY). class Meta: db_table = 'USER_SDO_GEOM_METADATA' managed = False @classmethod def table_name_col(cls): """ Returns the name of the metadata column used to store the the feature table name. """ return 'table_name' @classmethod def geom_col_name(cls): """ Returns the name of the metadata column used to store the the feature geometry column. """ return 'column_name' def __unicode__(self): return '%s - %s (SRID: %s)' % (self.table_name, self.column_name, self.srid) class SpatialRefSys(models.Model, SpatialRefSysMixin): "Maps to the Oracle MDSYS.CS_SRS table." cs_name = models.CharField(max_length=68) srid = models.IntegerField(primary_key=True) auth_srid = models.IntegerField() auth_name = models.CharField(max_length=256) wktext = models.CharField(max_length=2046) # Optional geometry representing the bounds of this coordinate # system. By default, all are NULL in the table. cs_bounds = models.PolygonField(null=True) objects = models.GeoManager() class Meta: db_table = 'CS_SRS' managed = False @property def wkt(self): return self.wktext @classmethod def wkt_col(cls): return 'wktext'
gpl-3.0
robjohnson189/home-assistant
homeassistant/components/sensor/rest.py
23
4708
""" Support for RESTful API sensors. For more details about this platform, please refer to the documentation at https://home-assistant.io/components/sensor.rest/ """ import logging import voluptuous as vol import requests from requests.auth import HTTPBasicAuth, HTTPDigestAuth from homeassistant.components.sensor import PLATFORM_SCHEMA from homeassistant.const import ( CONF_PAYLOAD, CONF_NAME, CONF_VALUE_TEMPLATE, CONF_METHOD, CONF_RESOURCE, CONF_UNIT_OF_MEASUREMENT, STATE_UNKNOWN, CONF_VERIFY_SSL, CONF_USERNAME, CONF_PASSWORD, CONF_AUTHENTICATION, HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION, CONF_HEADERS) from homeassistant.helpers.entity import Entity import homeassistant.helpers.config_validation as cv _LOGGER = logging.getLogger(__name__) DEFAULT_METHOD = 'GET' DEFAULT_NAME = 'REST Sensor' DEFAULT_VERIFY_SSL = True PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_RESOURCE): cv.url, vol.Optional(CONF_AUTHENTICATION): vol.In([HTTP_BASIC_AUTHENTICATION, HTTP_DIGEST_AUTHENTICATION]), vol.Optional(CONF_HEADERS): {cv.string: cv.string}, vol.Optional(CONF_METHOD, default=DEFAULT_METHOD): vol.In(['POST', 'GET']), vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string, vol.Optional(CONF_PASSWORD): cv.string, vol.Optional(CONF_PAYLOAD): cv.string, vol.Optional(CONF_UNIT_OF_MEASUREMENT): cv.string, vol.Optional(CONF_USERNAME): cv.string, vol.Optional(CONF_VALUE_TEMPLATE): cv.template, vol.Optional(CONF_VERIFY_SSL, default=DEFAULT_VERIFY_SSL): cv.boolean, }) def setup_platform(hass, config, add_devices, discovery_info=None): """Setup the RESTful sensor.""" name = config.get(CONF_NAME) resource = config.get(CONF_RESOURCE) method = config.get(CONF_METHOD) payload = config.get(CONF_PAYLOAD) verify_ssl = config.get(CONF_VERIFY_SSL) username = config.get(CONF_USERNAME) password = config.get(CONF_PASSWORD) headers = config.get(CONF_HEADERS) unit = config.get(CONF_UNIT_OF_MEASUREMENT) value_template = config.get(CONF_VALUE_TEMPLATE) if value_template is not None: value_template.hass = hass if username and password: if config.get(CONF_AUTHENTICATION) == HTTP_DIGEST_AUTHENTICATION: auth = HTTPDigestAuth(username, password) else: auth = HTTPBasicAuth(username, password) else: auth = None rest = RestData(method, resource, auth, headers, payload, verify_ssl) rest.update() if rest.data is None: _LOGGER.error('Unable to fetch REST data') return False add_devices([RestSensor(hass, rest, name, unit, value_template)]) class RestSensor(Entity): """Implementation of a REST sensor.""" def __init__(self, hass, rest, name, unit_of_measurement, value_template): """Initialize the REST sensor.""" self._hass = hass self.rest = rest self._name = name self._state = STATE_UNKNOWN self._unit_of_measurement = unit_of_measurement self._value_template = value_template self.update() @property def name(self): """Return the name of the sensor.""" return self._name @property def unit_of_measurement(self): """Return the unit the value is expressed in.""" return self._unit_of_measurement @property def state(self): """Return the state of the device.""" return self._state def update(self): """Get the latest data from REST API and update the state.""" self.rest.update() value = self.rest.data if value is None: value = STATE_UNKNOWN elif self._value_template is not None: value = self._value_template.render_with_possible_json_value( value, STATE_UNKNOWN) self._state = value class RestData(object): """Class for handling the data retrieval.""" def __init__(self, method, resource, auth, headers, data, verify_ssl): 
"""Initialize the data object.""" self._request = requests.Request( method, resource, headers=headers, auth=auth, data=data).prepare() self._verify_ssl = verify_ssl self.data = None def update(self): """Get the latest data from REST service with provided method.""" try: with requests.Session() as sess: response = sess.send( self._request, timeout=10, verify=self._verify_ssl) self.data = response.text except requests.exceptions.RequestException: _LOGGER.error("Error fetching data: %s", self._request) self.data = None
mit
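RestData above prepares the HTTP request once and replays it through a fresh Session on every update. A standalone sketch of that same pattern, assuming the requests library is installed; the URL is a placeholder, not part of the original component:

import requests

# Prepare once; the same PreparedRequest is re-sent on every poll.
request = requests.Request("GET", "https://example.com/api/state").prepare()

def fetch(verify_ssl=True, timeout=10):
    """Return the response body text, or None on any request error."""
    try:
        with requests.Session() as session:
            response = session.send(request, timeout=timeout, verify=verify_ssl)
            return response.text
    except requests.exceptions.RequestException:
        return None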
jvanbrug/alanaldavista
boto/s3/multipart.py
17
12023
# Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/ # Copyright (c) 2012 Amazon.com, Inc. or its affiliates. # Copyright (c) 2010, Eucalyptus Systems, Inc. # All Rights Reserved # # Permission is hereby granted, free of charge, to any person obtaining a # copy of this software and associated documentation files (the # "Software"), to deal in the Software without restriction, including # without limitation the rights to use, copy, modify, merge, publish, dis- # tribute, sublicense, and/or sell copies of the Software, and to permit # persons to whom the Software is furnished to do so, subject to the fol- # lowing conditions: # # The above copyright notice and this permission notice shall be included # in all copies or substantial portions of the Software. # # THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS # OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL- # ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT # SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, # WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, # OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS # IN THE SOFTWARE. import user import key from boto import handler import xml.sax class CompleteMultiPartUpload(object): """ Represents a completed MultiPart Upload. Contains the following useful attributes: * location - The URI of the completed upload * bucket_name - The name of the bucket in which the upload is contained * key_name - The name of the new, completed key * etag - The MD5 hash of the completed, combined upload * version_id - The version_id of the completed upload * encrypted - The value of the encryption header """ def __init__(self, bucket=None): self.bucket = bucket self.location = None self.bucket_name = None self.key_name = None self.etag = None self.version_id = None self.encrypted = None def __repr__(self): return '<CompleteMultiPartUpload: %s.%s>' % (self.bucket_name, self.key_name) def startElement(self, name, attrs, connection): return None def endElement(self, name, value, connection): if name == 'Location': self.location = value elif name == 'Bucket': self.bucket_name = value elif name == 'Key': self.key_name = value elif name == 'ETag': self.etag = value else: setattr(self, name, value) class Part(object): """ Represents a single part in a MultiPart upload. Attributes include: * part_number - The integer part number * last_modified - The last modified date of this part * etag - The MD5 hash of this part * size - The size, in bytes, of this part """ def __init__(self, bucket=None): self.bucket = bucket self.part_number = None self.last_modified = None self.etag = None self.size = None def __repr__(self): if isinstance(self.part_number, int): return '<Part %d>' % self.part_number else: return '<Part %s>' % None def startElement(self, name, attrs, connection): return None def endElement(self, name, value, connection): if name == 'PartNumber': self.part_number = int(value) elif name == 'LastModified': self.last_modified = value elif name == 'ETag': self.etag = value elif name == 'Size': self.size = int(value) else: setattr(self, name, value) def part_lister(mpupload, part_number_marker=None): """ A generator function for listing parts of a multipart upload. 
""" more_results = True part = None while more_results: parts = mpupload.get_all_parts(None, part_number_marker) for part in parts: yield part part_number_marker = mpupload.next_part_number_marker more_results = mpupload.is_truncated class MultiPartUpload(object): """ Represents a MultiPart Upload operation. """ def __init__(self, bucket=None): self.bucket = bucket self.bucket_name = None self.key_name = None self.id = id self.initiator = None self.owner = None self.storage_class = None self.initiated = None self.part_number_marker = None self.next_part_number_marker = None self.max_parts = None self.is_truncated = False self._parts = None def __repr__(self): return '<MultiPartUpload %s>' % self.key_name def __iter__(self): return part_lister(self) def to_xml(self): s = '<CompleteMultipartUpload>\n' for part in self: s += ' <Part>\n' s += ' <PartNumber>%d</PartNumber>\n' % part.part_number s += ' <ETag>%s</ETag>\n' % part.etag s += ' </Part>\n' s += '</CompleteMultipartUpload>' return s def startElement(self, name, attrs, connection): if name == 'Initiator': self.initiator = user.User(self) return self.initiator elif name == 'Owner': self.owner = user.User(self) return self.owner elif name == 'Part': part = Part(self.bucket) self._parts.append(part) return part return None def endElement(self, name, value, connection): if name == 'Bucket': self.bucket_name = value elif name == 'Key': self.key_name = value elif name == 'UploadId': self.id = value elif name == 'StorageClass': self.storage_class = value elif name == 'PartNumberMarker': self.part_number_marker = value elif name == 'NextPartNumberMarker': self.next_part_number_marker = value elif name == 'MaxParts': self.max_parts = int(value) elif name == 'IsTruncated': if value == 'true': self.is_truncated = True else: self.is_truncated = False elif name == 'Initiated': self.initiated = value else: setattr(self, name, value) def get_all_parts(self, max_parts=None, part_number_marker=None, encoding_type=None): """ Return the uploaded parts of this MultiPart Upload. This is a lower-level method that requires you to manually page through results. To simplify this process, you can just use the object itself as an iterator and it will automatically handle all of the paging with S3. """ self._parts = [] query_args = 'uploadId=%s' % self.id if max_parts: query_args += '&max-parts=%d' % max_parts if part_number_marker: query_args += '&part-number-marker=%s' % part_number_marker if encoding_type: query_args += '&encoding-type=%s' % encoding_type response = self.bucket.connection.make_request('GET', self.bucket.name, self.key_name, query_args=query_args) body = response.read() if response.status == 200: h = handler.XmlHandler(self, self) xml.sax.parseString(body, h) return self._parts def upload_part_from_file(self, fp, part_num, headers=None, replace=True, cb=None, num_cb=10, md5=None, size=None): """ Upload another part of this MultiPart Upload. .. note:: After you initiate multipart upload and upload one or more parts, you must either complete or abort multipart upload in order to stop getting charged for storage of the uploaded parts. Only after you either complete or abort multipart upload, Amazon S3 frees up the parts storage and stops charging you for the parts storage. :type fp: file :param fp: The file object you want to upload. :type part_num: int :param part_num: The number of this part. The other parameters are exactly as defined for the :class:`boto.s3.key.Key` set_contents_from_file method. 
:rtype: :class:`boto.s3.key.Key` or subclass :returns: The uploaded part containing the etag. """ if part_num < 1: raise ValueError('Part numbers must be greater than zero') query_args = 'uploadId=%s&partNumber=%d' % (self.id, part_num) key = self.bucket.new_key(self.key_name) key.set_contents_from_file(fp, headers=headers, replace=replace, cb=cb, num_cb=num_cb, md5=md5, reduced_redundancy=False, query_args=query_args, size=size) return key def copy_part_from_key(self, src_bucket_name, src_key_name, part_num, start=None, end=None, src_version_id=None, headers=None): """ Copy another part of this MultiPart Upload. :type src_bucket_name: string :param src_bucket_name: Name of the bucket containing the source key :type src_key_name: string :param src_key_name: Name of the source key :type part_num: int :param part_num: The number of this part. :type start: int :param start: Zero-based byte offset to start copying from :type end: int :param end: Zero-based byte offset to copy to :type src_version_id: string :param src_version_id: version_id of source object to copy from :type headers: dict :param headers: Any headers to pass along in the request """ if part_num < 1: raise ValueError('Part numbers must be greater than zero') query_args = 'uploadId=%s&partNumber=%d' % (self.id, part_num) if start is not None and end is not None: rng = 'bytes=%s-%s' % (start, end) provider = self.bucket.connection.provider if headers is None: headers = {} else: headers = headers.copy() headers[provider.copy_source_range_header] = rng return self.bucket.copy_key(self.key_name, src_bucket_name, src_key_name, src_version_id=src_version_id, storage_class=None, headers=headers, query_args=query_args) def complete_upload(self): """ Complete the MultiPart Upload operation. This method should be called when all parts of the file have been successfully uploaded to S3. :rtype: :class:`boto.s3.multipart.CompletedMultiPartUpload` :returns: An object representing the completed upload. """ xml = self.to_xml() return self.bucket.complete_multipart_upload(self.key_name, self.id, xml) def cancel_upload(self): """ Cancels a MultiPart Upload operation. The storage consumed by any previously uploaded parts will be freed. However, if any part uploads are currently in progress, those part uploads might or might not succeed. As a result, it might be necessary to abort a given multipart upload multiple times in order to completely free all storage consumed by all parts. """ self.bucket.cancel_multipart_upload(self.key_name, self.id)
mit
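part_lister() above wraps S3's page-at-a-time part listing in a generator so callers can simply iterate the upload. The same shape, reduced to a self-contained sketch with a stubbed upload object standing in for the real S3 round-trips:

class FakeUpload(object):
    """Serves canned pages and updates the paging markers like S3 would."""
    def __init__(self, pages):
        self._pages = pages
        self._i = 0
        self.next_part_number_marker = None
        self.is_truncated = False

    def get_all_parts(self, max_parts, marker):
        page = self._pages[self._i]
        self._i += 1
        self.is_truncated = self._i < len(self._pages)
        self.next_part_number_marker = self._i
        return page

def part_lister(upload, marker=None):
    more = True
    while more:
        for part in upload.get_all_parts(None, marker):
            yield part
        marker = upload.next_part_number_marker
        more = upload.is_truncated

print(list(part_lister(FakeUpload([[1, 2], [3]]))))  # [1, 2, 3]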
AndroidGX/SimpleGX-MM-6.0_H815
scripts/gcc-wrapper.py
90
3524
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2011-2012, The Linux Foundation. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#     * Redistributions of source code must retain the above copyright
#       notice, this list of conditions and the following disclaimer.
#     * Redistributions in binary form must reproduce the above copyright
#       notice, this list of conditions and the following disclaimer in the
#       documentation and/or other materials provided with the distribution.
#     * Neither the name of The Linux Foundation nor
#       the names of its contributors may be used to endorse or promote
#       products derived from this software without specific prior written
#       permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NON-INFRINGEMENT ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# Invoke gcc, looking for warnings, and causing a failure if there are
# non-whitelisted warnings.

import errno
import re
import os
import sys
import subprocess

# Note that gcc uses unicode, which may depend on the locale.  TODO:
# force LANG to be set to en_US.UTF-8 to get consistent warnings.

allowed_warnings = set([
    "return_address.c:63",
    "kprobes.c:1493",
    "rcutree.c:1614",
    "af_unix.c:893",
    "nl80211.c:58",
    "jhash.h:137",
    "cmpxchg.h:201",
    "ping.c:87",
])

# Capture the name of the object file so we can remove it if a
# forbidden warning is found.
ofile = None

warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')
def interpret_warning(line):
    """Decode the message from gcc. The messages we care about have a filename, and a warning"""
    line = line.rstrip('\n')
    m = warning_re.match(line)
    if m and m.group(2) not in allowed_warnings:
        print "error, forbidden warning:", m.group(2)

        # If there is a warning, remove any object if it exists.
        if ofile:
            try:
                os.remove(ofile)
            except OSError:
                pass
        sys.exit(1)

def run_gcc():
    args = sys.argv[1:]
    # Look for -o
    try:
        i = args.index('-o')
        global ofile
        ofile = args[i+1]
    except (ValueError, IndexError):
        pass

    compiler = sys.argv[0]

    try:
        proc = subprocess.Popen(args, stderr=subprocess.PIPE)
        for line in proc.stderr:
            print line,
            interpret_warning(line)

        result = proc.wait()
    except OSError as e:
        result = e.errno
        if result == errno.ENOENT:
            print args[0] + ':', e.strerror
            print 'Is your PATH set correctly?'
        else:
            print ' '.join(args), str(e)

    return result

if __name__ == '__main__':
    status = run_gcc()
    sys.exit(status)
gpl-2.0
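The wrapper's core check, reduced to a standalone demo: match gcc diagnostics against a whitelist keyed by "file:line" (the regex is copied verbatim from the script above; the sample lines are made up):

import re

warning_re = re.compile(r'''(.*/|)([^/]+\.[a-z]+:\d+):(\d+:)? warning:''')
allowed = {"jhash.h:137"}

for line in ["include/linux/jhash.h:137:12: warning: unused variable",
             "drivers/foo.c:10:3: warning: something bad"]:
    m = warning_re.match(line)
    if m and m.group(2) not in allowed:
        # only the non-whitelisted warning is reported
        print("forbidden warning: " + m.group(2))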
MiLk/youtube-dl
youtube_dl/extractor/ooyala.py
12
2882
from __future__ import unicode_literals import re import json from .common import InfoExtractor from ..utils import unescapeHTML class OoyalaIE(InfoExtractor): _VALID_URL = r'(?:ooyala:|https?://.+?\.ooyala\.com/.*?(?:embedCode|ec)=)(?P<id>.+?)(&|$)' _TEST = { # From http://it.slashdot.org/story/13/04/25/178216/recovering-data-from-broken-hard-drives-and-ssds-video 'url': 'http://player.ooyala.com/player.js?embedCode=pxczE2YjpfHfn1f3M-ykG_AmJRRn0PD8', 'md5': '3f5cceb3a7bf461d6c29dc466cf8033c', 'info_dict': { 'id': 'pxczE2YjpfHfn1f3M-ykG_AmJRRn0PD8', 'ext': 'mp4', 'title': 'Explaining Data Recovery from Hard Drives and SSDs', 'description': 'How badly damaged does a drive have to be to defeat Russell and his crew? Apparently, smashed to bits.', }, } @staticmethod def _url_for_embed_code(embed_code): return 'http://player.ooyala.com/player.js?embedCode=%s' % embed_code @classmethod def _build_url_result(cls, embed_code): return cls.url_result(cls._url_for_embed_code(embed_code), ie=cls.ie_key()) def _extract_result(self, info, more_info): return { 'id': info['embedCode'], 'ext': 'mp4', 'title': unescapeHTML(info['title']), 'url': info.get('ipad_url') or info['url'], 'description': unescapeHTML(more_info['description']), 'thumbnail': more_info['promo'], } def _real_extract(self, url): mobj = re.match(self._VALID_URL, url) embedCode = mobj.group('id') player_url = 'http://player.ooyala.com/player.js?embedCode=%s' % embedCode player = self._download_webpage(player_url, embedCode) mobile_url = self._search_regex(r'mobile_player_url="(.+?)&device="', player, 'mobile player url') mobile_player = self._download_webpage(mobile_url, embedCode) videos_info = self._search_regex( r'var streams=window.oo_testEnv\?\[\]:eval\("\((\[{.*?}\])\)"\);', mobile_player, 'info').replace('\\"','"') videos_more_info = self._search_regex(r'eval\("\(({.*?\\"promo\\".*?})\)"', mobile_player, 'more info').replace('\\"','"') videos_info = json.loads(videos_info) videos_more_info =json.loads(videos_more_info) if videos_more_info.get('lineup'): videos = [self._extract_result(info, more_info) for (info, more_info) in zip(videos_info, videos_more_info['lineup'])] return { '_type': 'playlist', 'id': embedCode, 'title': unescapeHTML(videos_more_info['title']), 'entries': videos, } else: return self._extract_result(videos_info[0], videos_more_info)
unlicense
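A standalone sketch of the extractor's URL handling, with the regex copied from _VALID_URL above: pull the embed code out of a player URL.

import re

VALID_URL = r'(?:ooyala:|https?://.+?\.ooyala\.com/.*?(?:embedCode|ec)=)(?P<id>.+?)(&|$)'

url = 'http://player.ooyala.com/player.js?embedCode=pxczE2YjpfHfn1f3M-ykG_AmJRRn0PD8'
mobj = re.match(VALID_URL, url)
print(mobj.group('id'))  # pxczE2YjpfHfn1f3M-ykG_AmJRRn0PD8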
yunque/sms-tools
lectures/08-Sound-transformations/plots-code/stftFiltering-orchestra.py
18
1677
import numpy as np
import time, os, sys
import matplotlib.pyplot as plt
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../software/models/'))
sys.path.append(os.path.join(os.path.dirname(os.path.realpath(__file__)), '../../../software/transformations/'))
import utilFunctions as UF
import stftTransformations as STFTT
import stft as STFT

(fs, x) = UF.wavread('../../../sounds/orchestra.wav')
w = np.hamming(2048)
N = 2048
H = 512
# design a band stop filter using a hanning window
startBin = int(N*500.0/fs)
nBins = int(N*2000.0/fs)
bandpass = (np.hanning(nBins) * 65.0) - 60
filt = np.zeros(N//2+1) - 60  # integer division so the index stays an int
filt[startBin:startBin+nBins] = bandpass
y = STFTT.stftFiltering(x, fs, w, N, H, filt)
mX, pX = STFT.stftAnal(x, fs, w, N, H)
mY, pY = STFT.stftAnal(y, fs, w, N, H)

plt.figure(1, figsize=(12, 9))
plt.subplot(311)
numFrames = int(mX[:,0].size)
frmTime = H*np.arange(numFrames)/float(fs)
binFreq = np.arange(mX[0,:].size)*float(fs)/N
plt.pcolormesh(frmTime, binFreq, np.transpose(mX))
plt.title('mX (orchestra.wav)')
plt.autoscale(tight=True)

plt.subplot(312)
plt.plot(fs*np.arange(mX[0,:].size)/float(N), filt, 'k', lw=1.3)
plt.axis([0, fs/2, -60, 7])
plt.title('filter shape')

plt.subplot(313)
numFrames = int(mY[:,0].size)
frmTime = H*np.arange(numFrames)/float(fs)
binFreq = np.arange(mY[0,:].size)*float(fs)/N
plt.pcolormesh(frmTime, binFreq, np.transpose(mY))
plt.title('mY')
plt.autoscale(tight=True)

plt.tight_layout()
UF.wavwrite(y, fs, 'orchestra-stft-filtering.wav')
plt.savefig('stftFiltering-orchestra.png')
plt.show()
agpl-3.0
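The filter construction from the script above, isolated into a runnable snippet: a -60 dB floor with a Hanning bump between 500 Hz and 2000 Hz (values mirror the script's settings):

import numpy as np

fs, N = 44100, 2048
start_bin = int(N * 500.0 / fs)
n_bins = int(N * 2000.0 / fs)
filt = np.zeros(N // 2 + 1) - 60
filt[start_bin:start_bin + n_bins] = (np.hanning(n_bins) * 65.0) - 60
print(filt.min(), filt.max())  # roughly -60.0 and 5.0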
donnadionne/grpc
src/python/grpcio/_parallel_compile_patch.py
10
2431
# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Patches compile() to enable parallel compilation of C/C++.

build_ext has lots of C/C++ files and normally compiles them one by one.
Enabling parallel build helps a lot.
"""

import distutils.ccompiler
import os

try:
    BUILD_EXT_COMPILER_JOBS = int(
        os.environ['GRPC_PYTHON_BUILD_EXT_COMPILER_JOBS'])
except KeyError:
    import multiprocessing
    BUILD_EXT_COMPILER_JOBS = multiprocessing.cpu_count()
except ValueError:
    BUILD_EXT_COMPILER_JOBS = 1


# monkey-patch for parallel compilation
def _parallel_compile(self,
                      sources,
                      output_dir=None,
                      macros=None,
                      include_dirs=None,
                      debug=0,
                      extra_preargs=None,
                      extra_postargs=None,
                      depends=None):
    # setup the same way as distutils.ccompiler.CCompiler
    # https://github.com/python/cpython/blob/31368a4f0e531c19affe2a1becd25fc316bc7501/Lib/distutils/ccompiler.py#L564
    macros, objects, extra_postargs, pp_opts, build = self._setup_compile(
        str(output_dir), macros, include_dirs, sources, depends,
        extra_postargs)
    cc_args = self._get_cc_args(pp_opts, debug, extra_preargs)

    def _compile_single_file(obj):
        try:
            src, ext = build[obj]
        except KeyError:
            return
        self._compile(obj, src, ext, cc_args, extra_postargs, pp_opts)

    # run compilation of individual files in parallel
    import multiprocessing.pool
    multiprocessing.pool.ThreadPool(BUILD_EXT_COMPILER_JOBS).map(
        _compile_single_file, objects)
    return objects


def monkeypatch_compile_maybe():
    """Monkeypatching is dumb, but the build speed gain is worth it."""
    if BUILD_EXT_COMPILER_JOBS > 1:
        distutils.ccompiler.CCompiler.compile = _parallel_compile
apache-2.0
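The core of the monkey-patch above is simply "map the per-file compile step over a thread pool". A standalone sketch of that shape, with a stand-in for the real per-file compile:

import multiprocessing.pool

def compile_one(source):
    # stand-in for the real per-file _compile step
    return source + '.o'

pool = multiprocessing.pool.ThreadPool(4)
objects = pool.map(compile_one, ['a.c', 'b.c', 'c.c'])
pool.close()
pool.join()
print(objects)  # ['a.c.o', 'b.c.o', 'c.c.o']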
mancoast/CPythonPyc_test
cpython/270_test_pyclbr.py
3
7689
''' Test cases for pyclbr.py Nick Mathewson ''' from test.test_support import run_unittest, import_module import sys from types import ClassType, FunctionType, MethodType, BuiltinFunctionType import pyclbr from unittest import TestCase StaticMethodType = type(staticmethod(lambda: None)) ClassMethodType = type(classmethod(lambda c: None)) # Silence Py3k warning import_module('commands', deprecated=True) # This next line triggers an error on old versions of pyclbr. from commands import getstatus # Here we test the python class browser code. # # The main function in this suite, 'testModule', compares the output # of pyclbr with the introspected members of a module. Because pyclbr # is imperfect (as designed), testModule is called with a set of # members to ignore. class PyclbrTest(TestCase): def assertListEq(self, l1, l2, ignore): ''' succeed iff {l1} - {ignore} == {l2} - {ignore} ''' missing = (set(l1) ^ set(l2)) - set(ignore) if missing: print >>sys.stderr, "l1=%r\nl2=%r\nignore=%r" % (l1, l2, ignore) self.fail("%r missing" % missing.pop()) def assertHasattr(self, obj, attr, ignore): ''' succeed iff hasattr(obj,attr) or attr in ignore. ''' if attr in ignore: return if not hasattr(obj, attr): print "???", attr self.assertTrue(hasattr(obj, attr), 'expected hasattr(%r, %r)' % (obj, attr)) def assertHaskey(self, obj, key, ignore): ''' succeed iff key in obj or key in ignore. ''' if key in ignore: return if key not in obj: print >>sys.stderr, "***", key self.assertIn(key, obj) def assertEqualsOrIgnored(self, a, b, ignore): ''' succeed iff a == b or a in ignore or b in ignore ''' if a not in ignore and b not in ignore: self.assertEqual(a, b) def checkModule(self, moduleName, module=None, ignore=()): ''' succeed iff pyclbr.readmodule_ex(modulename) corresponds to the actual module object, module. Any identifiers in ignore are ignored. If no module is provided, the appropriate module is loaded with __import__.''' if module is None: # Import it. # ('<silly>' is to work around an API silliness in __import__) module = __import__(moduleName, globals(), {}, ['<silly>']) dict = pyclbr.readmodule_ex(moduleName) def ismethod(oclass, obj, name): classdict = oclass.__dict__ if isinstance(obj, FunctionType): if not isinstance(classdict[name], StaticMethodType): return False else: if not isinstance(obj, MethodType): return False if obj.im_self is not None: if (not isinstance(classdict[name], ClassMethodType) or obj.im_self is not oclass): return False else: if not isinstance(classdict[name], FunctionType): return False objname = obj.__name__ if objname.startswith("__") and not objname.endswith("__"): objname = "_%s%s" % (obj.im_class.__name__, objname) return objname == name # Make sure the toplevel functions and classes are the same. 
for name, value in dict.items(): if name in ignore: continue self.assertHasattr(module, name, ignore) py_item = getattr(module, name) if isinstance(value, pyclbr.Function): self.assertIsInstance(py_item, (FunctionType, BuiltinFunctionType)) if py_item.__module__ != moduleName: continue # skip functions that came from somewhere else self.assertEquals(py_item.__module__, value.module) else: self.assertIsInstance(py_item, (ClassType, type)) if py_item.__module__ != moduleName: continue # skip classes that came from somewhere else real_bases = [base.__name__ for base in py_item.__bases__] pyclbr_bases = [ getattr(base, 'name', base) for base in value.super ] try: self.assertListEq(real_bases, pyclbr_bases, ignore) except: print >>sys.stderr, "class=%s" % py_item raise actualMethods = [] for m in py_item.__dict__.keys(): if ismethod(py_item, getattr(py_item, m), m): actualMethods.append(m) foundMethods = [] for m in value.methods.keys(): if m[:2] == '__' and m[-2:] != '__': foundMethods.append('_'+name+m) else: foundMethods.append(m) try: self.assertListEq(foundMethods, actualMethods, ignore) self.assertEquals(py_item.__module__, value.module) self.assertEqualsOrIgnored(py_item.__name__, value.name, ignore) # can't check file or lineno except: print >>sys.stderr, "class=%s" % py_item raise # Now check for missing stuff. def defined_in(item, module): if isinstance(item, ClassType): return item.__module__ == module.__name__ if isinstance(item, FunctionType): return item.func_globals is module.__dict__ return False for name in dir(module): item = getattr(module, name) if isinstance(item, (ClassType, FunctionType)): if defined_in(item, module): self.assertHaskey(dict, name, ignore) def test_easy(self): self.checkModule('pyclbr') self.checkModule('doctest', ignore=("DocTestCase",)) # Silence Py3k warning rfc822 = import_module('rfc822', deprecated=True) self.checkModule('rfc822', rfc822) self.checkModule('difflib') def test_decorators(self): # XXX: See comment in pyclbr_input.py for a test that would fail # if it were not commented out. # self.checkModule('test.pyclbr_input') def test_others(self): cm = self.checkModule # These were once about the 10 longest modules cm('random', ignore=('Random',)) # from _random import Random as CoreGenerator cm('cgi', ignore=('log',)) # set with = in module cm('urllib', ignore=('_CFNumberToInt32', '_CStringFromCFString', '_CFSetup', 'getproxies_registry', 'proxy_bypass_registry', 'proxy_bypass_macosx_sysconf', 'open_https', 'getproxies_macosx_sysconf', 'getproxies_internetconfig',)) # not on all platforms cm('pickle') cm('aifc', ignore=('openfp',)) # set with = in module cm('Cookie') cm('sre_parse', ignore=('dump',)) # from sre_constants import * cm('pdb') cm('pydoc') # Tests for modules inside packages cm('email.parser') cm('test.test_pyclbr') def test_main(): run_unittest(PyclbrTest) if __name__ == "__main__": test_main()
gpl-3.0
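The behaviour under test, in miniature: pyclbr.readmodule_ex() builds descriptions of a module's top-level classes and functions from its source, without importing the module's code. A quick stdlib demo, independent of this test suite:

import pyclbr

descriptions = pyclbr.readmodule_ex('difflib')
for name, desc in sorted(descriptions.items()):
    # desc is a pyclbr.Class or pyclbr.Function descriptor
    print(name, type(desc).__name__)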
jbenden/ansible
lib/ansible/modules/network/avi/avi_serverautoscalepolicy.py
7
7261
#!/usr/bin/python # # Created on Aug 25, 2016 # @author: Gaurav Rastogi (grastogi@avinetworks.com) # Eric Anderson (eanderson@avinetworks.com) # module_check: supported # # # This file is part of Ansible # # Ansible is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # Ansible is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with Ansible. If not, see <http://www.gnu.org/licenses/>. # ANSIBLE_METADATA = {'metadata_version': '1.0', 'status': ['preview'], 'supported_by': 'community'} DOCUMENTATION = ''' --- module: avi_serverautoscalepolicy author: Gaurav Rastogi (grastogi@avinetworks.com) short_description: Module for setup of ServerAutoScalePolicy Avi RESTful Object description: - This module is used to configure ServerAutoScalePolicy object - more examples at U(https://github.com/avinetworks/devops) requirements: [ avisdk ] version_added: "2.4" options: state: description: - The state that should be applied on the entity. default: present choices: ["absent","present"] description: description: - User defined description for the object. intelligent_autoscale: description: - Use avi intelligent autoscale algorithm where autoscale is performed by comparing load on the pool against estimated capacity of all the servers. - Default value when not specified in API or module is interpreted by Avi Controller as False. intelligent_scalein_margin: description: - Maximum extra capacity as percentage of load used by the intelligent scheme. - Scalein is triggered when available capacity is more than this margin. - Allowed values are 1-99. - Default value when not specified in API or module is interpreted by Avi Controller as 40. intelligent_scaleout_margin: description: - Minimum extra capacity as percentage of load used by the intelligent scheme. - Scaleout is triggered when available capacity is less than this margin. - Allowed values are 1-99. - Default value when not specified in API or module is interpreted by Avi Controller as 20. max_scalein_adjustment_step: description: - Maximum number of servers to scalein simultaneously. - The actual number of servers to scalein is chosen such that target number of servers is always more than or equal to the min_size. - Default value when not specified in API or module is interpreted by Avi Controller as 1. max_scaleout_adjustment_step: description: - Maximum number of servers to scaleout simultaneously. - The actual number of servers to scaleout is chosen such that target number of servers is always less than or equal to the max_size. - Default value when not specified in API or module is interpreted by Avi Controller as 1. max_size: description: - Maximum number of servers after scaleout. - Allowed values are 0-400. min_size: description: - No scale-in happens once number of operationally up servers reach min_servers. - Allowed values are 0-400. name: description: - Name of the object. required: true scalein_alertconfig_refs: description: - Trigger scalein when alerts due to any of these alert configurations are raised. - It is a reference to an object of type alertconfig. 
scalein_cooldown: description: - Cooldown period during which no new scalein is triggered to allow previous scalein to successfully complete. - Default value when not specified in API or module is interpreted by Avi Controller as 300. scaleout_alertconfig_refs: description: - Trigger scaleout when alerts due to any of these alert configurations are raised. - It is a reference to an object of type alertconfig. scaleout_cooldown: description: - Cooldown period during which no new scaleout is triggered to allow previous scaleout to successfully complete. - Default value when not specified in API or module is interpreted by Avi Controller as 300. tenant_ref: description: - It is a reference to an object of type tenant. url: description: - Avi controller URL of the object. use_predicted_load: description: - Use predicted load rather than current load. - Default value when not specified in API or module is interpreted by Avi Controller as False. uuid: description: - Unique object identifier of the object. extends_documentation_fragment: - avi ''' EXAMPLES = """ - name: Example to create ServerAutoScalePolicy object avi_serverautoscalepolicy: controller: 10.10.25.42 username: admin password: something state: present name: sample_serverautoscalepolicy """ RETURN = ''' obj: description: ServerAutoScalePolicy (api/serverautoscalepolicy) object returned: success, changed type: dict ''' from ansible.module_utils.basic import AnsibleModule try: from ansible.module_utils.avi import ( avi_common_argument_spec, HAS_AVI, avi_ansible_api) except ImportError: HAS_AVI = False def main(): argument_specs = dict( state=dict(default='present', choices=['absent', 'present']), description=dict(type='str',), intelligent_autoscale=dict(type='bool',), intelligent_scalein_margin=dict(type='int',), intelligent_scaleout_margin=dict(type='int',), max_scalein_adjustment_step=dict(type='int',), max_scaleout_adjustment_step=dict(type='int',), max_size=dict(type='int',), min_size=dict(type='int',), name=dict(type='str', required=True), scalein_alertconfig_refs=dict(type='list',), scalein_cooldown=dict(type='int',), scaleout_alertconfig_refs=dict(type='list',), scaleout_cooldown=dict(type='int',), tenant_ref=dict(type='str',), url=dict(type='str',), use_predicted_load=dict(type='bool',), uuid=dict(type='str',), ) argument_specs.update(avi_common_argument_spec()) module = AnsibleModule( argument_spec=argument_specs, supports_check_mode=True) if not HAS_AVI: return module.fail_json(msg=( 'Avi python API SDK (avisdk>=17.1) is not installed. ' 'For more details visit https://github.com/avinetworks/sdk.')) return avi_ansible_api(module, 'serverautoscalepolicy', set([])) if __name__ == '__main__': main()
gpl-3.0
fxb22/BioGUI
plugins/Lists/Gene Expressions.py
1
3671
import os import sys import errno import time from datetime import datetime import filemtime as fmt import Catche as mP from xml.dom import minidom import tarfile def GetExec(): try: for names in os.listdir(r'.\CurrentCel/'): os.remove(r'.\CurrentCel/'+names) os.removedirs(r'.\CurrentCel') except: errno Recs = os.listdir(os.getcwd()) newList=[] j = 0 PForm = "" listdata=dict() GeoUntar = [] k = 0 ftime = open('lastChecked.txt','r') prevTime = float(ftime.readline()) ftime.close() f = open('lastChecked.txt','w') f.write(str(time.time())) f.close() for i in Recs: (nameLeft, ext) = os.path.splitext(i) if ext == '.tgz': newList.append([i]) geoListFile = nameLeft + ".pickle" if not os.path.isfile(geoListFile) or float(fmt.filemtime(i)) > prevTime: filelib = tarfile.TarFile.gzopen(i) #Istar = i GeoUntar.append(filelib) #print Istar nameHolder = filelib.getnames() ''''for k,itsgo in enumerate(nameHolder): try: if itsgo[-4:] =='.txt' : if itsgo[0:3] != r"GPL": newList.append(itsgo) elif itsgo[0:3] == r"GPL": PForm = itsgo[:-10] elif itsgo[-4:] == ".xml": f = filelib.extractfile(itsgo) minimal = minidom.parse(f).childNodes[0] titleText = minimal.childNodes[-2].childNodes[3].childNodes[0].toxml() except IOError, e: print e''' #print nameHolder[:5] PForm = nameHolder[1][:-10] #i = nameHolder[0] #print i f = filelib.extractfile(nameHolder[0]) minimal = minidom.parse(f).childNodes[0] titleText = minimal.childNodes[-2].childNodes[3].childNodes[0].toxml() #print len(minimal.childNodes) listdata[j] = str(nameLeft[:-4]),titleText, PForm, len(nameHolder)-2 rHoward = [nameLeft,titleText,PForm,len(nameHolder)-2] mP.spickle(geoListFile,rHoward) j += 1 else: rHoward = mP.opickle(geoListFile) listdata[j] = str(rHoward[0][:-4]),rHoward[1],rHoward[2],rHoward[3] j += 1 elif ext == r'.tar': filelib = tarfile.TarFile.taropen(i) nameHolder = filelib.getnames() cels = 0 for n in nameHolder: if n[-7:] == r'.CEL.gz': cels += 1 """ sys.path.append(r'..\plugins\Tools\ETOOLSPlugins') exTool = __import__('ESearch').GetExec('gds',str(nameLeft[:-4])) esTool = __import__('ESummary').GetExec('gds',str(exTool['IdList'][0])) titleText = '' PForm = '' for line in esTool.split('\n'): if len(line) > 32: if line[:34] == '\t<Item Name="title" Type="String">': titleText = line[34:-8] elif line[:32] == '\t<Item Name="GPL" Type="String">': PForm = 'GPL' + str(line[32:-7]) listdata[j] = str(nameLeft[:-4]),titleText, PForm, cels newList.append([i,PForm])""" return [newList,listdata]
gpl-2.0
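The plugin's central trick is pulling member names and the platform id straight out of a GEO .tgz without unpacking it. A reduced sketch of that step, assuming an archive exists at the placeholder path (the indexing and slicing conventions are taken from the plugin above):

import tarfile

with tarfile.open('GSE_example.tgz', 'r:gz') as archive:
    names = archive.getnames()
    platform = names[1][:-10]          # same slicing the plugin uses
    print(len(names) - 2, 'samples, platform', platform)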
mKeRix/home-assistant
tests/components/auth/test_mfa_setup_flow.py
24
3512
"""Tests for the mfa setup flow.""" from homeassistant import data_entry_flow from homeassistant.auth import auth_manager_from_config from homeassistant.components.auth import mfa_setup_flow from homeassistant.setup import async_setup_component from tests.common import CLIENT_ID, MockUser, ensure_auth_manager_loaded async def test_ws_setup_depose_mfa(hass, hass_ws_client): """Test set up mfa module for current user.""" hass.auth = await auth_manager_from_config( hass, provider_configs=[ { "type": "insecure_example", "users": [ { "username": "test-user", "password": "test-pass", "name": "Test Name", } ], } ], module_configs=[ { "type": "insecure_example", "id": "example_module", "data": [{"user_id": "mock-user", "pin": "123456"}], } ], ) ensure_auth_manager_loaded(hass.auth) await async_setup_component(hass, "auth", {"http": {}}) user = MockUser(id="mock-user").add_to_hass(hass) cred = await hass.auth.auth_providers[0].async_get_or_create_credentials( {"username": "test-user"} ) await hass.auth.async_link_user(user, cred) refresh_token = await hass.auth.async_create_refresh_token(user, CLIENT_ID) access_token = hass.auth.async_create_access_token(refresh_token) client = await hass_ws_client(hass, access_token) await client.send_json({"id": 10, "type": mfa_setup_flow.WS_TYPE_SETUP_MFA}) result = await client.receive_json() assert result["id"] == 10 assert result["success"] is False assert result["error"]["code"] == "no_module" await client.send_json( { "id": 11, "type": mfa_setup_flow.WS_TYPE_SETUP_MFA, "mfa_module_id": "example_module", } ) result = await client.receive_json() assert result["id"] == 11 assert result["success"] flow = result["result"] assert flow["type"] == data_entry_flow.RESULT_TYPE_FORM assert flow["handler"] == "example_module" assert flow["step_id"] == "init" assert flow["data_schema"][0] == {"type": "string", "name": "pin"} await client.send_json( { "id": 12, "type": mfa_setup_flow.WS_TYPE_SETUP_MFA, "flow_id": flow["flow_id"], "user_input": {"pin": "654321"}, } ) result = await client.receive_json() assert result["id"] == 12 assert result["success"] flow = result["result"] assert flow["type"] == data_entry_flow.RESULT_TYPE_CREATE_ENTRY assert flow["handler"] == "example_module" assert flow["data"]["result"] is None await client.send_json( { "id": 13, "type": mfa_setup_flow.WS_TYPE_DEPOSE_MFA, "mfa_module_id": "invalid_id", } ) result = await client.receive_json() assert result["id"] == 13 assert result["success"] is False assert result["error"]["code"] == "disable_failed" await client.send_json( { "id": 14, "type": mfa_setup_flow.WS_TYPE_DEPOSE_MFA, "mfa_module_id": "example_module", } ) result = await client.receive_json() assert result["id"] == 14 assert result["success"] assert result["result"] == "done"
mit
joolswhitehorn/stuff-projects
pinest.py
1
2587
#imports for thermometer reading
import os
import glob
import time
#imports for gmail reading
import imaplib
import email

# wiringpi numbers
import wiringpi2 as wiringpi
wiringpi.wiringPiSetup()
wiringpi.pinMode(0, 1)  # sets WP pin 0 to output

#Find temperature from thermometer
os.system('modprobe w1-gpio')
os.system('modprobe w1-therm')

base_dir = '/sys/bus/w1/devices/'
device_folder = glob.glob(base_dir + '28*')[0]
device_file = device_folder + '/w1_slave'

def read_temp_raw():
    f = open(device_file, 'r')
    lines = f.readlines()
    f.close()
    return lines

def read_temp():
    lines = read_temp_raw()
    while lines[0].strip()[-3:] != 'YES':
        time.sleep(0.2)
        lines = read_temp_raw()
    equals_pos = lines[1].find('t=')
    if equals_pos != -1:
        temp_string = lines[1][equals_pos+2:]
        temp_c = float(temp_string) / 1000.0
        temp_f = temp_c * 9.0 / 5.0 + 32.0
        return temp_c #, temp_f

#connect to gmail
def read_gmail():
    global varSubject
    mail = imaplib.IMAP4_SSL('imap.gmail.com')
    mail.login('YOUREMAILADDRESS@gmail.com','YOUR PASSWORD HERE')
    mail.select('inbox')
    mail.list()
    typ, data = mail.search(None, 'ALL')
    for num in data[0].split():
        typ, data = mail.fetch(num, '(RFC822)')
    typ, data = mail.search(None, 'ALL')
    ids = data[0]
    id_list = ids.split()
    # Any Emails? get most recent email id
    if id_list:
        latest_email_id = int( id_list[-1] )
        for i in range( latest_email_id, latest_email_id-1, -1):
            typ, data = mail.fetch( i, '(RFC822)')
            for response_part in data:
                if isinstance(response_part, tuple):
                    msg = email.message_from_string(response_part[1])
                    varSubject = msg['subject']
                    varFrom = msg['from']
                    varFrom = varFrom.replace('<','')
                    varFrom = varFrom.replace('>','')
    #Remove used emails from mailbox
    typ, data = mail.search(None, 'ALL')
    for num in data[0].split():
        mail.store(num, '+FLAGS', '\\Deleted')
    mail.expunge()
    mail.close()
    mail.logout()
    return int(varSubject)

while True:
    # read each value once per cycle instead of polling the sensor and
    # the mailbox twice
    current_temp = read_temp()
    target_temp = read_gmail()
    print "Current temp"
    print current_temp
    print "Target temp"
    print target_temp
    if target_temp > current_temp:  # compare target (email subject) to temp
        wiringpi.digitalWrite(0, 1)  # sets port 0 to 1 (3.3V, on)
        print "HEATING ON\n"
    else:
        wiringpi.digitalWrite(0, 0)  # sets port 0 to 0 (3.3V, off)
        print "HEATING OFF\n"
    time.sleep(5)
gpl-2.0
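read_temp() above spins on the sensor file until the CRC line says YES, then scales the raw millidegree value. The parsing step isolated into a pure function, run here against a canned DS18B20 w1_slave payload instead of the real sysfs file:

def parse_w1_slave(lines):
    if lines[0].strip()[-3:] != 'YES':
        return None                      # CRC failed, caller should retry
    equals_pos = lines[1].find('t=')
    if equals_pos == -1:
        return None
    return float(lines[1][equals_pos + 2:]) / 1000.0

sample = ['72 01 4b 46 7f ff 0e 10 57 : crc=57 YES\n',
          '72 01 4b 46 7f ff 0e 10 57 t=23125\n']
print(parse_w1_slave(sample))  # 23.125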
abstract-open-solutions/server-tools
__unported__/email_template_template/model/email_template.py
61
2701
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # This module copyright (C) 2013 Therp BV (<http://therp.nl>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from openerp.osv.orm import Model from openerp.osv import fields from openerp.addons.email_template.email_template import mako_template_env class email_template(Model): _inherit = 'email.template' def _get_is_template_template(self, cr, uid, ids, fields_name, arg, context=None): cr.execute('''select id, (select count(*) > 0 from email_template e where email_template_id=email_template.id) from email_template where id in %s''', (tuple(ids),)) return dict(cr.fetchall()) _columns = { 'email_template_id': fields.many2one('email.template', 'Template'), 'is_template_template': fields.function( _get_is_template_template, type='boolean', string='Is a template template'), } def get_email_template(self, cr, uid, template_id=False, record_id=None, context=None): this = super(email_template, self).get_email_template( cr, uid, template_id, record_id, context) if this.email_template_id and not this.is_template_template: for field in ['body_html']: if this[field] and this.email_template_id[field]: try: mako_template_env.autoescape = False this._data[this.id][field] = self.render_template( cr, uid, this.email_template_id[field], this.email_template_id.model, this.id, this._context) finally: mako_template_env.autoescape = True return this
agpl-3.0
40223208/CDB-Final-
static/Brython3.1.1-20150328-091302/Lib/browser/object_storage.py
627
1315
import pickle

class _UnProvided():
    # sentinel for "no default given"; a single leading underscore avoids
    # CPython's name mangling of __-prefixed names inside class bodies
    pass

class ObjectStorage():
    def __init__(self, storage):
        self.storage = storage

    def __delitem__(self, key):
        del self.storage[pickle.dumps(key)]

    def __getitem__(self, key):
        return pickle.loads(self.storage[pickle.dumps(key)])

    def __setitem__(self, key, value):
        self.storage[pickle.dumps(key)] = pickle.dumps(value)

    def __contains__(self, key):
        return pickle.dumps(key) in self.storage

    def get(self, key, default=None):
        if pickle.dumps(key) in self.storage:
            # unpickle before returning so get() matches __getitem__
            return pickle.loads(self.storage[pickle.dumps(key)])
        return default

    def pop(self, key, default=_UnProvided()):
        if type(default) is _UnProvided or pickle.dumps(key) in self.storage:
            return pickle.loads(self.storage.pop(pickle.dumps(key)))
        return default

    def __iter__(self):
        keys = self.keys()
        return keys.__iter__()

    def keys(self):
        return [pickle.loads(key) for key in self.storage.keys()]

    def values(self):
        return [pickle.loads(val) for val in self.storage.values()]

    def items(self):
        return list(zip(self.keys(), self.values()))

    def clear(self):
        self.storage.clear()

    def __len__(self):
        return len(self.storage)
gpl-3.0
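Because every key and value is round-tripped through pickle, any picklable object works on either side. A usage sketch, assuming the ObjectStorage class above is importable and backed by a plain dict:

store = ObjectStorage({})
store[('run', 42)] = {'loss': 0.17, 'step': 42}
print(store[('run', 42)])          # {'loss': 0.17, 'step': 42}
print(('run', 42) in store)        # True
print(store.pop(('run', 42)))      # {'loss': 0.17, 'step': 42}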
fenglu-g/incubator-airflow
tests/dags_with_system_exit/b_test_scheduler_dags.py
11
1125
# -*- coding: utf-8 -*- # # Licensed to the Apache Software Foundation (ASF) under one # or more contributor license agreements. See the NOTICE file # distributed with this work for additional information # regarding copyright ownership. The ASF licenses this file # to you under the Apache License, Version 2.0 (the # "License"); you may not use this file except in compliance # with the License. You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, # software distributed under the License is distributed on an # "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY # KIND, either express or implied. See the License for the # specific language governing permissions and limitations # under the License. from datetime import datetime from airflow.models import DAG from airflow.operators.dummy_operator import DummyOperator DEFAULT_DATE = datetime(2000, 1, 1) dag1 = DAG( dag_id='exit_test_dag', start_date=DEFAULT_DATE) dag1_task1 = DummyOperator( task_id='dummy', dag=dag1, owner='airflow')
apache-2.0
equitania/myodoo-addons-v10
eq_sql_exec/eq_import_helper.py
1
1270
# -*- coding: utf-8 -*- ############################################################################## # # Odoo Addon, Open Source Management Solution # Copyright (C) 2014-now Equitania Software GmbH(<http://www.equitania.de>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## #from openerp.osv import fields, osv from openerp import fields,models class eq_import_helper(models.Model): _name = 'eq_import_helper' ir_model = fields.Many2one('ir.model') old_id = fields.Integer('Old id') new_id = fields.Integer('New id')
agpl-3.0
GabrielNicolasAvellaneda/dd-agent
checks/datadog.py
25
14804
# stdlib from datetime import datetime import glob from itertools import groupby import os import re import sys import time import traceback # project from checks import LaconicFilter import modules from util import windows_friendly_colon_split from utils.tailfile import TailFile if hasattr('some string', 'partition'): def partition(s, sep): return s.partition(sep) else: def partition(s, sep): pos = s.find(sep) if pos == -1: return (s, sep, '') else: return s[0:pos], sep, s[pos + len(sep):] def point_sorter(p): # Sort and group by timestamp, metric name, host_name, device_name return (p[1], p[0], p[3].get('host_name', None), p[3].get('device_name', None)) class EventDefaults(object): EVENT_TYPE = 'dogstream_event' EVENT_OBJECT = 'dogstream_event:default' class Dogstreams(object): @classmethod def init(cls, logger, config): dogstreams_config = config.get('dogstreams', None) if dogstreams_config: dogstreams = cls._instantiate_dogstreams(logger, config, dogstreams_config) else: dogstreams = [] logger.info("Dogstream parsers: %s" % repr(dogstreams)) return cls(logger, dogstreams) def __init__(self, logger, dogstreams): self.logger = logger self.dogstreams = dogstreams @classmethod def _instantiate_dogstreams(cls, logger, config, dogstreams_config): """ Expecting dogstreams config value to look like: <dogstream value>, <dog stream value>, ... Where <dogstream value> looks like: <log path> or <log path>:<module>:<parser function> """ dogstreams = [] # Create a Dogstream object for each <dogstream value> for config_item in dogstreams_config.split(','): try: config_item = config_item.strip() parts = windows_friendly_colon_split(config_item) if len(parts) == 2: logger.warn("Invalid dogstream: %s" % ':'.join(parts)) continue log_path = cls._get_dogstream_log_paths(parts[0]) if len(parts) else [] parser_spec = ':'.join(parts[1:3]) if len(parts) >= 3 else None parser_args = parts[3:] if len(parts) >= 3 else None for path in log_path: dogstreams.append(Dogstream.init( logger, log_path=path, parser_spec=parser_spec, parser_args=parser_args, config=config)) except Exception: logger.exception("Cannot build dogstream") return dogstreams @classmethod def _get_dogstream_log_paths(cls, path): """ Paths may include wildcard *'s and ?'s. """ if '*' not in path: return [path] return glob.glob(path) def check(self, agentConfig, move_end=True): if not self.dogstreams: return {} output = {} for dogstream in self.dogstreams: try: result = dogstream.check(agentConfig, move_end) # result may contain {"dogstream": [new]}. 
# If output contains {"dogstream": [old]}, that old value will get concatenated with the new value assert type(result) == type(output), "dogstream.check must return a dictionary" for k in result: if k in output: output[k].extend(result[k]) else: output[k] = result[k] except Exception: self.logger.exception("Error in parsing %s" % (dogstream.log_path)) return output class Dogstream(object): @classmethod def init(cls, logger, log_path, parser_spec=None, parser_args=None, config=None): class_based = False parse_func = None parse_args = tuple(parser_args or ()) if parser_spec: try: parse_func = modules.load(parser_spec) if isinstance(parse_func, type): logger.info('Instantiating class-based dogstream') parse_func = parse_func( user_args=parse_args or (), logger=logger, log_path=log_path, config=config, ) parse_args = () class_based = True else: logger.info('Instantiating function-based dogstream') except Exception: logger.exception(traceback.format_exc()) logger.error('Could not load Dogstream line parser "%s" PYTHONPATH=%s' % ( parser_spec, os.environ.get('PYTHONPATH', '')) ) logger.info("dogstream: parsing %s with %s (requested %s)" % (log_path, parse_func, parser_spec)) else: logger.info("dogstream: parsing %s with default parser" % log_path) return cls(logger, log_path, parse_func, parse_args, class_based=class_based) def __init__(self, logger, log_path, parse_func=None, parse_args=(), class_based=False): self.logger = logger self.class_based = class_based # Apply LaconicFilter to avoid log flooding self.logger.addFilter(LaconicFilter("dogstream")) self.log_path = log_path self.parse_func = parse_func or self._default_line_parser self.parse_args = parse_args self._gen = None self._values = None self._freq = 15 # Will get updated on each check() self._error_count = 0L self._line_count = 0L self.parser_state = {} def check(self, agentConfig, move_end=True): if self.log_path: self._freq = int(agentConfig.get('check_freq', 15)) self._values = [] self._events = [] # Build our tail -f if self._gen is None: self._gen = TailFile(self.logger, self.log_path, self._line_parser).tail(line_by_line=False, move_end=move_end) # read until the end of file try: self._gen.next() self.logger.debug("Done dogstream check for file %s, found %s metric points" % (self.log_path, len(self._values))) except StopIteration, e: self.logger.exception(e) self.logger.warn("Can't tail %s file" % self.log_path) check_output = self._aggregate(self._values) if self._events: check_output.update({"dogstreamEvents": self._events}) return check_output else: return {} def _line_parser(self, line): try: # alq - Allow parser state to be kept between invocations # This means a new argument can be passed the custom parsing function # to store context that can be shared between parsing of lines. # One example is a running counter, which is incremented each time # a line is processed. 
parsed = None if self.class_based: parsed = self.parse_func.parse_line(line) else: try: parsed = self.parse_func(self.logger, line, self.parser_state, *self.parse_args) except TypeError, e: # Arity of parse_func is 3 (old-style), not 4 parsed = self.parse_func(self.logger, line) self._line_count += 1 if parsed is None: return if isinstance(parsed, (tuple, dict)): parsed = [parsed] for datum in parsed: # Check if it's an event if isinstance(datum, dict): # An event requires at least a title or a body if 'msg_title' not in datum and 'msg_text' not in datum: continue # Populate the default fields if 'event_type' not in datum: datum['event_type'] = EventDefaults.EVENT_TYPE if 'timestamp' not in datum: datum['timestamp'] = time.time() # Make sure event_object and aggregation_key (synonyms) are set # FIXME when the backend treats those as true synonyms, we can # deprecate event_object. if 'event_object' in datum or 'aggregation_key' in datum: datum['aggregation_key'] = datum.get('event_object', datum.get('aggregation_key')) else: datum['aggregation_key'] = EventDefaults.EVENT_OBJECT datum['event_object'] = datum['aggregation_key'] self._events.append(datum) continue # Otherwise, assume it's a metric try: metric, ts, value, attrs = datum except Exception: continue # Validation invalid_reasons = [] try: # Bucket points into 15 second buckets ts = (int(float(ts)) / self._freq) * self._freq date = datetime.fromtimestamp(ts) assert date.year > 1990 except Exception: invalid_reasons.append('invalid timestamp') try: value = float(value) except Exception: invalid_reasons.append('invalid metric value') if invalid_reasons: self.logger.debug('Invalid parsed values %s (%s): "%s"', repr(datum), ', '.join(invalid_reasons), line) else: self._values.append((metric, ts, value, attrs)) except Exception, e: self.logger.debug("Error while parsing line %s" % line, exc_info=True) self._error_count += 1 self.logger.error("Parser error: %s out of %s" % (self._error_count, self._line_count)) def _default_line_parser(self, logger, line): original_line = line sep = ' ' metric, _, line = partition(line.strip(), sep) timestamp, _, line = partition(line.strip(), sep) value, _, line = partition(line.strip(), sep) attributes = {} try: while line: keyval, _, line = partition(line.strip(), sep) key, val = keyval.split('=', 1) attributes[key] = val except Exception, e: logger.debug(traceback.format_exc()) return metric, timestamp, value, attributes def _aggregate(self, values): """ Aggregate values down to the second and store as: { "dogstream": [(metric, timestamp, value, {key: val})] } If there are many values per second for a metric, take the median """ output = [] values.sort(key=point_sorter) for (timestamp, metric, host_name, device_name), val_attrs in groupby(values, key=point_sorter): attributes = {} vals = [] for _metric, _timestamp, v, a in val_attrs: try: v = float(v) vals.append(v) attributes.update(a) except Exception: self.logger.debug("Could not convert %s into a float", v) if len(vals) == 1: val = vals[0] elif len(vals) > 1: val = vals[-1] else: # len(vals) == 0 continue metric_type = str(attributes.get('metric_type', '')).lower() if metric_type == 'gauge': val = float(val) elif metric_type == 'counter': val = sum(vals) output.append((metric, timestamp, val, attributes)) if output: return {"dogstream": output} else: return {} # Allow a smooth uninstall of previous version class RollupLP: pass class DdForwarder(object): QUEUE_SIZE = "queue_size" QUEUE_COUNT = "queue_count" RE_QUEUE_STAT = re.compile(r"\[.*\] Queue 
size: at (.*), (\d+) transaction\(s\), (\d+) KB") def __init__(self, logger, config): self.log_path = config.get('ddforwarder_log', '/var/log/ddforwarder.log') self.logger = logger self._gen = None def _init_metrics(self): self.metrics = {} def _add_metric(self, name, value, ts): if name in self.metrics: self.metrics[name].append((ts, value)) else: self.metrics[name] = [(ts, value)] def _parse_line(self, line): try: m = self.RE_QUEUE_STAT.match(line) if m is not None: ts, count, size = m.groups() self._add_metric(self.QUEUE_SIZE, size, round(float(ts))) self._add_metric(self.QUEUE_COUNT, count, round(float(ts))) except Exception, e: self.logger.exception(e) def check(self, agentConfig, move_end=True): if self.log_path and os.path.isfile(self.log_path): #reset metric points self._init_metrics() # Build our tail -f if self._gen is None: self._gen = TailFile(self.logger, self.log_path, self._parse_line).tail(line_by_line=False, move_end=move_end) # read until the end of file try: self._gen.next() self.logger.debug("Done ddforwarder check for file %s" % self.log_path) except StopIteration, e: self.logger.exception(e) self.logger.warn("Can't tail %s file" % self.log_path) return {'ddforwarder': self.metrics} else: self.logger.debug("Can't tail datadog forwarder log file: %s" % self.log_path) return {} def testddForwarder(): import logging logger = logging.getLogger("ddagent.checks.datadog") logger.setLevel(logging.DEBUG) logger.addHandler(logging.StreamHandler()) config = {'api_key':'my_apikey', 'ddforwarder_log': sys.argv[1]} dd = DdForwarder(logger, config) m = dd.check(config, move_end=False) while True: print m time.sleep(5) m = dd.check(config) if __name__ == '__main__': testddForwarder()
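# A minimal sketch of a function-based dogstream parser compatible with
# Dogstream._line_parser above. The "metric value" log format is hypothetical;
# only the call signature and the (metric, timestamp, value, attributes)
# return contract come from the code above.
import time

def counting_parser(logger, line, state, *args):
    # 'state' is Dogstream.parser_state, shared across invocations; here it
    # keeps the running line counter described in the comments above.
    state['lines_seen'] = state.get('lines_seen', 0) + 1
    fields = line.split()
    if len(fields) != 2:
        return None  # None tells _line_parser "no data for this line"
    metric, value = fields
    # Returning a dict with msg_title/msg_text would emit an event instead.
    return metric, time.time(), value, {'metric_type': 'gauge'}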
bsd-3-clause
appleseedhq/gaffer
python/GafferTest/StatsApplicationTest.py
1
3007
########################################################################## # # Copyright (c) 2016, Image Engine Design Inc. All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions are # met: # # * Redistributions of source code must retain the above # copyright notice, this list of conditions and the following # disclaimer. # # * Redistributions in binary form must reproduce the above # copyright notice, this list of conditions and the following # disclaimer in the documentation and/or other materials provided with # the distribution. # # * Neither the name of John Haddon nor the names of # any other contributors to this software may be used to endorse or # promote products derived from this software without specific prior # written permission. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS # IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, # THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR # PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR # CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, # EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, # PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR # PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF # LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING # NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS # SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # ########################################################################## import re import unittest import subprocess32 as subprocess import Gaffer import GafferTest class StatsApplicationTest( GafferTest.TestCase ) : def test( self ) : script = Gaffer.ScriptNode() script["frameRange"]["start"].setValue( 10 ) script["frameRange"]["end"].setValue( 50 ) script["variables"].addChild( Gaffer.NameValuePlug( "test", 20.5, flags = Gaffer.Plug.Flags.Default | Gaffer.Plug.Flags.Dynamic ) ) script["n"] = GafferTest.AddNode() script["b"] = Gaffer.Box() script["b"]["n"] = GafferTest.AddNode() script["fileName"].setValue( self.temporaryDirectory() + "/script.gfr" ) script.save() o = subprocess.check_output( [ "gaffer", "stats", script["fileName"].getValue() ] ) self.assertTrue( Gaffer.About.versionString() in o ) self.assertTrue( re.search( r"frameRange\.start\s*10", o ) ) self.assertTrue( re.search( r"frameRange\.end\s*50", o ) ) self.assertTrue( re.search( r"framesPerSecond\s*24.0", o ) ) self.assertTrue( re.search( r"test\s*20.5", o ) ) self.assertTrue( re.search( r"AddNode\s*2", o ) ) self.assertTrue( re.search( r"Box\s*1", o ) ) self.assertTrue( re.search( r"Total\s*3", o ) ) if __name__ == "__main__": unittest.main()
bsd-3-clause
iut-ibk/DynaMind-UrbanSim
3rdparty/opus/src/urbansim/zone/ln_price_SSS_times_ln_price_vacant_land.py
2
1942
# Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE

from opus_core.variables.variable import Variable
from variable_functions import my_attribute_label


class ln_price_SSS_times_ln_price_vacant_land(Variable):
    """Product of the de-meaned values of two zone price variables."""

    _return_type = "float32"
    price_vacant_land = "ln_avg_val_per_unit_vacant_land"

    def __init__(self, type):
        self.price = "ln_avg_val_per_unit_%s" % type
        Variable.__init__(self)

    def dependencies(self):
        return [my_attribute_label(self.price),
                my_attribute_label(self.price_vacant_land)]

    def compute(self, dataset_pool):
        p = self.get_dataset().get_attribute(self.price)
        vp = self.get_dataset().get_attribute(self.price_vacant_land)
        return (p - p.mean()) * (vp - vp.mean())


from opus_core.tests import opus_unittest
from urbansim.variable_test_toolbox import VariableTestToolbox
from numpy import array
from numpy import ma


class Tests(opus_unittest.OpusTestCase):
    variable_name = "urbansim.zone.ln_price_residential_times_ln_price_vacant_land"

    def test_my_inputs(self):
        price_residential = array([21, 22, 3, 42])
        price_vacant_land = array([2, 3, 4.5, 0])
        values = VariableTestToolbox().compute_variable(
            self.variable_name,
            {"zone": {
                "ln_avg_val_per_unit_residential": price_residential,
                "ln_avg_val_per_unit_vacant_land": price_vacant_land
            }},
            dataset="zone")
        should_be = array([(21-22)*(2-2.375), 0,
                           (3-22)*(4.5-2.375), (42-22)*(-2.375)])
        self.assertEqual(ma.allclose(values, should_be, rtol=1e-10), True,
                         msg="Error in " + self.variable_name)


if __name__ == '__main__':
    opus_unittest.main()
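# A quick standalone numpy check of the de-meaned product that compute()
# returns, reusing the fixture values from the test above.
import numpy as np

p = np.array([21, 22, 3, 42], dtype=float)   # mean 22.0
vp = np.array([2, 3, 4.5, 0])                # mean 2.375
print((p - p.mean()) * (vp - vp.mean()))     # [0.375, 0.0, -40.375, -47.5]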
gpl-2.0
mindnervestech/mnrp
addons/project_timesheet/report/task_report.py
336
4030
# -*- coding: utf-8 -*- ############################################################################## # # OpenERP, Open Source Management Solution # Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>). # # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU Affero General Public License as # published by the Free Software Foundation, either version 3 of the # License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU Affero General Public License for more details. # # You should have received a copy of the GNU Affero General Public License # along with this program. If not, see <http://www.gnu.org/licenses/>. # ############################################################################## from datetime import datetime from dateutil.relativedelta import relativedelta from openerp.osv import fields,osv from openerp import tools class report_timesheet_task_user(osv.osv): _name = "report.timesheet.task.user" _auto = False _order = "name" def get_hrs_timesheet(self, cr, uid, ids, name, args, context): result = {} for record in self.browse(cr, uid, ids, context): last_date = datetime.strptime(record.name, '%Y-%m-%d') + relativedelta(months=1) - relativedelta(days=1) obj = self.pool.get('hr_timesheet_sheet.sheet.day') sheet_ids = obj.search(cr, uid, [('sheet_id.user_id','=',record.user_id.id),('name','>=',record.name),('name','<=',last_date.strftime('%Y-%m-%d'))]) data_days = obj.read(cr, uid, sheet_ids, ['name','sheet_id.user_id','total_attendance']) total = 0.0 for day_attendance in data_days: total += day_attendance['total_attendance'] result[record.id] = total return result _columns = { 'name': fields.char('Date'), 'year': fields.char('Year', size=4, required=False, readonly=True), 'month':fields.selection([('01','January'), ('02','February'), ('03','March'), ('04','April'), ('05','May'), ('06','June'), ('07','July'), ('08','August'), ('09','September'), ('10','October'), ('11','November'), ('12','December')],'Month', readonly=True), 'user_id': fields.many2one('res.users', 'User',readonly=True), 'timesheet_hrs': fields.function(get_hrs_timesheet, string="Timesheet Hours"), 'task_hrs' : fields.float('Task Hours'), } def init(self, cr): tools.drop_view_if_exists(cr, 'report_timesheet_task_user') cr.execute(""" create or replace view report_timesheet_task_user as ( select ((r.id*12)+to_number(months.m_id,'999999'))::integer as id, months.name as name, r.id as user_id, to_char(to_date(months.name, 'YYYY/MM/DD'),'YYYY') as year, to_char(to_date(months.name, 'YYYY/MM/DD'),'MM') as month, (select sum(hours) from project_task_work where user_id = r.id and date between to_date(months.name, 'YYYY/MM/DD') and (to_date(months.name, 'YYYY/MM/DD') + interval '1 month' - interval '1 day') ) as task_hrs from res_users r, (select to_char(p.date,'YYYY-MM-01') as name, to_char(p.date,'YYYYMM') as m_id from project_task_work p union select to_char(h.name,'YYYY-MM-01') as name, to_char(h.name,'YYYYMM') as m_id from hr_timesheet_sheet_sheet_day h) as months group by r.id,months.m_id,months.name, to_char(to_date(months.name, 'YYYY/MM/DD'),'YYYY') , to_char(to_date(months.name, 'YYYY/MM/DD'),'MM') ) """) # vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
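# The month-window arithmetic used by get_hrs_timesheet() above (first of
# the month, plus one month, minus one day), in isolation; the sample date
# is arbitrary.
from datetime import datetime
from dateutil.relativedelta import relativedelta

start = datetime.strptime('2014-03-01', '%Y-%m-%d')
last_date = start + relativedelta(months=1) - relativedelta(days=1)
print(last_date.strftime('%Y-%m-%d'))  # 2014-03-31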
agpl-3.0
tdtrask/ansible
lib/ansible/modules/notification/nexmo.py
23
3629
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2014, Matt Martz <matt@sivel.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type


ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}


DOCUMENTATION = """
module: nexmo
short_description: Send a SMS via nexmo
description:
    - Send a SMS message via nexmo
version_added: 1.6
author: "Matt Martz (@sivel)"
options:
  api_key:
    description:
      - Nexmo API Key
    required: true
  api_secret:
    description:
      - Nexmo API Secret
    required: true
  src:
    description:
      - Nexmo Number to send from
    required: true
  dest:
    description:
      - Phone number(s) to send SMS message to
    required: true
  msg:
    description:
      - Message to text to send. Messages longer than 160 characters will be
        split into multiple messages
    required: true
  validate_certs:
    description:
      - If C(no), SSL certificates will not be validated. This should only be used
        on personally controlled sites using self-signed certificates.
    required: false
    default: 'yes'
    choices:
      - 'yes'
      - 'no'
extends_documentation_fragment:
    - url
"""

EXAMPLES = """
- name: Send notification message via Nexmo
  nexmo:
    api_key: 640c8a53
    api_secret: 0ce239a6
    src: 12345678901
    dest:
      - 10987654321
      - 16789012345
    msg: '{{ inventory_hostname }} completed'
  delegate_to: localhost
"""

import json

from ansible.module_utils.six.moves.urllib.parse import urlencode
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.urls import fetch_url, url_argument_spec


NEXMO_API = 'https://rest.nexmo.com/sms/json'


def send_msg(module):
    failed = list()
    responses = dict()
    msg = {
        'api_key': module.params.get('api_key'),
        'api_secret': module.params.get('api_secret'),
        'from': module.params.get('src'),
        'text': module.params.get('msg')
    }
    for number in module.params.get('dest'):
        msg['to'] = number
        url = "%s?%s" % (NEXMO_API, urlencode(msg))

        headers = dict(Accept='application/json')
        response, info = fetch_url(module, url, headers=headers)
        if info['status'] != 200:
            failed.append(number)
            responses[number] = dict(failed=True)

        try:
            responses[number] = json.load(response)
        except Exception:
            failed.append(number)
            responses[number] = dict(failed=True)
        else:
            for message in responses[number]['messages']:
                if int(message['status']) != 0:
                    failed.append(number)
                    responses[number] = dict(failed=True,
                                             **responses[number])

    if failed:
        msg = 'One or more messages failed to send'
    else:
        msg = ''

    module.exit_json(failed=bool(failed), msg=msg, changed=False,
                     responses=responses)


def main():
    argument_spec = url_argument_spec()
    argument_spec.update(
        dict(
            api_key=dict(required=True, no_log=True),
            api_secret=dict(required=True, no_log=True),
            src=dict(required=True, type='int'),
            dest=dict(required=True, type='list'),
            msg=dict(required=True),
        ),
    )

    module = AnsibleModule(
        argument_spec=argument_spec
    )

    send_msg(module)


if __name__ == '__main__':
    main()
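# A sketch of the request URL send_msg() builds for each destination,
# using only the standard library; the key, secret and numbers are
# placeholders.
try:
    from urllib.parse import urlencode  # Python 3
except ImportError:
    from urllib import urlencode        # Python 2

msg = {'api_key': 'KEY', 'api_secret': 'SECRET',
       'from': '12345678901', 'text': 'hello', 'to': '10987654321'}
print('%s?%s' % ('https://rest.nexmo.com/sms/json', urlencode(msg)))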
gpl-3.0
ubc/edx-platform
common/djangoapps/student/tests/test_password_policy.py
113
12723
# -*- coding: utf-8 -*- """ This test file will verify proper password policy enforcement, which is an option feature """ import json from django.test import TestCase from django.test.client import RequestFactory from django.core.urlresolvers import reverse from django.contrib.auth.models import AnonymousUser from django.utils.importlib import import_module from django.test.utils import override_settings from django.conf import settings from mock import patch from edxmako.tests import mako_middleware_process_request from external_auth.models import ExternalAuthMap from student.views import create_account @patch.dict("django.conf.settings.FEATURES", {'ENFORCE_PASSWORD_POLICY': True}) class TestPasswordPolicy(TestCase): """ Go through some password policy tests to make sure things are properly working """ def setUp(self): super(TestPasswordPolicy, self).setUp() self.url = reverse('create_account') self.request_factory = RequestFactory() self.url_params = { 'username': 'username', 'email': 'foo_bar@bar.com', 'name': 'username', 'terms_of_service': 'true', 'honor_code': 'true', } @override_settings(PASSWORD_MIN_LENGTH=6) def test_password_length_too_short(self): self.url_params['password'] = 'aaa' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 400) obj = json.loads(response.content) self.assertEqual( obj['value'], "Password: Invalid Length (must be 6 characters or more)", ) @override_settings(PASSWORD_MIN_LENGTH=6) def test_password_length_long_enough(self): self.url_params['password'] = 'ThisIsALongerPassword' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 200) obj = json.loads(response.content) self.assertTrue(obj['success']) @override_settings(PASSWORD_MAX_LENGTH=12) def test_password_length_too_long(self): self.url_params['password'] = 'ThisPasswordIsWayTooLong' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 400) obj = json.loads(response.content) self.assertEqual( obj['value'], "Password: Invalid Length (must be 12 characters or less)", ) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'UPPER': 3}) def test_password_not_enough_uppercase(self): self.url_params['password'] = 'thisshouldfail' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 400) obj = json.loads(response.content) self.assertEqual( obj['value'], "Password: Must be more complex (must contain 3 or more uppercase characters)", ) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'UPPER': 3}) def test_password_enough_uppercase(self): self.url_params['password'] = 'ThisShouldPass' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 200) obj = json.loads(response.content) self.assertTrue(obj['success']) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'LOWER': 3}) def test_password_not_enough_lowercase(self): self.url_params['password'] = 'THISSHOULDFAIL' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 400) obj = json.loads(response.content) self.assertEqual( obj['value'], "Password: Must be more complex (must contain 3 or more lowercase characters)", ) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'LOWER': 3}) def test_password_enough_lowercase(self): self.url_params['password'] = 'ThisShouldPass' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 200) obj = json.loads(response.content) 
self.assertTrue(obj['success']) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'DIGITS': 3}) def test_not_enough_digits(self): self.url_params['password'] = 'thishasnodigits' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 400) obj = json.loads(response.content) self.assertEqual( obj['value'], "Password: Must be more complex (must contain 3 or more digits)", ) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'DIGITS': 3}) def test_enough_digits(self): self.url_params['password'] = 'Th1sSh0uldPa88' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 200) obj = json.loads(response.content) self.assertTrue(obj['success']) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'PUNCTUATION': 3}) def test_not_enough_punctuations(self): self.url_params['password'] = 'thisshouldfail' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 400) obj = json.loads(response.content) self.assertEqual( obj['value'], "Password: Must be more complex (must contain 3 or more punctuation characters)", ) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'PUNCTUATION': 3}) def test_enough_punctuations(self): self.url_params['password'] = 'Th!sSh.uldPa$*' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 200) obj = json.loads(response.content) self.assertTrue(obj['success']) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'WORDS': 3}) def test_not_enough_words(self): self.url_params['password'] = 'thisshouldfail' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 400) obj = json.loads(response.content) self.assertEqual( obj['value'], "Password: Must be more complex (must contain 3 or more unique words)", ) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", {'WORDS': 3}) def test_enough_wordss(self): self.url_params['password'] = u'this should pass' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 200) obj = json.loads(response.content) self.assertTrue(obj['success']) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", { 'PUNCTUATION': 3, 'WORDS': 3, 'DIGITS': 3, 'LOWER': 3, 'UPPER': 3, }) def test_multiple_errors_fail(self): self.url_params['password'] = 'thisshouldfail' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 400) obj = json.loads(response.content) errstring = ( "Password: Must be more complex (" "must contain 3 or more uppercase characters, " "must contain 3 or more digits, " "must contain 3 or more punctuation characters, " "must contain 3 or more unique words" ")" ) self.assertEqual(obj['value'], errstring) @patch.dict("django.conf.settings.PASSWORD_COMPLEXITY", { 'PUNCTUATION': 3, 'WORDS': 3, 'DIGITS': 3, 'LOWER': 3, 'UPPER': 3, }) def test_multiple_errors_pass(self): self.url_params['password'] = u'tH1s Sh0u!d P3#$' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 200) obj = json.loads(response.content) self.assertTrue(obj['success']) @override_settings(PASSWORD_DICTIONARY=['foo', 'bar']) @override_settings(PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD=1) def test_dictionary_similarity_fail1(self): self.url_params['password'] = 'foo' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 400) obj = json.loads(response.content) self.assertEqual( obj['value'], "Password: Too 
similar to a restricted dictionary word.", ) @override_settings(PASSWORD_DICTIONARY=['foo', 'bar']) @override_settings(PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD=1) def test_dictionary_similarity_fail2(self): self.url_params['password'] = 'bar' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 400) obj = json.loads(response.content) self.assertEqual( obj['value'], "Password: Too similar to a restricted dictionary word.", ) @override_settings(PASSWORD_DICTIONARY=['foo', 'bar']) @override_settings(PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD=1) def test_dictionary_similarity_fail3(self): self.url_params['password'] = 'fo0' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 400) obj = json.loads(response.content) self.assertEqual( obj['value'], "Password: Too similar to a restricted dictionary word.", ) @override_settings(PASSWORD_DICTIONARY=['foo', 'bar']) @override_settings(PASSWORD_DICTIONARY_EDIT_DISTANCE_THRESHOLD=1) def test_dictionary_similarity_pass(self): self.url_params['password'] = 'this_is_ok' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 200) obj = json.loads(response.content) self.assertTrue(obj['success']) def test_with_unicode(self): self.url_params['password'] = u'四節比分和七年前' response = self.client.post(self.url, self.url_params) self.assertEqual(response.status_code, 200) obj = json.loads(response.content) self.assertTrue(obj['success']) @override_settings(PASSWORD_MIN_LENGTH=6, SESSION_ENGINE='django.contrib.sessions.backends.cache') def test_ext_auth_password_length_too_short(self): """ Tests that even if password policy is enforced, ext_auth registrations aren't subject to it """ self.url_params['password'] = 'aaa' # shouldn't pass validation request = self.request_factory.post(self.url, self.url_params) # now indicate we are doing ext_auth by setting 'ExternalAuthMap' in the session. request.session = import_module(settings.SESSION_ENGINE).SessionStore() # empty session extauth = ExternalAuthMap(external_id='withmap@stanford.edu', external_email='withmap@stanford.edu', internal_password=self.url_params['password'], external_domain='shib:https://idp.stanford.edu/') request.session['ExternalAuthMap'] = extauth request.user = AnonymousUser() mako_middleware_process_request(request) response = create_account(request) self.assertEqual(response.status_code, 200) obj = json.loads(response.content) self.assertTrue(obj['success']) class TestUsernamePasswordNonmatch(TestCase): """ Test that registration username and password fields differ """ def setUp(self): super(TestUsernamePasswordNonmatch, self).setUp() self.url = reverse('create_account') self.url_params = { 'username': 'username', 'email': 'foo_bar@bar.com', 'name': 'username', 'terms_of_service': 'true', 'honor_code': 'true', } def test_with_username_password_match(self): self.url_params['username'] = "foobar" self.url_params['password'] = "foobar" response = self.client.post(self.url, self.url_params) self.assertEquals(response.status_code, 400) obj = json.loads(response.content) self.assertEqual( obj['value'], "Username and password fields cannot match", ) def test_with_username_password_nonmatch(self): self.url_params['username'] = "foobar" self.url_params['password'] = "nonmatch" response = self.client.post(self.url, self.url_params) self.assertEquals(response.status_code, 200) obj = json.loads(response.content) self.assertTrue(obj['success'])
agpl-3.0
vigilv/scikit-learn
sklearn/feature_extraction/tests/test_dict_vectorizer.py
276
3790
# Authors: Lars Buitinck <L.J.Buitinck@uva.nl> # Dan Blanchard <dblanchard@ets.org> # License: BSD 3 clause from random import Random import numpy as np import scipy.sparse as sp from numpy.testing import assert_array_equal from sklearn.utils.testing import (assert_equal, assert_in, assert_false, assert_true) from sklearn.feature_extraction import DictVectorizer from sklearn.feature_selection import SelectKBest, chi2 def test_dictvectorizer(): D = [{"foo": 1, "bar": 3}, {"bar": 4, "baz": 2}, {"bar": 1, "quux": 1, "quuux": 2}] for sparse in (True, False): for dtype in (int, np.float32, np.int16): for sort in (True, False): for iterable in (True, False): v = DictVectorizer(sparse=sparse, dtype=dtype, sort=sort) X = v.fit_transform(iter(D) if iterable else D) assert_equal(sp.issparse(X), sparse) assert_equal(X.shape, (3, 5)) assert_equal(X.sum(), 14) assert_equal(v.inverse_transform(X), D) if sparse: # CSR matrices can't be compared for equality assert_array_equal(X.A, v.transform(iter(D) if iterable else D).A) else: assert_array_equal(X, v.transform(iter(D) if iterable else D)) if sort: assert_equal(v.feature_names_, sorted(v.feature_names_)) def test_feature_selection(): # make two feature dicts with two useful features and a bunch of useless # ones, in terms of chi2 d1 = dict([("useless%d" % i, 10) for i in range(20)], useful1=1, useful2=20) d2 = dict([("useless%d" % i, 10) for i in range(20)], useful1=20, useful2=1) for indices in (True, False): v = DictVectorizer().fit([d1, d2]) X = v.transform([d1, d2]) sel = SelectKBest(chi2, k=2).fit(X, [0, 1]) v.restrict(sel.get_support(indices=indices), indices=indices) assert_equal(v.get_feature_names(), ["useful1", "useful2"]) def test_one_of_k(): D_in = [{"version": "1", "ham": 2}, {"version": "2", "spam": .3}, {"version=3": True, "spam": -1}] v = DictVectorizer() X = v.fit_transform(D_in) assert_equal(X.shape, (3, 5)) D_out = v.inverse_transform(X) assert_equal(D_out[0], {"version=1": 1, "ham": 2}) names = v.get_feature_names() assert_true("version=2" in names) assert_false("version" in names) def test_unseen_or_no_features(): D = [{"camelot": 0, "spamalot": 1}] for sparse in [True, False]: v = DictVectorizer(sparse=sparse).fit(D) X = v.transform({"push the pram a lot": 2}) if sparse: X = X.toarray() assert_array_equal(X, np.zeros((1, 2))) X = v.transform({}) if sparse: X = X.toarray() assert_array_equal(X, np.zeros((1, 2))) try: v.transform([]) except ValueError as e: assert_in("empty", str(e)) def test_deterministic_vocabulary(): # Generate equal dictionaries with different memory layouts items = [("%03d" % i, i) for i in range(1000)] rng = Random(42) d_sorted = dict(items) rng.shuffle(items) d_shuffled = dict(items) # check that the memory layout does not impact the resulting vocabulary v_1 = DictVectorizer().fit([d_sorted]) v_2 = DictVectorizer().fit([d_shuffled]) assert_equal(v_1.vocabulary_, v_2.vocabulary_)
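# A minimal DictVectorizer round trip mirroring the fixtures above
# (default sorted feature order).
from sklearn.feature_extraction import DictVectorizer

D = [{"foo": 1, "bar": 3}, {"bar": 4, "baz": 2}]
v = DictVectorizer(sparse=False)
X = v.fit_transform(D)
print(v.get_feature_names())   # ['bar', 'baz', 'foo']
print(X)                       # [[3. 0. 1.], [4. 2. 0.]]
print(v.inverse_transform(X))  # recovers the nonzero entries of D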
bsd-3-clause
erjohnso/ansible
lib/ansible/modules/storage/infinidat/infini_export_client.py
29
5572
#!/usr/bin/python
# -*- coding: utf-8 -*-

# (c) 2016, Gregory Shulov (gregory.shulov@gmail.com)
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)

from __future__ import absolute_import, division, print_function
__metaclass__ = type


ANSIBLE_METADATA = {'metadata_version': '1.1',
                    'status': ['preview'],
                    'supported_by': 'community'}


DOCUMENTATION = '''
---
module: infini_export_client
version_added: 2.3
short_description: Create, Delete or Modify NFS Client(s) for existing exports on Infinibox
description:
    - This module creates, deletes or modifies NFS client(s) for existing exports on Infinibox.
author: Gregory Shulov (@GR360RY)
options:
  client:
    description:
      - Client IP or Range. Ranges can be defined as follows
        192.168.0.1-192.168.0.254.
    aliases: ['name']
    required: true
  state:
    description:
      - Creates/Modifies client when present and removes when absent.
    required: false
    default: "present"
    choices: [ "present", "absent" ]
  access_mode:
    description:
      - Read Write or Read Only Access.
    choices: [ "RW", "RO" ]
    default: RW
    required: false
  no_root_squash:
    description:
      - Don't squash root user to anonymous. Will be set to "no" on creation if not specified explicitly.
    choices: [ "yes", "no" ]
    default: no
    required: false
  export:
    description:
      - Name of the export.
    required: true
extends_documentation_fragment:
    - infinibox
requirements:
    - munch
'''

EXAMPLES = '''
- name: Make sure nfs client 10.0.0.1 is configured for export. Allow root access
  infini_export_client:
    client: 10.0.0.1
    access_mode: RW
    no_root_squash: yes
    export: /data
    user: admin
    password: secret
    system: ibox001

- name: Add multiple clients with RO access. Squash root privileges
  infini_export_client:
    client: "{{ item }}"
    access_mode: RO
    no_root_squash: no
    export: /data
    user: admin
    password: secret
    system: ibox001
  with_items:
    - 10.0.0.2
    - 10.0.0.3
'''

RETURN = '''
'''

try:
    from munch import Munch, unmunchify
    HAS_MUNCH = True
except ImportError:
    HAS_MUNCH = False

from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.infinibox import HAS_INFINISDK, api_wrapper, get_system, infinibox_argument_spec


def transform(d):
    return frozenset(d.items())


@api_wrapper
def get_export(module, system):
    """Return export if found. Fail module if not found"""
    try:
        export = system.exports.get(export_path=module.params['export'])
    except Exception:
        module.fail_json(msg="Export with export path {} not found".format(module.params['export']))

    return export


@api_wrapper
def update_client(module, export):
    """Update export client list"""
    changed = False

    client = module.params['client']
    access_mode = module.params['access_mode']
    no_root_squash = module.params['no_root_squash']

    client_list = export.get_permissions()
    client_not_in_list = True

    for index, item in enumerate(client_list):
        if item.client == client:
            client_not_in_list = False
            if item.access != access_mode:
                item.access = access_mode
                changed = True
            if item.no_root_squash is not no_root_squash:
                item.no_root_squash = no_root_squash
                changed = True

    # If access_mode and/or no_root_squash not passed as arguments to the module,
    # use access_mode with RW value and set no_root_squash to False
    if client_not_in_list:
        changed = True
        client_list.append(Munch(client=client, access=access_mode, no_root_squash=no_root_squash))

    if changed:
        for index, item in enumerate(client_list):
            client_list[index] = unmunchify(item)
        if not module.check_mode:
            export.update_permissions(client_list)

    module.exit_json(changed=changed)


@api_wrapper
def delete_client(module, export):
    """Remove client from the export client list"""
    changed = False

    client = module.params['client']
    client_list = export.get_permissions()

    for index, item in enumerate(client_list):
        if item.client == client:
            changed = True
            del client_list[index]

    if changed:
        for index, item in enumerate(client_list):
            client_list[index] = unmunchify(item)
        if not module.check_mode:
            export.update_permissions(client_list)

    module.exit_json(changed=changed)


def main():
    argument_spec = infinibox_argument_spec()
    argument_spec.update(
        dict(
            client=dict(required=True),
            access_mode=dict(choices=['RO', 'RW'], default='RW'),
            no_root_squash=dict(type='bool', default='no'),
            state=dict(default='present', choices=['present', 'absent']),
            export=dict(required=True)
        )
    )

    module = AnsibleModule(argument_spec, supports_check_mode=True)

    if not HAS_INFINISDK:
        module.fail_json(msg='infinisdk is required for this module')
    if not HAS_MUNCH:
        module.fail_json(msg='the python munch library is required for this module')

    system = get_system(module)
    export = get_export(module, system)

    if module.params['state'] == 'present':
        update_client(module, export)
    else:
        delete_client(module, export)


if __name__ == '__main__':
    main()
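# The shape of the permission entries that update_client() and
# delete_client() manipulate, per the munch usage above; the values are
# illustrative.
from munch import Munch, unmunchify

entry = Munch(client='10.0.0.1', access='RW', no_root_squash=False)
print(entry.client)       # attribute access, as in the loops above
print(unmunchify(entry))  # plain dict, as passed to update_permissions()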
gpl-3.0
haeusser/tensorflow
tensorflow/contrib/slim/python/slim/data/test_utils.py
163
3795
# Copyright 2016 The TensorFlow Authors. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. # ============================================================================== """Contains test utilities.""" from __future__ import absolute_import from __future__ import division from __future__ import print_function import os import numpy as np from tensorflow.core.example import example_pb2 from tensorflow.core.example import feature_pb2 from tensorflow.python.framework import constant_op from tensorflow.python.framework import dtypes from tensorflow.python.lib.io import tf_record from tensorflow.python.ops import image_ops def _encoded_int64_feature(ndarray): return feature_pb2.Feature(int64_list=feature_pb2.Int64List( value=ndarray.flatten().tolist())) def _encoded_bytes_feature(tf_encoded): encoded = tf_encoded.eval() def string_to_bytes(value): return feature_pb2.BytesList(value=[value]) return feature_pb2.Feature(bytes_list=string_to_bytes(encoded)) def _string_feature(value): value = value.encode('utf-8') return feature_pb2.Feature(bytes_list=feature_pb2.BytesList(value=[value])) def _encoder(image, image_format): assert image_format in ['jpeg', 'png'] if image_format == 'jpeg': tf_image = constant_op.constant(image, dtype=dtypes.uint8) return image_ops.encode_jpeg(tf_image) if image_format == 'png': tf_image = constant_op.constant(image, dtype=dtypes.uint8) return image_ops.encode_png(tf_image) def generate_image(image_shape, image_format='jpeg', label=0): """Generates an image and an example containing the encoded image. GenerateImage must be called within an active session. Args: image_shape: the shape of the image to generate. image_format: the encoding format of the image. label: the int64 labels for the image. Returns: image: the generated image. example: a TF-example with a feature key 'image/encoded' set to the serialized image and a feature key 'image/format' set to the image encoding format ['jpeg', 'png']. """ image = np.random.random_integers(0, 255, size=image_shape) tf_encoded = _encoder(image, image_format) example = example_pb2.Example(features=feature_pb2.Features(feature={ 'image/encoded': _encoded_bytes_feature(tf_encoded), 'image/format': _string_feature(image_format), 'image/class/label': _encoded_int64_feature(np.array(label)), })) return image, example.SerializeToString() def create_tfrecord_files(output_dir, num_files=3, num_records_per_file=10): """Creates TFRecords files. The method must be called within an active session. Args: output_dir: The directory where the files are stored. num_files: The number of files to create. num_records_per_file: The number of records per file. Returns: A list of the paths to the TFRecord files. 
""" tfrecord_paths = [] for i in range(num_files): path = os.path.join(output_dir, 'flowers.tfrecord-%d-of-%s' % (i, num_files)) tfrecord_paths.append(path) writer = tf_record.TFRecordWriter(path) for _ in range(num_records_per_file): _, example = generate_image(image_shape=(10, 10, 3)) writer.write(example) writer.close() return tfrecord_paths
apache-2.0
tstillwell2k17/GAE-BLOGAPP-MULTI_USER-DEMO
libs/webencodings/tests.py
168
6563
# coding: utf-8 """ webencodings.tests ~~~~~~~~~~~~~~~~~~ A basic test suite for Encoding. :copyright: Copyright 2012 by Simon Sapin :license: BSD, see LICENSE for details. """ from __future__ import unicode_literals from . import (lookup, LABELS, decode, encode, iter_decode, iter_encode, IncrementalDecoder, IncrementalEncoder, UTF8) def assert_raises(exception, function, *args, **kwargs): try: function(*args, **kwargs) except exception: return else: # pragma: no cover raise AssertionError('Did not raise %s.' % exception) def test_labels(): assert lookup('utf-8').name == 'utf-8' assert lookup('Utf-8').name == 'utf-8' assert lookup('UTF-8').name == 'utf-8' assert lookup('utf8').name == 'utf-8' assert lookup('utf8').name == 'utf-8' assert lookup('utf8 ').name == 'utf-8' assert lookup(' \r\nutf8\t').name == 'utf-8' assert lookup('u8') is None # Python label. assert lookup('utf-8 ') is None # Non-ASCII white space. assert lookup('US-ASCII').name == 'windows-1252' assert lookup('iso-8859-1').name == 'windows-1252' assert lookup('latin1').name == 'windows-1252' assert lookup('LATIN1').name == 'windows-1252' assert lookup('latin-1') is None assert lookup('LATİN1') is None # ASCII-only case insensitivity. def test_all_labels(): for label in LABELS: assert decode(b'', label) == ('', lookup(label)) assert encode('', label) == b'' for repeat in [0, 1, 12]: output, _ = iter_decode([b''] * repeat, label) assert list(output) == [] assert list(iter_encode([''] * repeat, label)) == [] decoder = IncrementalDecoder(label) assert decoder.decode(b'') == '' assert decoder.decode(b'', final=True) == '' encoder = IncrementalEncoder(label) assert encoder.encode('') == b'' assert encoder.encode('', final=True) == b'' # All encoding names are valid labels too: for name in set(LABELS.values()): assert lookup(name).name == name def test_invalid_label(): assert_raises(LookupError, decode, b'\xEF\xBB\xBF\xc3\xa9', 'invalid') assert_raises(LookupError, encode, 'é', 'invalid') assert_raises(LookupError, iter_decode, [], 'invalid') assert_raises(LookupError, iter_encode, [], 'invalid') assert_raises(LookupError, IncrementalDecoder, 'invalid') assert_raises(LookupError, IncrementalEncoder, 'invalid') def test_decode(): assert decode(b'\x80', 'latin1') == ('€', lookup('latin1')) assert decode(b'\x80', lookup('latin1')) == ('€', lookup('latin1')) assert decode(b'\xc3\xa9', 'utf8') == ('é', lookup('utf8')) assert decode(b'\xc3\xa9', UTF8) == ('é', lookup('utf8')) assert decode(b'\xc3\xa9', 'ascii') == ('é', lookup('ascii')) assert decode(b'\xEF\xBB\xBF\xc3\xa9', 'ascii') == ('é', lookup('utf8')) # UTF-8 with BOM assert decode(b'\xFE\xFF\x00\xe9', 'ascii') == ('é', lookup('utf-16be')) # UTF-16-BE with BOM assert decode(b'\xFF\xFE\xe9\x00', 'ascii') == ('é', lookup('utf-16le')) # UTF-16-LE with BOM assert decode(b'\xFE\xFF\xe9\x00', 'ascii') == ('\ue900', lookup('utf-16be')) assert decode(b'\xFF\xFE\x00\xe9', 'ascii') == ('\ue900', lookup('utf-16le')) assert decode(b'\x00\xe9', 'UTF-16BE') == ('é', lookup('utf-16be')) assert decode(b'\xe9\x00', 'UTF-16LE') == ('é', lookup('utf-16le')) assert decode(b'\xe9\x00', 'UTF-16') == ('é', lookup('utf-16le')) assert decode(b'\xe9\x00', 'UTF-16BE') == ('\ue900', lookup('utf-16be')) assert decode(b'\x00\xe9', 'UTF-16LE') == ('\ue900', lookup('utf-16le')) assert decode(b'\x00\xe9', 'UTF-16') == ('\ue900', lookup('utf-16le')) def test_encode(): assert encode('é', 'latin1') == b'\xe9' assert encode('é', 'utf8') == b'\xc3\xa9' assert encode('é', 'utf8') == b'\xc3\xa9' assert encode('é', 
'utf-16') == b'\xe9\x00' assert encode('é', 'utf-16le') == b'\xe9\x00' assert encode('é', 'utf-16be') == b'\x00\xe9' def test_iter_decode(): def iter_decode_to_string(input, fallback_encoding): output, _encoding = iter_decode(input, fallback_encoding) return ''.join(output) assert iter_decode_to_string([], 'latin1') == '' assert iter_decode_to_string([b''], 'latin1') == '' assert iter_decode_to_string([b'\xe9'], 'latin1') == 'é' assert iter_decode_to_string([b'hello'], 'latin1') == 'hello' assert iter_decode_to_string([b'he', b'llo'], 'latin1') == 'hello' assert iter_decode_to_string([b'hell', b'o'], 'latin1') == 'hello' assert iter_decode_to_string([b'\xc3\xa9'], 'latin1') == 'é' assert iter_decode_to_string([b'\xEF\xBB\xBF\xc3\xa9'], 'latin1') == 'é' assert iter_decode_to_string([ b'\xEF\xBB\xBF', b'\xc3', b'\xa9'], 'latin1') == 'é' assert iter_decode_to_string([ b'\xEF\xBB\xBF', b'a', b'\xc3'], 'latin1') == 'a\uFFFD' assert iter_decode_to_string([ b'', b'\xEF', b'', b'', b'\xBB\xBF\xc3', b'\xa9'], 'latin1') == 'é' assert iter_decode_to_string([b'\xEF\xBB\xBF'], 'latin1') == '' assert iter_decode_to_string([b'\xEF\xBB'], 'latin1') == 'ï»' assert iter_decode_to_string([b'\xFE\xFF\x00\xe9'], 'latin1') == 'é' assert iter_decode_to_string([b'\xFF\xFE\xe9\x00'], 'latin1') == 'é' assert iter_decode_to_string([ b'', b'\xFF', b'', b'', b'\xFE\xe9', b'\x00'], 'latin1') == 'é' assert iter_decode_to_string([ b'', b'h\xe9', b'llo'], 'x-user-defined') == 'h\uF7E9llo' def test_iter_encode(): assert b''.join(iter_encode([], 'latin1')) == b'' assert b''.join(iter_encode([''], 'latin1')) == b'' assert b''.join(iter_encode(['é'], 'latin1')) == b'\xe9' assert b''.join(iter_encode(['', 'é', '', ''], 'latin1')) == b'\xe9' assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16')) == b'\xe9\x00' assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16le')) == b'\xe9\x00' assert b''.join(iter_encode(['', 'é', '', ''], 'utf-16be')) == b'\x00\xe9' assert b''.join(iter_encode([ '', 'h\uF7E9', '', 'llo'], 'x-user-defined')) == b'h\xe9llo' def test_x_user_defined(): encoded = b'2,\x0c\x0b\x1aO\xd9#\xcb\x0f\xc9\xbbt\xcf\xa8\xca' decoded = '2,\x0c\x0b\x1aO\uf7d9#\uf7cb\x0f\uf7c9\uf7bbt\uf7cf\uf7a8\uf7ca' encoded = b'aa' decoded = 'aa' assert decode(encoded, 'x-user-defined') == (decoded, lookup('x-user-defined')) assert encode(decoded, 'x-user-defined') == encoded
mit
simontorres/goodman
dev-tools/ccd-list-based.py
1
2086
from __future__ import (absolute_import, division, print_function, unicode_literals) import argparse from goodman_ccd.image_processor import ImageProcessor def get_args(arguments=None): parser = argparse.ArgumentParser(description='''Extracts pipeline spectra and does wavelength calibration.''') parser.add_argument('--bias-list', action='store', default='bias', type=str, metavar='bias list', dest='bias_list', help='Name of the bias list') parser.add_argument('--flat-list', action='store', default='flat', type=str, metavar='flat list', dest='flat_list', help='Name of the flat list') parser.add_argument('--object-list', action='store', default='object', type=str, metavar='object list', dest='object_list', help='Name of the object list') parser.add_argument('--instrument', action='store', default='blue', choices=['red', 'blue'], type=str, metavar='instrument', dest='instrument', help='Name of the instrument or camera') parser.add_argument('--technique', action='store', default='spectroscopy', choices=['imaging', 'spectroscopy'], type=str, metavar='technique', dest='technique', help='Name of the technique used') args = parser.parse_args(args=arguments) return args if __name__ == '__main__': args = get_args() print(args)
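# Because get_args() forwards its optional argument list to parse_args(),
# it can be exercised without a real command line:
args = get_args(['--instrument', 'red', '--technique', 'imaging'])
print(args.instrument, args.technique)  # red imaging
print(args.bias_list)                   # bias (the default)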
bsd-3-clause
cmdunkers/DeeperMind
PythonEnv/lib/python2.7/site-packages/scipy/ndimage/tests/test_regression.py
123
1429
from __future__ import division, print_function, absolute_import

import numpy as np
from numpy.testing import assert_array_almost_equal, run_module_suite

import scipy.ndimage as ndimage


def test_byte_order_median():
    """Regression test for #413: median_filter does not handle byte orders."""
    a = np.arange(9, dtype='<f4').reshape(3, 3)
    ref = ndimage.filters.median_filter(a, (3, 3))
    b = np.arange(9, dtype='>f4').reshape(3, 3)
    t = ndimage.filters.median_filter(b, (3, 3))
    assert_array_almost_equal(ref, t)


def test_zoom_output_shape():
    """Ticket #643"""
    x = np.arange(12).reshape((3, 4))
    ndimage.zoom(x, 2, output=np.zeros((6, 8)))


def test_ticket_742():
    def SE(img, thresh=.7, size=4):
        mask = img > thresh
        rank = len(mask.shape)
        la, co = ndimage.label(mask,
                               ndimage.generate_binary_structure(rank, rank))
        slices = ndimage.find_objects(la)

    if np.dtype(np.intp) != np.dtype('i'):
        shape = (3, 1240, 1240)
        a = np.random.rand(np.product(shape)).reshape(shape)
        # shouldn't crash
        SE(a)


def test_gh_issue_3025():
    """Github issue #3025 - improper merging of labels"""
    d = np.zeros((60, 320))
    d[:, :257] = 1
    d[:, 260:] = 1
    d[36, 257] = 1
    d[35, 258] = 1
    d[35, 259] = 1
    assert ndimage.label(d, np.ones((3, 3)))[1] == 1


if __name__ == "__main__":
    run_module_suite()
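# The byte-order distinction test_byte_order_median() exercises, in isolation.
import numpy as np

a = np.arange(9, dtype='<f4')  # little-endian float32
b = np.arange(9, dtype='>f4')  # same values, big-endian float32
print(np.array_equal(a, b))    # True: equal values despite differing byte order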
bsd-3-clause
Mchakravartula/rockstor-core
src/rockstor/storageadmin/tests/test_nfs_export.py
4
12792
""" Copyright (c) 2012-2013 RockStor, Inc. <http://rockstor.com> This file is part of RockStor. RockStor is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. RockStor is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>. """ from rest_framework import status from rest_framework.test import APITestCase from system.services import systemctl import mock from mock import patch from storageadmin.tests.test_api import APITestMixin class NFSExportTests(APITestMixin, APITestCase): fixtures = ['fix4.json'] BASE_URL = '/api/nfs-exports' @classmethod def setUpClass(cls): super(NFSExportTests, cls).setUpClass() # post mocks cls.patch_mount_share = patch('storageadmin.views.nfs_exports.mount_share') cls.mock_mount_share = cls.patch_mount_share.start() cls.mock_mount_share.return_value = 'out','err', 0 cls.patch_is_share_mounted = patch('storageadmin.views.nfs_exports.is_share_mounted') cls.mock_is_share_mounted = cls.patch_is_share_mounted.start() cls.mock_is_share_mounted.return_value = False cls.patch_refresh_nfs_exports = patch('storageadmin.views.nfs_exports.refresh_nfs_exports') cls.mock_refresh_nfs_exports = cls.patch_refresh_nfs_exports.start() cls.mock_refresh_nfs_exports.return_value = 'out','err', 0 @classmethod def tearDownClass(cls): super(NFSExportTests, cls).tearDownClass() def test_get(self): """ Test GET request 1. Get base URL 2. Get request with id """ # get base URL self.get_base(self.BASE_URL) # get nfs-export with id response = self.client.get('%s/11' % self.BASE_URL) self.assertEqual(response.status_code, status.HTTP_200_OK, msg=response) def test_invalid_get(self): # get nfs-export with invalid id response = self.client.get('%s/12' % self.BASE_URL) self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND, msg=response) def test_post_requests(self): """ invalid nfs-export api operations 1. Add nfs-export without providing share names 2 Add nfs-export 3. 
Add nfs-export for the share that is already been exported """ # Add nfs-export without providing share names self.mock_refresh_nfs_exports.side_effect = None self.mock_refresh_nfs_exports.return_value = 'out','err', 0 data = {'host_str': '*','mod_choice': 'rw','sync_choice': 'async', } response = self.client.post(self.BASE_URL, data=data) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('Cannot export without specifying shares') self.assertEqual(response.data['detail'], e_msg) # happy path data1 = {'shares':('share2',), 'host_str': '*.edu' , 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.post(self.BASE_URL, data=data1) self.assertEqual(response.status_code, status.HTTP_200_OK, msg=response.data) # Add nfs-export for the share that is already been exported data1 = {'shares':('share1',), 'host_str': '*' , 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.post(self.BASE_URL, data=data1) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('An export already exists for the host string: *') self.assertEqual(response.data['detail'], e_msg) # Add nfs-export with invalid nfs-client data1 = {'shares':('share1',), 'host_str': '*' , 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.post(self.BASE_URL, data=data1) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('An export already exists for the host string: *') self.assertEqual(response.data['detail'], e_msg) def test_no_nfs_client(self): # Add nfs-export without specifying nfs-clients(host string). The # server side defaults the host string to * self.mock_refresh_nfs_exports.side_effect = None data1 = {'shares':('clone1',) , 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.post(self.BASE_URL, data=data1) self.assertEqual(response.status_code, status.HTTP_200_OK, msg=response.data) self.assertEqual(response.data['host_str'], '*') def test_invalid_nfs_client2(self): # invalid post request # Add nfs-export providing invalid nfs client self.mock_refresh_nfs_exports.side_effect = Exception() data1 = {'shares':('clone1',) , 'host_str': 'host%%%edu' , 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.post(self.BASE_URL, data=data1) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('Invalid Hostname or IP: host%%%edu') self.assertEqual(response.data['detail'], e_msg) self.mock_refresh_nfs_exports.side_effect = None def test_invalid_nfs_client3(self): # invalid put request # edit nfs-export providing invalid nfs-client self.mock_refresh_nfs_exports.side_effect = Exception() nfs_id = 11 data = {'shares':('share2',), 'host_str': 'host%%%edu' ,'admin_host':' ', 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.put('%s/%d' % (self.BASE_URL, nfs_id), data=data) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('Invalid Hostname or IP: host%%%edu') self.assertEqual(response.data['detail'], e_msg) self.mock_refresh_nfs_exports.side_effect = None def test_invalid_admin_host1(self): # invalid post request # Add nfs-export providing invalid admin host self.mock_refresh_nfs_exports.side_effect = Exception() data = {'shares':('clone1',), 'host_str': '*.edu' ,'admin_host':'admin%host', 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.post(self.BASE_URL, data=data) 
self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('Invalid admin host: admin%host') self.assertEqual(response.data['detail'], e_msg) self.mock_refresh_nfs_exports.side_effect = None def test_invalid_admin_host2(self): # invalid put request # edit nfs-export providing invalid admin host self.mock_refresh_nfs_exports.side_effect = Exception() nfs_id = 11 data = {'shares':('share2',), 'host_str': '*.edu' ,'admin_host':'admin%host', 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.put('%s/%d' % (self.BASE_URL, nfs_id), data=data) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('Invalid admin host: admin%host') self.assertEqual(response.data['detail'], e_msg) self.mock_refresh_nfs_exports.side_effect = None def test_put_requests(self): """ 1. Edit nfs-export 2. Edit nfs-export with no shares 3. Edit nfs-export that does not exists """ # Edit nfs-export with no shares self.mock_refresh_nfs_exports.side_effect = None self.mock_refresh_nfs_exports.return_value = 'out','err', 0 nfs_id = 11 data = {'host_str': '*.edu' , 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.put('%s/%d' % (self.BASE_URL, nfs_id), data=data) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('Cannot export without specifying shares') self.assertEqual(response.data['detail'], e_msg) # happy path nfs_id = 11 data = {'shares':('share2',), 'host_str': '*.edu' , 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.put('%s/%d' % (self.BASE_URL, nfs_id), data=data) self.assertEqual(response.status_code, status.HTTP_200_OK, msg=response.data) # happy path2 with admin host nfs_id = 11 data = {'shares':('share2',), 'host_str': '*.edu' , 'admin_host':'host', 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.put('%s/%d' % (self.BASE_URL, nfs_id), data=data) self.assertEqual(response.status_code, status.HTTP_200_OK, msg=response.data) # edit nfs-export that does not exist nfs_id = 5 data = {'shares':('share2',), 'host_str': '*.edu' , 'mod_choice': 'rw','sync_choice': 'async', } response = self.client.put('%s/%d' % (self.BASE_URL, nfs_id), data=data) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('NFS export with id: 5 does not exist') self.assertEqual(response.data['detail'], e_msg) def test_delete_requests(self): """ 1. Delete nfs-export 2. Delete nfs-export that does not exist """ # happy path nfs_id = 11 response = self.client.delete('%s/%d' % (self.BASE_URL, nfs_id)) self.assertEqual(response.status_code, status.HTTP_200_OK, msg=response.data) # Delete nfs-export that does nor exists nfs_id = 5 response = self.client.delete('%s/%d' % (self.BASE_URL, nfs_id)) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('NFS export with id: 5 does not exist') self.assertEqual(response.data['detail'], e_msg) def test_adv_nfs_get(self): """ Test GET request 1. Get base URL 2. 
Get request with id """ # get base URL self.get_base('/api/adv-nfs-exports') def test_adv_nfs_post_requests(self): # without specifying entries data = { } response = self.client.post('/api/adv-nfs-exports', data=data) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('Cannot export without specifying entries') self.assertEqual(response.data['detail'], e_msg) # happy path data = {'entries':["/export/share2 *.edu(rw,async,insecure)"] } response = self.client.post('/api/adv-nfs-exports', data=data) self.assertEqual(response.status_code, status.HTTP_200_OK, msg=response.data) # Invalid entries data = {'entries':["/export/share2"] } response = self.client.post('/api/adv-nfs-exports', data=data) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('Invalid exports input -- /export/share2') self.assertEqual(response.data['detail'], e_msg) # Invalid entries data = {'entries':["/export/share2 *.edu(rw,async,insecure"] } response = self.client.post('/api/adv-nfs-exports', data=data) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('Invalid exports input -- /export/share2 *.edu(rw,async,insecure. offending section: *.edu(rw,async,insecure') self.assertEqual(response.data['detail'], e_msg) # Invalid entries data = {'entries':['invalid'] } response = self.client.post('/api/adv-nfs-exports', data=data) self.assertEqual(response.status_code, status.HTTP_500_INTERNAL_SERVER_ERROR, msg=response.data) e_msg = ('Invalid exports input -- invalid') self.assertEqual(response.data['detail'], e_msg)
gpl-3.0
watonyweng/nova
nova/tests/functional/v3/test_access_ips.py
3
4341
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.

from oslo_config import cfg

from nova.tests.functional.v3 import api_sample_base
from nova.tests.unit.image import fake

CONF = cfg.CONF
CONF.import_opt('osapi_compute_extension',
                'nova.api.openstack.compute.legacy_v2.extensions')


class AccessIPsSampleJsonTest(api_sample_base.ApiSampleTestBaseV3):
    extension_name = 'os-access-ips'
    # TODO(gmann): Overriding '_api_version' till all functional tests
    # are merged between v2 and v2.1. After that base class variable
    # itself can be changed to 'v2'
    _api_version = 'v2'

    def _get_flags(self):
        f = super(AccessIPsSampleJsonTest, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.keypairs.Keypairs')
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.extended_ips.Extended_ips')
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.extended_ips_mac.'
            'Extended_ips_mac')
        return f

    def _servers_post(self, subs):
        response = self._do_post('servers', 'server-post-req', subs)
        subs.update(self._get_regexes())
        return self._verify_response('server-post-resp', subs, response, 202)

    def test_servers_post(self):
        subs = {
            'image_id': fake.get_valid_image_id(),
            'host': self._get_host(),
            'access_ip_v4': '1.2.3.4',
            'access_ip_v6': 'fe80::'
        }
        self._servers_post(subs)

    def test_servers_get(self):
        subs = {
            'image_id': fake.get_valid_image_id(),
            'host': self._get_host(),
            'access_ip_v4': '1.2.3.4',
            'access_ip_v6': 'fe80::'
        }
        uuid = self._servers_post(subs)
        response = self._do_get('servers/%s' % uuid)
        subs['hostid'] = '[a-f0-9]+'
        subs['id'] = uuid
        self._verify_response('server-get-resp', subs, response, 200)

    def test_servers_details(self):
        subs = {
            'image_id': fake.get_valid_image_id(),
            'host': self._get_host(),
            'access_ip_v4': '1.2.3.4',
            'access_ip_v6': 'fe80::'
        }
        uuid = self._servers_post(subs)
        response = self._do_get('servers/detail')
        subs['hostid'] = '[a-f0-9]+'
        subs['id'] = uuid
        self._verify_response('servers-details-resp', subs, response, 200)

    def test_servers_rebuild(self):
        subs = {
            'image_id': fake.get_valid_image_id(),
            'host': self._get_host(),
            'access_ip_v4': '1.2.3.4',
            'access_ip_v6': 'fe80::'
        }
        uuid = self._servers_post(subs)
        subs['access_ip_v4'] = "4.3.2.1"
        subs['access_ip_v6'] = '80fe::'
        response = self._do_post('servers/%s/action' % uuid,
                                 'server-action-rebuild', subs)
        subs['hostid'] = '[a-f0-9]+'
        subs['id'] = uuid
        self._verify_response('server-action-rebuild-resp', subs,
                              response, 202)

    def test_servers_update(self):
        subs = {
            'image_id': fake.get_valid_image_id(),
            'host': self._get_host(),
            'access_ip_v4': '1.2.3.4',
            'access_ip_v6': 'fe80::'
        }
        uuid = self._servers_post(subs)
        subs['access_ip_v4'] = "4.3.2.1"
        subs['access_ip_v6'] = '80fe::'
        response = self._do_put('servers/%s' % uuid, 'server-put-req', subs)
        subs['hostid'] = '[a-f0-9]+'
        subs['id'] = uuid
        self._verify_response('server-put-resp', subs, response, 200)
apache-2.0
caidongyun/pylearn2
pylearn2/sandbox/rnn/models/mlp_hook.py
44
17390
""" Code to hook into the MLP framework """ import functools import inspect import logging from theano.compat.six.moves import xrange from pylearn2.sandbox.rnn.space import SequenceSpace, SequenceDataSpace from pylearn2.space import CompositeSpace from pylearn2.utils.track_version import MetaLibVersion log = logging.getLogger(__name__) # These layers are guaranteed to be wrapped without issues WHITELIST = [ 'Softmax', 'SoftmaxPool', 'Linear', 'ProjectionLayer', 'Tanh', 'Sigmoid', 'RectifiedLinear', 'Softplus', 'SpaceConverter', 'WindowLayer' ] # These can't be wrapped BLACKLIST = [ 'CompositeLayer', 'FlattenerLayer' # Double check this ] class RNNWrapper(MetaLibVersion): """ This metaclass wraps the Layer class and all its children by intercepting the class creation. Methods can be wrapped by defining a `_wrapper` method. Note that the MLP class isn't wrapped in general, it suffices to wrap the component layers. Parameters ---------- See https://docs.python.org/2/reference/datamodel.html#object.__new__ """ def __new__(cls, name, bases, dct): wrappers = [attr[:-8] for attr in cls.__dict__.keys() if attr.endswith('_wrapper')] for wrapper in wrappers: if wrapper not in dct: for base in bases: method = getattr(base, wrapper, None) if method is not None: break else: method = dct[wrapper] dct[wrapper] = getattr(cls, wrapper + '_wrapper')(name, method) dct['rnn_friendly'] = False dct['_requires_reshape'] = False dct['_requires_unmask'] = False return type.__new__(cls, name, bases, dct) @classmethod def fprop_wrapper(cls, name, fprop): """ If a layer receives a SequenceSpace it should receive a tuple of (data, mask). For layers that cannot deal with this we do the following: - Unpack (data, mask) and perform the fprop with the data only - Add the mask back just before returning, so that the next layer receives a tuple again Besides the mask, we also need to take are of reshaping the data. This reshaping needs to happen even if we receive SequenceDataSpace data instead of SequenceSpace data. The format is (time, batch, data, ..., data) which needs to be reshaped to (time * batch, data, ..., data) before calling the original fprop, after which we need to reshape it back. 
Parameters ---------- fprop : method The fprop method to be wrapped """ @functools.wraps(fprop) def outer(self, state_below, return_all=False): if self._requires_reshape: if self._requires_unmask: state_below, mask = state_below if isinstance(state_below, tuple): ndim = state_below[0].ndim reshape_size = state_below[0].shape else: ndim = state_below.ndim reshape_size = state_below.shape if ndim > 2: if isinstance(state_below, tuple): inp_shape = ([[state_below[j].shape[0] * state_below[j].shape[1]] + [state_below[j].shape[i] for i in xrange(2, state_below[j].ndim)] for j in xrange(len(state_below))]) reshaped_below = () for i in xrange(len(state_below)): reshaped_below +=\ (state_below[i].reshape(inp_shape[i]),) else: inp_shape = ([state_below.shape[0] * state_below.shape[1]] + [state_below.shape[i] for i in xrange(2, state_below.ndim)]) reshaped_below = state_below.reshape(inp_shape) reshaped = fprop(self, reshaped_below) if isinstance(reshaped, tuple): output_shape = ([[reshape_size[0], reshape_size[1]] + [reshaped[j].shape[i] for i in xrange(1, reshaped[j].ndim)] for j in xrange(len(reshaped))]) state = () for i in xrange(len(reshaped)): state += (reshaped[i].reshape(output_shape[i]),) else: output_shape = ([reshape_size[0], reshape_size[1]] + [reshaped.shape[i] for i in xrange(1, reshaped.ndim)]) state = reshaped.reshape(output_shape) else: state = fprop(self, state_below) if self._requires_unmask: return (state, mask) else: return state else: # Not RNN-friendly, but not requiring reshape if return_all: return fprop(self, state_below, return_all) else: return fprop(self, state_below) return outer @classmethod def get_layer_monitoring_channels_wrapper(cls, name, get_layer_monitoring_channels): """ Reshapes and unmasks the data before retrieving the monitoring channels Parameters ---------- get_layer_monitoring_channels : method The get_layer_monitoring_channels method to be wrapped """ @functools.wraps(get_layer_monitoring_channels) def outer(self, state_below=None, state=None, targets=None): if self._requires_reshape and self.__class__.__name__ == name: if self._requires_unmask: if state_below is not None: state_below, state_below_mask = state_below if state is not None: state, state_mask = state if targets is not None: targets, targets_mask = targets if state_below is not None: state_below_shape = ([state_below.shape[0] * state_below.shape[1]] + [state_below.shape[i] for i in xrange(2, state_below.ndim)]) state_below = state_below.reshape(state_below_shape) if self._requires_unmask: state_below = state_below[ state_below_mask.flatten().nonzero() ] if state is not None: state_shape = ([state.shape[0] * state.shape[1]] + [state.shape[i] for i in xrange(2, state.ndim)]) state = state.reshape(state_shape) if self._requires_unmask: state = state[state_mask.flatten().nonzero()] if targets is not None: targets_shape = ([targets.shape[0] * targets.shape[1]] + [targets.shape[i] for i in xrange(2, targets.ndim)]) targets = targets.reshape(targets_shape) if self._requires_unmask: targets = targets[targets_mask.flatten().nonzero()] return get_layer_monitoring_channels(self, state_below, state, targets) else: # Not RNN-friendly, but not requiring reshape return get_layer_monitoring_channels(self, state_below, state, targets) return outer @classmethod def cost_wrapper(cls, name, cost): """ This layer wraps cost methods by reshaping the tensor (merging the time and batch axis) and then taking out all the masked values before applying the cost method. 
""" @functools.wraps(cost) def outer(self, Y, Y_hat): if self._requires_reshape: if self._requires_unmask: try: Y, Y_mask = Y Y_hat, Y_hat_mask = Y_hat except: log.warning("Lost the mask when wrapping cost. This " "can happen if this function is called " "from within another wrapped function. " "Most likely this won't cause any problem") return cost(self, Y, Y_hat) input_shape = ([Y.shape[0] * Y.shape[1]] + [Y.shape[i] for i in xrange(2, Y.ndim)]) reshaped_Y = Y.reshape(input_shape) if isinstance(Y_hat, tuple): input_shape = ([[Y_hat[j].shape[0] * Y_hat[j].shape[1]] + [Y_hat[j].shape[i] for i in xrange(2, Y_hat[j].ndim)] for j in xrange(len(Y_hat))]) reshaped_Y_hat = [] for i in xrange(len(Y_hat)): reshaped_Y_hat.append(Y_hat[i].reshape(input_shape[i])) reshaped_Y_hat = tuple(reshaped_Y_hat) else: input_shape = ([Y_hat.shape[0] * Y_hat.shape[1]] + [Y_hat.shape[i] for i in xrange(2, Y_hat.ndim)]) reshaped_Y_hat = Y_hat.reshape(input_shape) # Here we need to take the indices of only the unmasked data if self._requires_unmask: return cost(self, reshaped_Y[Y_mask.flatten().nonzero()], reshaped_Y_hat[Y_mask.flatten().nonzero()]) return cost(self, reshaped_Y, reshaped_Y_hat) else: # Not RNN-friendly, but not requiring reshape return cost(self, Y, Y_hat) return outer @classmethod def cost_matrix_wrapper(cls, name, cost_matrix): """ If the cost_matrix is called from within a cost function, everything is fine, since things were reshaped and unpacked. In any other case we raise a warning (after which it most likely crashes). """ @functools.wraps(cost_matrix) def outer(self, Y, Y_hat): if self._requires_reshape and inspect.stack()[1][3] != 'cost': log.warning("You are using the `cost_matrix` method on a " "layer which has been wrapped to accept sequence " "input, might or might not be problematic.") return cost_matrix(self, Y, Y_hat) return outer @classmethod def cost_from_cost_matrix_wrapper(cls, name, cost_from_cost_matrix): """ If the cost_from_cost_matrix is called from within a cost function, everything is fine, since things were reshaped and unpacked. In any other case we raise a warning (after which it most likely crashes). """ @functools.wraps(cost_from_cost_matrix) def outer(self, cost_matrix): if self._requires_reshape and inspect.stack()[1][3] != 'cost': log.warning("You are using the `cost_from_cost_matrix` method " "on a layer which has been wrapped to accept " "sequence input, might or might not be " "problematic.") return cost_from_cost_matrix(self, cost_matrix) return outer @classmethod def set_input_space_wrapper(cls, name, set_input_space): """ If this layer is not RNN-adapted, we intercept the call to the set_input_space method and set the space to a non-sequence space. This transformation is only applied to whitelisted layers. Parameters ---------- set_input_space : method The set_input_space method to be wrapped """ @functools.wraps(set_input_space) def outer(self, input_space): # The set_input_space method could be called for nested MLPs if not self.rnn_friendly and name != 'MLP': def find_sequence_space(input_space): """ Recursive helper function that searches the (possibly nested) input space to see if it contains SequenceSpace """ if isinstance(input_space, CompositeSpace): return any(find_sequence_space(component) for component in input_space.components) if isinstance(input_space, SequenceDataSpace): return True return False if find_sequence_space(input_space): if name in BLACKLIST: raise ValueError("%s received a SequenceSpace input, " "but is unable to deal with it. 
" "Please use an RNN-friendly " "alternative from the sandbox " "instead" % self) elif name not in WHITELIST: log.warning("%s received a SequenceSpace but " "is not able to deal with it. We will try " "to change to non-sequence spaces and " "reshape the data, but this is not " "guaranteed to work! It normally works if " "your input and output space are not " "nested and you are not calling other " "fprop methods from within your fprop." % self) if isinstance(input_space, SequenceSpace): self._requires_unmask = True self._requires_reshape = True input_space = input_space.space elif isinstance(input_space, SequenceDataSpace): self._requires_reshape = True input_space = input_space.space return set_input_space(self, input_space) return outer @classmethod def get_output_space_wrapper(cls, name, get_output_space): """ Same thing as set_input_space_wrapper. Parameters ---------- get_output_space : method The get_output_space method to be wrapped """ @functools.wraps(get_output_space) def outer(self): if (not self.rnn_friendly and self._requires_reshape and (not isinstance(get_output_space(self), SequenceSpace) and not isinstance(get_output_space(self), SequenceDataSpace))): if isinstance(self.mlp.input_space, SequenceSpace): return SequenceSpace(get_output_space(self)) elif isinstance(self.mlp.input_space, SequenceDataSpace): return SequenceDataSpace(get_output_space(self)) else: return get_output_space(self) return outer @classmethod def get_target_space_wrapper(cls, name, get_target_space): """ Same thing as set_input_space_wrapper. Parameters ---------- get_target_space : method The get_target_space method to be wrapped """ @functools.wraps(get_target_space) def outer(self): if (not self.rnn_friendly and self._requires_reshape and (not isinstance(get_target_space(self), SequenceSpace) and not isinstance(get_target_space(self), SequenceDataSpace))): if isinstance(self.mlp.input_space, SequenceSpace): return SequenceSpace(get_target_space(self)) elif isinstance(self.mlp.input_space, SequenceDataSpace): return SequenceDataSpace(get_target_space(self)) else: return get_target_space(self) return outer
bsd-3-clause
Erethon/synnefo
snf-cyclades-app/synnefo/management/pprint.py
8
17329
# Copyright (C) 2010-2014 GRNET S.A.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program.  If not, see <http://www.gnu.org/licenses/>.

import sys

from snf_django.management.utils import pprint_table
from synnefo.lib.ordereddict import OrderedDict
from snf_django.lib.astakos import UserCache
from synnefo.settings import (CYCLADES_SERVICE_TOKEN as ASTAKOS_TOKEN,
                              ASTAKOS_AUTH_URL)
from synnefo.db.models import Backend, pooled_rapi_client
from synnefo.logic.rapi import GanetiApiError
from synnefo.logic.reconciliation import (nics_from_instance,
                                          disks_from_instance)
from synnefo.management.common import get_image


def pprint_network(network, display_mails=False, stdout=None, title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "State of Network %s in DB" % network.id

    ucache = UserCache(ASTAKOS_AUTH_URL, ASTAKOS_TOKEN)
    userid = network.userid

    total_ips, free_ips = network.ip_count()

    db_network = OrderedDict([
        ("name", network.name),
        ("backend-name", network.backend_id),
        ("state", network.state),
        ("userid", userid),
        ("username", ucache.get_name(userid) if (display_mails and
                                                 userid is not None)
         else None),
        ("public", network.public),
        ("floating_ip_pool", network.floating_ip_pool),
        ("external_router", network.external_router),
        ("drained", network.drained),
        ("MAC prefix", network.mac_prefix),
        ("flavor", network.flavor),
        ("link", network.link),
        ("mode", network.mode),
        ("deleted", network.deleted),
        ("tags", ", ".join(network.backend_tag)),
        ("action", network.action),
        ("free IPs", free_ips),
        ("total IPs", total_ips),
    ])

    pprint_table(stdout, db_network.items(), None, separator=" | ",
                 title=title)


def pprint_network_subnets(network, stdout=None, title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "Subnets of network %s" % network.id

    subnets = list(network.subnets.values_list("id", "name", "ipversion",
                                               "cidr", "gateway", "dhcp",
                                               "deleted"))
    headers = ["ID", "Name", "Version", "CIDR", "Gateway", "DHCP", "Deleted"]
    pprint_table(stdout, subnets, headers, separator=" | ", title=title)


def pprint_network_backends(network, stdout=None, title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "State of Network %s in DB for each backend" % network.id

    bnets = list(network.backend_networks.values_list(
        "backend__clustername", "operstate", "deleted", "backendjobid",
        "backendopcode", "backendjobstatus"))
    headers = ["Backend", "State", "Deleted", "JobID", "Opcode", "JobStatus"]
    pprint_table(stdout, bnets, headers, separator=" | ", title=title)


def pprint_network_in_ganeti(network, stdout=None):
    if stdout is None:
        stdout = sys.stdout
    for backend in Backend.objects.exclude(offline=True):
        with pooled_rapi_client(backend) as client:
            try:
                g_net = client.GetNetwork(network.backend_id)
                ip_map = g_net.pop("map", {})
                pprint_table(stdout, g_net.items(), None,
                             title="State of network in backend: %s" %
                                   backend.clustername)
                if network.subnet4 is not None:
                    pprint_pool(None, ip_map, 80, stdout)
            except GanetiApiError as e:
                if e.code == 404:
                    stdout.write('Network does not exist in backend %s\n' %
                                 backend.clustername)
                else:
                    raise e


def pprint_subnet_in_db(subnet, stdout=None, title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "State of Subnet %s in DB" % subnet.id
    info = OrderedDict([("ID", subnet.id),
                        ("Network_ID", subnet.network.id),
                        # If a user names his subnet "-", what happens then?
                        ("User_ID", subnet.userid),
                        ("Name", "-" if subnet.name == "" else subnet.name),
                        ("IP_Version", subnet.ipversion),
                        ("CIDR", subnet.cidr),
                        ("Gateway", subnet.gateway),
                        ("Public", subnet.public),
                        ("DHCP/SLAAC", subnet.dhcp),
                        ("Host_Routes", subnet.host_routes),
                        ("DNS_Nameservers", subnet.dns_nameservers)])
    pprint_table(stdout, info.items(), None, separator=" | ", title=title)


def pprint_ippool(subnet, stdout=None, title=None):
    """Pretty print IP Pools of a subnet. Only IPv4 subnets have IP Pools"""
    if int(subnet.ipversion) != 4:
        return 0

    if stdout is None:
        stdout = sys.stdout

    stdout.write("IP Pools of subnet %s:\n\n" % subnet.id)

    for pool in subnet.get_ip_pools():
        size = pool.pool_size
        info = OrderedDict([("First_IP", pool.return_start()),
                            ("Last_IP", pool.return_end()),
                            ("Size", size),
                            ("Available", pool.count_available()),
                            ("Reserved", pool.count_reserved())])
        pprint_table(stdout, info.items(), None, separator=" | ", title=None)

        reserved = [pool.index_to_value(index)
                    for index, ip in enumerate(pool.reserved[:size])
                    if ip is False]
        if reserved != []:
            stdout.write("\nExternally Reserved IPs:\n\n")
            stdout.write(", ".join(reserved) + "\n")

        pprint_pool(None, pool.to_map(), 80, stdout)
        stdout.write("\n\n")


def pool_map_chunks(smap, step=64):
    for i in xrange(0, len(smap), step):
        yield smap[i:i + step]


def splitPoolMap(s, count):
    chunks = pool_map_chunks(s, count)
    acc = []
    count = 0
    for chunk in chunks:
        chunk_len = len(chunk)
        acc.append(str(count).rjust(3) + ' ' + chunk + ' ' +
                   str(count + chunk_len - 1).ljust(4))
        count += chunk_len
    return '\n' + '\n'.join(acc)


def pprint_pool(name, pool_map, step=80, stdout=None):
    if stdout is None:
        stdout = sys.stdout
    if name is not None:
        stdout.write("Pool: %s\n" % name)
    stdout.write(splitPoolMap(pool_map, count=step))
    stdout.write("\n")


def pprint_port(port, display_mails=False, stdout=None, title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "State of Port %s in DB" % port.id

    ucache = UserCache(ASTAKOS_AUTH_URL, ASTAKOS_TOKEN)
    userid = port.userid

    port = OrderedDict([
        ("id", port.id),
        ("name", port.name),
        ("userid", port.userid),
        ("username", ucache.get_name(userid) if display_mails else None),
        ("server", port.machine_id),
        ("network", port.network_id),
        ("device_owner", port.device_owner),
        ("mac", port.mac),
        ("state", port.state)])

    pprint_table(stdout, port.items(), None, separator=" | ", title=title)


def pprint_port_ips(port, stdout=None, title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "IP Addresses of Port %s" % port.id

    ips = list(port.ips.values_list("address", "network_id", "subnet_id",
                                    "subnet__cidr", "floating_ip"))
    headers = ["Address", "Network", "Subnet", "CIDR", "is_floating"]
    pprint_table(stdout, ips, headers, separator=" | ", title=title)


def pprint_port_in_ganeti(port, stdout=None, title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "State of Port %s in Ganeti" % port.id

    vm = port.machine
    if vm is None:
        stdout.write("Port is not attached to any instance.\n")
        return

    client = vm.get_client()
    try:
        vm_info = client.GetInstance(vm.backend_vm_id)
    except GanetiApiError as e:
        if e.code == 404:
            stdout.write("Port seems attached to server %s, but"
                         " server does not exist in backend.\n" % vm)
            return
        raise e

    nics = nics_from_instance(vm_info)
    try:
        gnt_nic = filter(lambda nic: nic.get("name") == port.backend_uuid,
                         nics)[0]
        gnt_nic["instance"] = vm_info["name"]
    except IndexError:
        stdout.write("Port %s is not attached to instance %s\n" %
                     (port.id, vm.id))
        return
    pprint_table(stdout, gnt_nic.items(), None, separator=" | ", title=title)
    vm.put_client(client)


def pprint_server(server, display_mails=False, stdout=None, title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "State of Server %s in DB" % server.id

    ucache = UserCache(ASTAKOS_AUTH_URL, ASTAKOS_TOKEN)
    userid = server.userid

    try:
        image = get_image(server.imageid, server.userid)['name']
    except:
        image = server.imageid

    server_dict = OrderedDict([
        ("id", server.id),
        ("name", server.name),
        ("userid", server.userid),
        ("username", ucache.get_name(userid) if display_mails else None),
        ("flavor_id", server.flavor_id),
        ("flavor_name", server.flavor.name),
        ("imageid", server.imageid),
        ("image_name", image),
        ("state", server.operstate),
        ("backend", server.backend),
        ("deleted", server.deleted),
        ("action", server.action),
        ("task", server.task),
        ("task_job_id", server.task_job_id),
        ("backendjobid", server.backendjobid),
        ("backendopcode", server.backendopcode),
        ("backendjobstatus", server.backendjobstatus),
        ("backend_time", server.backendtime),
    ])

    pprint_table(stdout, server_dict.items(), None, separator=" | ",
                 title=title)


def pprint_server_nics(server, stdout=None, title=None):
    if title is None:
        title = "Ports of Server %s" % server.id
    if stdout is None:
        stdout = sys.stdout

    nics = []
    for nic in server.nics.all():
        nics.append((nic.id, nic.name, nic.index, nic.mac, nic.ipv4_address,
                     nic.ipv6_address, nic.network, nic.firewall_profile,
                     nic.state))

    headers = ["ID", "Name", "Index", "MAC", "IPv4 Address", "IPv6 Address",
               "Network", "Firewall", "State"]
    pprint_table(stdout, nics, headers, separator=" | ", title=title)


def pprint_server_volumes(server, stdout=None, title=None):
    if title is None:
        title = "Volumes of Server %s" % server.id
    if stdout is None:
        stdout = sys.stdout

    vols = []
    for vol in server.volumes.filter(deleted=False):
        volume_type = vol.volume_type
        vols.append((vol.id, vol.name, vol.index, vol.size,
                     volume_type.template, volume_type.provider,
                     vol.status, vol.source))

    headers = ["ID", "Name", "Index", "Size", "Template", "Provider",
               "Status", "Source"]
    pprint_table(stdout, vols, headers, separator=" | ", title=title)


def pprint_server_in_ganeti(server, print_jobs=False, stdout=None,
                            title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "State of Server %s in Ganeti" % server.id

    client = server.get_client()
    try:
        server_info = client.GetInstance(server.backend_vm_id)
    except GanetiApiError as e:
        if e.code == 404:
            stdout.write("Server '%s' does not exist in backend '%s'\n"
                         % (server.id, server.backend.clustername))
            return
        raise e
    server.put_client(client)

    GANETI_INSTANCE_FIELDS = ('name', 'oper_state', 'admin_state', 'status',
                              'pnode', 'snode', 'network_port',
                              'disk_template', 'disk_usage', 'oper_ram',
                              'oper_vcpus', 'mtime')
    server_dict = OrderedDict([(k, server_info.get(k))
                               for k in GANETI_INSTANCE_FIELDS])

    pprint_table(stdout, server_dict.items(), None, separator=" | ",
                 title=title)
    stdout.write("\n")

    nics = nics_from_instance(server_info)
    nics_keys = ["ip", "mac", "name", "network"]
    nics_values = [[nic[key] for key in nics_keys] for nic in nics]
    pprint_table(stdout, nics_values, nics_keys, separator=" | ",
                 title="NICs of Server %s in Ganeti" % server.id)
    stdout.write("\n")

    disks = disks_from_instance(server_info)
    disks_keys = ["name", "size"]
    disks_values = [[disk[key] for key in disks_keys] for disk in disks]
    pprint_table(stdout, disks_values, disks_keys, separator=" | ",
                 title="Disks of Server %s in Ganeti" % server.id)

    if not print_jobs:
        return

    client = server.get_client()
    jobs = client.GetJobs(bulk=True)
    server_jobs = filter(
        lambda x: server.backend_vm_id in (" ".join(x.get("summary"))), jobs)

    GANETI_JOB_FIELDS = ('id', 'status', 'summary', 'opresult', 'opstatus',
                         'oplog', 'start_ts', 'end_ts')
    for server_job in server_jobs:
        stdout.write("\n")
        values = [server_job.get(k) for k in GANETI_JOB_FIELDS]
        pprint_table(stdout, zip(GANETI_JOB_FIELDS, values), None,
                     separator=" | ",
                     title="Ganeti Job %s" % server_job["id"])
    server.put_client(client)


def pprint_volume(volume, display_mails=False, stdout=None, title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "State of volume %s in DB" % volume.id

    ucache = UserCache(ASTAKOS_AUTH_URL, ASTAKOS_TOKEN)
    userid = volume.userid

    volume_type = volume.volume_type
    volume_dict = OrderedDict([
        ("id", volume.id),
        ("size", volume.size),
        ("disk_template", volume_type.template),
        ("disk_provider", volume_type.provider),
        ("server_id", volume.machine_id),
        ("userid", volume.userid),
        ("project", volume.project),
        ("username", ucache.get_name(userid) if display_mails else None),
        ("index", volume.index),
        ("name", volume.name),
        ("state", volume.status),
        ("delete_on_termination", volume.delete_on_termination),
        ("deleted", volume.deleted),
        ("backendjobid", volume.backendjobid),
    ])

    pprint_table(stdout, volume_dict.items(), None, separator=" | ",
                 title=title)


def pprint_volume_in_ganeti(volume, stdout=None, title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "State of volume %s in Ganeti" % volume.id

    vm = volume.machine
    if vm is None:
        stdout.write("volume is not attached to any instance.\n")
        return

    client = vm.get_client()
    try:
        vm_info = client.GetInstance(vm.backend_vm_id)
    except GanetiApiError as e:
        if e.code == 404:
            stdout.write("Volume seems attached to server %s, but"
                         " server does not exist in backend.\n" % vm)
            return
        raise e

    disks = disks_from_instance(vm_info)
    try:
        gnt_disk = filter(lambda disk:
                          disk.get("name") == volume.backend_volume_uuid,
                          disks)[0]
        gnt_disk["instance"] = vm_info["name"]
    except IndexError:
        stdout.write("Volume %s is not attached to instance %s\n" %
                     (volume.id, vm.id))
        return
    pprint_table(stdout, gnt_disk.items(), None, separator=" | ",
                 title=title)
    vm.put_client(client)


def pprint_volume_type(volume_type, stdout=None, title=None):
    if stdout is None:
        stdout = sys.stdout
    if title is None:
        title = "Volume Type %s" % volume_type.id

    vtype_info = OrderedDict([
        ("name", volume_type.name),
        ("disk template", volume_type.disk_template),
        ("deleted", volume_type.deleted),
    ])
    pprint_table(stdout, vtype_info.items(), separator=" | ", title=title)
gpl-3.0
yu239/Paddle
v1_api_demo/quick_start/api_predict.py
11
4496
# Copyright (c) 2016 PaddlePaddle Authors. All Rights Reserved
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os, sys
import numpy as np
from optparse import OptionParser
from py_paddle import swig_paddle, DataProviderConverter
from paddle.trainer.PyDataProvider2 import sparse_binary_vector
from paddle.trainer.config_parser import parse_config

"""
Usage: run following command to show help message.
python api_predict.py -h
"""


class QuickStartPrediction():
    def __init__(self, train_conf, dict_file, model_dir=None,
                 label_file=None):
        """
        train_conf: trainer configure.
        dict_file: word dictionary file name.
        model_dir: directory of model.
        """
        self.train_conf = train_conf
        self.dict_file = dict_file
        self.word_dict = {}
        self.dict_dim = self.load_dict()
        self.model_dir = model_dir
        if model_dir is None:
            self.model_dir = os.path.dirname(train_conf)
        self.label = None
        if label_file is not None:
            self.load_label(label_file)
        conf = parse_config(train_conf, "is_predict=1")
        self.network = swig_paddle.GradientMachine.createFromConfigProto(
            conf.model_config)
        self.network.loadParameters(self.model_dir)
        input_types = [sparse_binary_vector(self.dict_dim)]
        self.converter = DataProviderConverter(input_types)

    def load_dict(self):
        """
        Load dictionary from self.dict_file.
        """
        for line_count, line in enumerate(open(self.dict_file, 'r')):
            self.word_dict[line.strip().split('\t')[0]] = line_count
        return len(self.word_dict)

    def load_label(self, label_file):
        """
        Load label.
        """
        self.label = {}
        for v in open(label_file, 'r'):
            self.label[int(v.split('\t')[1])] = v.split('\t')[0]

    def get_index(self, data):
        """
        Transform word into integer index according to the dictionary.
        """
        words = data.strip().split()
        word_slot = [self.word_dict[w] for w in words if w in self.word_dict]
        return word_slot

    def batch_predict(self, data_batch):
        input = self.converter(data_batch)
        output = self.network.forwardTest(input)
        prob = output[0]["id"].tolist()
        print("predicting labels is:")
        print prob


def option_parser():
    usage = "python predict.py -n config -w model_dir -d dictionary -i input_file "
    parser = OptionParser(usage="usage: %s [options]" % usage)
    parser.add_option(
        "-n",
        "--tconf",
        action="store",
        dest="train_conf",
        help="network config")
    parser.add_option(
        "-d",
        "--dict",
        action="store",
        dest="dict_file",
        help="dictionary file")
    parser.add_option(
        "-b",
        "--label",
        action="store",
        dest="label",
        default=None,
        help="dictionary file")
    parser.add_option(
        "-c",
        "--batch_size",
        type="int",
        action="store",
        dest="batch_size",
        default=1,
        help="the batch size for prediction")
    parser.add_option(
        "-w",
        "--model",
        action="store",
        dest="model_path",
        default=None,
        help="model path")
    return parser.parse_args()


def main():
    options, args = option_parser()
    train_conf = options.train_conf
    batch_size = options.batch_size
    dict_file = options.dict_file
    model_path = options.model_path
    label = options.label
    swig_paddle.initPaddle("--use_gpu=0")
    predict = QuickStartPrediction(train_conf, dict_file, model_path, label)

    batch = []
    labels = []
    for line in sys.stdin:
        [label, text] = line.split("\t")
        labels.append(int(label))
        batch.append([predict.get_index(text)])
    print("labels is:")
    print labels
    predict.batch_predict(batch)


if __name__ == '__main__':
    main()
apache-2.0
mihailignatenko/erp
openerp/tools/amount_to_text.py
393
7719
# -*- coding: utf-8 -*-
##############################################################################
#
#    OpenERP, Open Source Management Solution
#    Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
#    This program is free software: you can redistribute it and/or modify
#    it under the terms of the GNU Affero General Public License as
#    published by the Free Software Foundation, either version 3 of the
#    License, or (at your option) any later version.
#
#    This program is distributed in the hope that it will be useful,
#    but WITHOUT ANY WARRANTY; without even the implied warranty of
#    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#    GNU Affero General Public License for more details.
#
#    You should have received a copy of the GNU Affero General Public License
#    along with this program.  If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################

#-------------------------------------------------------------
# French
#-------------------------------------------------------------

to_19_fr = (u'zéro', 'un', 'deux', 'trois', 'quatre', 'cinq', 'six', 'sept',
            'huit', 'neuf', 'dix', 'onze', 'douze', 'treize', 'quatorze',
            'quinze', 'seize', 'dix-sept', 'dix-huit', 'dix-neuf')
tens_fr = ('vingt', 'trente', 'quarante', 'Cinquante', 'Soixante',
           'Soixante-dix', 'Quatre-vingts', 'Quatre-vingt Dix')
denom_fr = ('', 'Mille', 'Millions', 'Milliards', 'Billions', 'Quadrillions',
            'Quintillion', 'Sextillion', 'Septillion', 'Octillion',
            'Nonillion', 'Décillion', 'Undecillion', 'Duodecillion',
            'Tredecillion', 'Quattuordecillion', 'Sexdecillion',
            'Septendecillion', 'Octodecillion', 'Icosillion', 'Vigintillion')


def _convert_nn_fr(val):
    """ convert a value < 100 to French
    """
    if val < 20:
        return to_19_fr[val]
    for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_fr)):
        if dval + 10 > val:
            if val % 10:
                return dcap + '-' + to_19_fr[val % 10]
            return dcap


def _convert_nnn_fr(val):
    """ convert a value < 1000 to French

        special cased because it is the level that kicks off the < 100
        special case.  The rest are more general.  This also allows you
        to get strings in the form of 'forty-five hundred' if called
        directly.
    """
    word = ''
    (mod, rem) = (val % 100, val // 100)
    if rem > 0:
        word = to_19_fr[rem] + ' Cent'
        if mod > 0:
            word += ' '
    if mod > 0:
        word += _convert_nn_fr(mod)
    return word


def french_number(val):
    if val < 100:
        return _convert_nn_fr(val)
    if val < 1000:
        return _convert_nnn_fr(val)
    for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_fr))):
        if dval > val:
            mod = 1000 ** didx
            l = val // mod
            r = val - (l * mod)
            ret = _convert_nnn_fr(l) + ' ' + denom_fr[didx]
            if r > 0:
                ret = ret + ', ' + french_number(r)
            return ret


def amount_to_text_fr(number, currency):
    number = '%.2f' % number
    units_name = currency
    list = str(number).split('.')
    start_word = french_number(abs(int(list[0])))
    end_word = french_number(int(list[1]))
    cents_number = int(list[1])
    cents_name = (cents_number > 1) and ' Cents' or ' Cent'
    final_result = (start_word + ' ' + units_name + ' ' + end_word +
                    ' ' + cents_name)
    return final_result


#-------------------------------------------------------------
# Dutch
#-------------------------------------------------------------

to_19_nl = ('Nul', 'Een', 'Twee', 'Drie', 'Vier', 'Vijf', 'Zes', 'Zeven',
            'Acht', 'Negen', 'Tien', 'Elf', 'Twaalf', 'Dertien', 'Veertien',
            'Vijftien', 'Zestien', 'Zeventien', 'Achttien', 'Negentien')
tens_nl = ('Twintig', 'Dertig', 'Veertig', 'Vijftig', 'Zestig', 'Zeventig',
           'Tachtig', 'Negentig')
denom_nl = ('', 'Duizend', 'Miljoen', 'Miljard', 'Triljoen', 'Quadriljoen',
            'Quintillion', 'Sextiljoen', 'Septillion', 'Octillion',
            'Nonillion', 'Decillion', 'Undecillion', 'Duodecillion',
            'Tredecillion', 'Quattuordecillion', 'Sexdecillion',
            'Septendecillion', 'Octodecillion', 'Novemdecillion',
            'Vigintillion')


def _convert_nn_nl(val):
    """ convert a value < 100 to Dutch
    """
    if val < 20:
        return to_19_nl[val]
    for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens_nl)):
        if dval + 10 > val:
            if val % 10:
                return dcap + '-' + to_19_nl[val % 10]
            return dcap


def _convert_nnn_nl(val):
    """ convert a value < 1000 to Dutch

        special cased because it is the level that kicks off the < 100
        special case.  The rest are more general.  This also allows you
        to get strings in the form of 'forty-five hundred' if called
        directly.
    """
    word = ''
    (mod, rem) = (val % 100, val // 100)
    if rem > 0:
        word = to_19_nl[rem] + ' Honderd'
        if mod > 0:
            word += ' '
    if mod > 0:
        word += _convert_nn_nl(mod)
    return word


def dutch_number(val):
    if val < 100:
        return _convert_nn_nl(val)
    if val < 1000:
        return _convert_nnn_nl(val)
    for (didx, dval) in ((v - 1, 1000 ** v) for v in range(len(denom_nl))):
        if dval > val:
            mod = 1000 ** didx
            l = val // mod
            r = val - (l * mod)
            ret = _convert_nnn_nl(l) + ' ' + denom_nl[didx]
            if r > 0:
                ret = ret + ', ' + dutch_number(r)
            return ret


def amount_to_text_nl(number, currency):
    number = '%.2f' % number
    units_name = currency
    list = str(number).split('.')
    start_word = dutch_number(int(list[0]))
    end_word = dutch_number(int(list[1]))
    cents_number = int(list[1])
    cents_name = (cents_number > 1) and 'cent' or 'cent'
    final_result = (start_word + ' ' + units_name + ' ' + end_word +
                    ' ' + cents_name)
    return final_result


#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------

_translate_funcs = {'fr': amount_to_text_fr, 'nl': amount_to_text_nl}


def add_amount_to_text_function(lang, func):
    _translate_funcs[lang] = func


#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be
#      implemented
def amount_to_text(nbr, lang='fr', currency='euro'):
    """ Converts an integer to its textual representation, using the language
        set in the context if any.

        Example::

            1654: mille six cent cinquante-quatre.
    """
#    if nbr > 1000000:
#        #TODO: use logger
#        print "WARNING: number too large '%d', can't translate it!" % (nbr,)
#        return str(nbr)
    if not _translate_funcs.has_key(lang):
        #TODO: use logger
        print "WARNING: no translation function found for lang: '%s'" % (lang,)
        #TODO: (default should be en) same as above
        lang = 'fr'
    return _translate_funcs[lang](abs(nbr), currency)


if __name__ == '__main__':
    from sys import argv

    lang = 'nl'
    if len(argv) < 2:
        for i in range(1, 200):
            print i, ">>", amount_to_text(i, lang)
        for i in range(200, 999999, 139):
            print i, ">>", amount_to_text(i, lang)
    else:
        print amount_to_text(int(argv[1]), lang)

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
agpl-3.0