repo_name stringlengths 5 100 | path stringlengths 4 294 | copies stringclasses 990
values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15
values |
|---|---|---|---|---|---|
Eyepea/pytest-asyncio | tests/test_simple.py | 1 | 2346 | """Quick'n'dirty unit tests for provided fixtures and markers."""
import asyncio
import os
import pytest
@asyncio.coroutine
def async_coro(loop):
    """Shared helper coroutine: yield control once, then return 'ok'.

    NOTE(review): uses the legacy generator-based coroutine style; both
    ``asyncio.coroutine`` and the ``loop=`` argument to ``asyncio.sleep``
    are removed in modern Python (3.10/3.11+).
    """
    yield from asyncio.sleep(0, loop=loop)
    return 'ok'
def test_event_loop_fixture(event_loop):
    """Test the injection of the event_loop fixture."""
    assert event_loop
    # The injected loop must be usable to actually run a coroutine.
    ret = event_loop.run_until_complete(async_coro(event_loop))
    assert ret == 'ok'
def test_event_loop_processpool_fixture(event_loop_process_pool):
    """Test the injection of the event_loop with a process pool fixture."""
    assert event_loop_process_pool

    ret = event_loop_process_pool.run_until_complete(
        async_coro(event_loop_process_pool))
    assert ret == 'ok'

    # Work submitted to the default executor must run in a *different*
    # process than the test itself, proving the pool is a process pool.
    this_pid = os.getpid()
    future = event_loop_process_pool.run_in_executor(None, os.getpid)
    pool_pid = event_loop_process_pool.run_until_complete(future)
    assert this_pid != pool_pid
@pytest.mark.asyncio
def test_asyncio_marker():
    """Test the asyncio pytest marker."""
    # Bare yield: presumably the plugin drives this generator as a
    # coroutine on its event loop — equivalent to an immediate sleep(0).
    yield  # sleep(0)
@pytest.mark.asyncio
def test_asyncio_marker_with_default_param(a_param=None):
    """Test that the asyncio marker works on a test with a default parameter."""
    yield  # sleep(0)
@pytest.mark.asyncio_process_pool
def test_asyncio_process_pool_marker(event_loop):
    """Test the asyncio_process_pool pytest marker."""
    ret = yield from async_coro(event_loop)
    assert ret == 'ok'
@pytest.mark.asyncio
def test_unused_port_fixture(unused_tcp_port, event_loop):
    """Test the unused TCP port fixture."""

    @asyncio.coroutine
    def closer(_, writer):
        # Connection handler that immediately closes the client stream.
        writer.close()

    server1 = yield from asyncio.start_server(closer, host='localhost',
                                              port=unused_tcp_port,
                                              loop=event_loop)

    # Binding the same port a second time must fail, proving the fixture
    # handed out a port that is genuinely in use once bound.
    with pytest.raises(IOError):
        yield from asyncio.start_server(closer, host='localhost',
                                        port=unused_tcp_port,
                                        loop=event_loop)

    server1.close()
    yield from server1.wait_closed()
class Test:
    """Test that asyncio marked functions work in test methods."""

    @pytest.mark.asyncio
    def test_asyncio_marker_method(self, event_loop):
        """Test the asyncio pytest marker in a Test class."""
        ret = yield from async_coro(event_loop)
        assert ret == 'ok'
| apache-2.0 |
beni55/edx-platform | cms/djangoapps/contentstore/management/commands/tests/test_create_course.py | 137 | 2495 | """
Unittests for creating a course in an chosen modulestore
"""
import unittest
import ddt
from django.core.management import CommandError, call_command
from contentstore.management.commands.create_course import Command
from xmodule.modulestore import ModuleStoreEnum
from xmodule.modulestore.tests.django_utils import ModuleStoreTestCase
from xmodule.modulestore.django import modulestore
class TestArgParsing(unittest.TestCase):
    """
    Tests for parsing arguments for the `create_course` management command
    """
    def setUp(self):
        super(TestArgParsing, self).setUp()
        self.command = Command()

    def test_no_args(self):
        # The command requires store, user, org, course and run.
        errstring = "create_course requires 5 arguments"
        with self.assertRaisesRegexp(CommandError, errstring):
            self.command.handle('create_course')

    def test_invalid_store(self):
        # "foo" is not a recognized modulestore type.
        with self.assertRaises(CommandError):
            self.command.handle("foo", "user@foo.org", "org", "course", "run")

    def test_xml_store(self):
        # The XML modulestore is read-only, so course creation is rejected.
        with self.assertRaises(CommandError):
            self.command.handle(ModuleStoreEnum.Type.xml, "user@foo.org", "org", "course", "run")

    def test_nonexistent_user_id(self):
        # Numeric user identifiers are looked up by primary key.
        errstring = "No user 99 found"
        with self.assertRaisesRegexp(CommandError, errstring):
            self.command.handle("split", "99", "org", "course", "run")

    def test_nonexistent_user_email(self):
        # Non-numeric identifiers are looked up by email address.
        errstring = "No user fake@example.com found"
        with self.assertRaisesRegexp(CommandError, errstring):
            self.command.handle("mongo", "fake@example.com", "org", "course", "run")
@ddt.ddt
class TestCreateCourse(ModuleStoreTestCase):
    """
    Unit tests for creating a course in either old mongo or split mongo via command line
    """

    def setUp(self):
        super(TestCreateCourse, self).setUp(create_user=True)

    @ddt.data(ModuleStoreEnum.Type.mongo, ModuleStoreEnum.Type.split)
    def test_all_stores_user_email(self, store):
        # Run the management command end-to-end for each store type, then
        # verify the course is retrievable through the modulestore API.
        call_command(
            "create_course",
            store,
            str(self.user.email),
            "org", "course", "run"
        )
        new_key = modulestore().make_course_key("org", "course", "run")
        self.assertTrue(
            modulestore().has_course(new_key),
            "Could not find course in {}".format(store)
        )

        # Confirm the course landed in the *requested* backing store.
        # pylint: disable=protected-access
        self.assertEqual(store, modulestore()._get_modulestore_for_courselike(new_key).get_modulestore_type())
| agpl-3.0 |
kurikaesu/arsenalsuite | cpp/lib/PyQt4/examples/mainwindows/menus.py | 20 | 11396 | #!/usr/bin/env python
#############################################################################
##
## Copyright (C) 2010 Riverbank Computing Limited.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
#############################################################################
from PyQt4 import QtCore, QtGui
class MainWindow(QtGui.QMainWindow):
    """PyQt4 'Menus' example window.

    Demonstrates a menu bar (File/Edit/Help), a nested Format sub-menu, a
    right-click context menu, and checkable/grouped actions.  Every slot
    simply reports the invoked menu entry in the central info label.
    """

    def __init__(self):
        super(MainWindow, self).__init__()

        widget = QtGui.QWidget()
        self.setCentralWidget(widget)

        # Expanding fillers above and below keep the info label centered
        # vertically as the window resizes.
        topFiller = QtGui.QWidget()
        topFiller.setSizePolicy(QtGui.QSizePolicy.Expanding,
                QtGui.QSizePolicy.Expanding)

        self.infoLabel = QtGui.QLabel(
                "<i>Choose a menu option, or right-click to invoke a context menu</i>",
                alignment=QtCore.Qt.AlignCenter)
        self.infoLabel.setFrameStyle(QtGui.QFrame.StyledPanel | QtGui.QFrame.Sunken)

        bottomFiller = QtGui.QWidget()
        bottomFiller.setSizePolicy(QtGui.QSizePolicy.Expanding,
                QtGui.QSizePolicy.Expanding)

        vbox = QtGui.QVBoxLayout()
        vbox.setMargin(5)
        vbox.addWidget(topFiller)
        vbox.addWidget(self.infoLabel)
        vbox.addWidget(bottomFiller)
        widget.setLayout(vbox)

        self.createActions()
        self.createMenus()

        message = "A context menu is available by right-clicking"
        self.statusBar().showMessage(message)

        self.setWindowTitle("Menus")
        self.setMinimumSize(160,160)
        self.resize(480,320)

    def contextMenuEvent(self, event):
        # Build a transient Edit-style menu and show it at the global
        # cursor position of the right-click.
        menu = QtGui.QMenu(self)
        menu.addAction(self.cutAct)
        menu.addAction(self.copyAct)
        menu.addAction(self.pasteAct)
        menu.exec_(event.globalPos())

    # --- Slots: each one just reports which menu entry was triggered. ---

    def newFile(self):
        self.infoLabel.setText("Invoked <b>File|New</b>")

    def open(self):
        self.infoLabel.setText("Invoked <b>File|Open</b>")

    def save(self):
        self.infoLabel.setText("Invoked <b>File|Save</b>")

    def print_(self):
        self.infoLabel.setText("Invoked <b>File|Print</b>")

    def undo(self):
        self.infoLabel.setText("Invoked <b>Edit|Undo</b>")

    def redo(self):
        self.infoLabel.setText("Invoked <b>Edit|Redo</b>")

    def cut(self):
        self.infoLabel.setText("Invoked <b>Edit|Cut</b>")

    def copy(self):
        self.infoLabel.setText("Invoked <b>Edit|Copy</b>")

    def paste(self):
        self.infoLabel.setText("Invoked <b>Edit|Paste</b>")

    def bold(self):
        self.infoLabel.setText("Invoked <b>Edit|Format|Bold</b>")

    def italic(self):
        self.infoLabel.setText("Invoked <b>Edit|Format|Italic</b>")

    def leftAlign(self):
        self.infoLabel.setText("Invoked <b>Edit|Format|Left Align</b>")

    def rightAlign(self):
        self.infoLabel.setText("Invoked <b>Edit|Format|Right Align</b>")

    def justify(self):
        self.infoLabel.setText("Invoked <b>Edit|Format|Justify</b>")

    def center(self):
        self.infoLabel.setText("Invoked <b>Edit|Format|Center</b>")

    def setLineSpacing(self):
        self.infoLabel.setText("Invoked <b>Edit|Format|Set Line Spacing</b>")

    def setParagraphSpacing(self):
        self.infoLabel.setText("Invoked <b>Edit|Format|Set Paragraph Spacing</b>")

    def about(self):
        self.infoLabel.setText("Invoked <b>Help|About</b>")
        QtGui.QMessageBox.about(self, "About Menu",
                "The <b>Menu</b> example shows how to create menu-bar menus "
                "and context menus.")

    def aboutQt(self):
        self.infoLabel.setText("Invoked <b>Help|About Qt</b>")

    def createActions(self):
        """Create all QActions used by the menus and the context menu."""
        self.newAct = QtGui.QAction("&New", self,
                shortcut=QtGui.QKeySequence.New,
                statusTip="Create a new file", triggered=self.newFile)

        self.openAct = QtGui.QAction("&Open...", self,
                shortcut=QtGui.QKeySequence.Open,
                statusTip="Open an existing file", triggered=self.open)

        self.saveAct = QtGui.QAction("&Save", self,
                shortcut=QtGui.QKeySequence.Save,
                statusTip="Save the document to disk", triggered=self.save)

        self.printAct = QtGui.QAction("&Print...", self,
                shortcut=QtGui.QKeySequence.Print,
                statusTip="Print the document", triggered=self.print_)

        self.exitAct = QtGui.QAction("E&xit", self, shortcut="Ctrl+Q",
                statusTip="Exit the application", triggered=self.close)

        self.undoAct = QtGui.QAction("&Undo", self,
                shortcut=QtGui.QKeySequence.Undo,
                statusTip="Undo the last operation", triggered=self.undo)

        self.redoAct = QtGui.QAction("&Redo", self,
                shortcut=QtGui.QKeySequence.Redo,
                statusTip="Redo the last operation", triggered=self.redo)

        self.cutAct = QtGui.QAction("Cu&t", self,
                shortcut=QtGui.QKeySequence.Cut,
                statusTip="Cut the current selection's contents to the clipboard",
                triggered=self.cut)

        self.copyAct = QtGui.QAction("&Copy", self,
                shortcut=QtGui.QKeySequence.Copy,
                statusTip="Copy the current selection's contents to the clipboard",
                triggered=self.copy)

        self.pasteAct = QtGui.QAction("&Paste", self,
                shortcut=QtGui.QKeySequence.Paste,
                statusTip="Paste the clipboard's contents into the current selection",
                triggered=self.paste)

        # The Bold/Italic actions render their own labels in the matching
        # font style so the menu previews the effect.
        self.boldAct = QtGui.QAction("&Bold", self, checkable=True,
                shortcut="Ctrl+B", statusTip="Make the text bold",
                triggered=self.bold)

        boldFont = self.boldAct.font()
        boldFont.setBold(True)
        self.boldAct.setFont(boldFont)

        self.italicAct = QtGui.QAction("&Italic", self, checkable=True,
                shortcut="Ctrl+I", statusTip="Make the text italic",
                triggered=self.italic)

        italicFont = self.italicAct.font()
        italicFont.setItalic(True)
        self.italicAct.setFont(italicFont)

        self.setLineSpacingAct = QtGui.QAction("Set &Line Spacing...", self,
                statusTip="Change the gap between the lines of a paragraph",
                triggered=self.setLineSpacing)

        self.setParagraphSpacingAct = QtGui.QAction(
                "Set &Paragraph Spacing...", self,
                statusTip="Change the gap between paragraphs",
                triggered=self.setParagraphSpacing)

        self.aboutAct = QtGui.QAction("&About", self,
                statusTip="Show the application's About box",
                triggered=self.about)

        self.aboutQtAct = QtGui.QAction("About &Qt", self,
                statusTip="Show the Qt library's About box",
                triggered=self.aboutQt)
        # Also open Qt's built-in About dialog in addition to our slot.
        self.aboutQtAct.triggered.connect(QtGui.qApp.aboutQt)

        self.leftAlignAct = QtGui.QAction("&Left Align", self, checkable=True,
                shortcut="Ctrl+L", statusTip="Left align the selected text",
                triggered=self.leftAlign)

        self.rightAlignAct = QtGui.QAction("&Right Align", self,
                checkable=True, shortcut="Ctrl+R",
                statusTip="Right align the selected text",
                triggered=self.rightAlign)

        self.justifyAct = QtGui.QAction("&Justify", self, checkable=True,
                shortcut="Ctrl+J", statusTip="Justify the selected text",
                triggered=self.justify)

        # NOTE(review): "Ctrl+C" presumably collides with the standard Copy
        # shortcut assigned to copyAct above — confirm against the original
        # Qt example before changing.
        self.centerAct = QtGui.QAction("&Center", self, checkable=True,
                shortcut="Ctrl+C", statusTip="Center the selected text",
                triggered=self.center)

        # Mutually-exclusive alignment actions: checking one unchecks the
        # others automatically.
        self.alignmentGroup = QtGui.QActionGroup(self)
        self.alignmentGroup.addAction(self.leftAlignAct)
        self.alignmentGroup.addAction(self.rightAlignAct)
        self.alignmentGroup.addAction(self.justifyAct)
        self.alignmentGroup.addAction(self.centerAct)
        self.leftAlignAct.setChecked(True)

    def createMenus(self):
        """Build the File/Edit/Help menus and the nested Format sub-menu."""
        self.fileMenu = self.menuBar().addMenu("&File")
        self.fileMenu.addAction(self.newAct)
        self.fileMenu.addAction(self.openAct)
        self.fileMenu.addAction(self.saveAct)
        self.fileMenu.addAction(self.printAct)
        self.fileMenu.addSeparator()
        self.fileMenu.addAction(self.exitAct)

        self.editMenu = self.menuBar().addMenu("&Edit")
        self.editMenu.addAction(self.undoAct)
        self.editMenu.addAction(self.redoAct)
        self.editMenu.addSeparator()
        self.editMenu.addAction(self.cutAct)
        self.editMenu.addAction(self.copyAct)
        self.editMenu.addAction(self.pasteAct)
        self.editMenu.addSeparator()

        self.helpMenu = self.menuBar().addMenu("&Help")
        self.helpMenu.addAction(self.aboutAct)
        self.helpMenu.addAction(self.aboutQtAct)

        # Format is a sub-menu of Edit; the separator carries a title that
        # some styles display as a section header.
        self.formatMenu = self.editMenu.addMenu("&Format")
        self.formatMenu.addAction(self.boldAct)
        self.formatMenu.addAction(self.italicAct)
        self.formatMenu.addSeparator().setText("Alignment")
        self.formatMenu.addAction(self.leftAlignAct)
        self.formatMenu.addAction(self.rightAlignAct)
        self.formatMenu.addAction(self.justifyAct)
        self.formatMenu.addAction(self.centerAct)
        self.formatMenu.addSeparator()
        self.formatMenu.addAction(self.setLineSpacingAct)
        self.formatMenu.addAction(self.setParagraphSpacingAct)
if __name__ == '__main__':
    # Standard PyQt bootstrap: create the application, show the window,
    # and hand control to the Qt event loop until it exits.
    import sys

    app = QtGui.QApplication(sys.argv)
    window = MainWindow()
    window.show()
    sys.exit(app.exec_())
| gpl-2.0 |
eri-trabiccolo/exaile | xl/version.py | 1 | 2005 | # Copyright (C) 2019 Adam Olsen
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
#
# The developers of the Exaile media player hereby grant permission
# for non-GPL compatible GStreamer and Exaile plugins to be used and
# distributed together with GStreamer and Exaile. This permission is
# above and beyond the permissions granted by the GPL license by which
# Exaile is covered. If you modify this code, you may extend this
# exception to your version of the code, but you are not obligated to
# do so. If you do not wish to do so, delete this exception statement
# from your version.
import os
import xdg
# Version components: the released version is "major.minor"; `extra`
# receives an optional local suffix (e.g. "+<git-rev>") below.
major = "3.4"
minor = "5"
extra = ""
def get_current_revision(directory):
    """
    Get the latest revision identifier for the branch contained in
    'directory'. Returns None if the directory is not a branch or
    the revision identifier cannot be found.
    """
    import subprocess

    try:
        with open(os.devnull, 'w') as devnull:
            # Run git inside `directory`; the previous version ignored the
            # argument and queried whatever the process CWD happened to be.
            out = subprocess.check_output([
                'git', 'rev-parse', '--short=7', 'HEAD'
            ], stderr=devnull, cwd=directory)
        # check_output returns bytes on Python 3; decode so the caller can
        # concatenate the revision into the version string.
        if isinstance(out, bytes):
            out = out.decode('ascii', 'replace')
        return out.strip()
    except (subprocess.CalledProcessError, OSError):
        # Not a git checkout, git missing, or directory unreadable.
        return None
# When running from a source checkout (signalled by xdg.local_hack),
# append the current git revision, producing e.g. "3.4.5+abc1234".
if xdg.local_hack:
    revision = get_current_revision(xdg.exaile_dir)
    if revision is not None:
        extra += "+" + revision

__version__ = major + "." + minor + extra
| gpl-2.0 |
tdfischer/organizer | organizer/viewsets.py | 1 | 1313 | from rest_framework import viewsets, status
from rest_framework.decorators import list_route, detail_route
from django.db.models import Q
class IntrospectiveViewSet(viewsets.ModelViewSet):
    """ModelViewSet that exposes its serializer's fields over the API and
    supports query-parameter driven filtering and sorting of its queryset."""

    @list_route(methods=['get'])
    def fields(self, request):
        """Return the serializer field metadata as ``{'fields': [...]}``.

        Note: ``Response`` must be imported from ``rest_framework.response``;
        the original module never imported it, so this endpoint raised
        ``NameError`` when hit.
        """
        fields = []
        # dict.items() works on both Python 2 and 3; the original used the
        # Python-2-only .iteritems().
        for fieldName, field in self.get_serializer().fields.items():
            fields.append({'label': field.label, 'key': fieldName})
        return Response({'fields': fields})

    def get_sorts(self):
        """Return the ordering keys taken from the ``sort`` query parameter."""
        sortKeys = []
        if 'sort' in self.request.query_params:
            sortKeys = [self.request.query_params.get('sort')]
        return sortKeys

    def get_filter(self):
        """Build a Q object from every query parameter except sort/page."""
        filterArg = Q()
        for param, value in self.request.query_params.items():
            if param == "sort":
                continue
            if param == "page":
                continue
            if param.endswith("__in"):
                # __in lookups expect an iterable of values.
                filterArg &= Q(**{param: [value]})
            else:
                filterArg &= Q(**{param: value})
        return filterArg

    def get_queryset(self):
        """Apply the request-derived filter, then any requested ordering."""
        results = super(IntrospectiveViewSet,
                        self).get_queryset().filter(self.get_filter())
        for sortKey in self.get_sorts():
            results = results.order_by(sortKey)
        return results
| agpl-3.0 |
NetDBNCKU/GAE-Conference-Web-App | django/contrib/localflavor/gb/gb_regions.py | 199 | 3504 | """
Sources:
English regions: http://www.statistics.gov.uk/geography/downloads/31_10_01_REGION_names_and_codes_12_00.xls
Northern Ireland regions: http://en.wikipedia.org/wiki/List_of_Irish_counties_by_area
Welsh regions: http://en.wikipedia.org/wiki/Preserved_counties_of_Wales
Scottish regions: http://en.wikipedia.org/wiki/Regions_and_districts_of_Scotland
"""
from django.utils.translation import ugettext_lazy as _
# English ceremonial-county choices.  Every label is wrapped in
# ugettext_lazy so it can be translated; the original left Cambridgeshire
# unwrapped — ("Cambridgeshire") is just a parenthesized string, not a
# translatable label — which is fixed here.
ENGLAND_REGION_CHOICES = (
    ("Bedfordshire", _("Bedfordshire")),
    ("Buckinghamshire", _("Buckinghamshire")),
    ("Cambridgeshire", _("Cambridgeshire")),
    ("Cheshire", _("Cheshire")),
    ("Cornwall and Isles of Scilly", _("Cornwall and Isles of Scilly")),
    ("Cumbria", _("Cumbria")),
    ("Derbyshire", _("Derbyshire")),
    ("Devon", _("Devon")),
    ("Dorset", _("Dorset")),
    ("Durham", _("Durham")),
    ("East Sussex", _("East Sussex")),
    ("Essex", _("Essex")),
    ("Gloucestershire", _("Gloucestershire")),
    ("Greater London", _("Greater London")),
    ("Greater Manchester", _("Greater Manchester")),
    ("Hampshire", _("Hampshire")),
    ("Hertfordshire", _("Hertfordshire")),
    ("Kent", _("Kent")),
    ("Lancashire", _("Lancashire")),
    ("Leicestershire", _("Leicestershire")),
    ("Lincolnshire", _("Lincolnshire")),
    ("Merseyside", _("Merseyside")),
    ("Norfolk", _("Norfolk")),
    ("North Yorkshire", _("North Yorkshire")),
    ("Northamptonshire", _("Northamptonshire")),
    ("Northumberland", _("Northumberland")),
    ("Nottinghamshire", _("Nottinghamshire")),
    ("Oxfordshire", _("Oxfordshire")),
    ("Shropshire", _("Shropshire")),
    ("Somerset", _("Somerset")),
    ("South Yorkshire", _("South Yorkshire")),
    ("Staffordshire", _("Staffordshire")),
    ("Suffolk", _("Suffolk")),
    ("Surrey", _("Surrey")),
    ("Tyne and Wear", _("Tyne and Wear")),
    ("Warwickshire", _("Warwickshire")),
    ("West Midlands", _("West Midlands")),
    ("West Sussex", _("West Sussex")),
    ("West Yorkshire", _("West Yorkshire")),
    ("Wiltshire", _("Wiltshire")),
    ("Worcestershire", _("Worcestershire")),
)
# Northern Ireland: the six traditional counties.
NORTHERN_IRELAND_REGION_CHOICES = (
    ("County Antrim", _("County Antrim")),
    ("County Armagh", _("County Armagh")),
    ("County Down", _("County Down")),
    ("County Fermanagh", _("County Fermanagh")),
    ("County Londonderry", _("County Londonderry")),
    ("County Tyrone", _("County Tyrone")),
)

# Wales: the eight preserved counties.
WALES_REGION_CHOICES = (
    ("Clwyd", _("Clwyd")),
    ("Dyfed", _("Dyfed")),
    ("Gwent", _("Gwent")),
    ("Gwynedd", _("Gwynedd")),
    ("Mid Glamorgan", _("Mid Glamorgan")),
    ("Powys", _("Powys")),
    ("South Glamorgan", _("South Glamorgan")),
    ("West Glamorgan", _("West Glamorgan")),
)

# Scotland: regions and island areas.
SCOTTISH_REGION_CHOICES = (
    ("Borders", _("Borders")),
    ("Central Scotland", _("Central Scotland")),
    ("Dumfries and Galloway", _("Dumfries and Galloway")),
    ("Fife", _("Fife")),
    ("Grampian", _("Grampian")),
    ("Highland", _("Highland")),
    ("Lothian", _("Lothian")),
    ("Orkney Islands", _("Orkney Islands")),
    ("Shetland Islands", _("Shetland Islands")),
    ("Strathclyde", _("Strathclyde")),
    ("Tayside", _("Tayside")),
    ("Western Isles", _("Western Isles")),
)

# The four constituent nations of Great Britain and Northern Ireland.
GB_NATIONS_CHOICES = (
    ("England", _("England")),
    ("Northern Ireland", _("Northern Ireland")),
    ("Scotland", _("Scotland")),
    ("Wales", _("Wales")),
)

# Flat union of all regional choices, in nation order.
GB_REGION_CHOICES = ENGLAND_REGION_CHOICES + NORTHERN_IRELAND_REGION_CHOICES + WALES_REGION_CHOICES + SCOTTISH_REGION_CHOICES
| bsd-3-clause |
svenstaro/ansible | lib/ansible/modules/cloud/amazon/rds_subnet_group.py | 71 | 5419 | #!/usr/bin/python
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Ansible module metadata consumed by the documentation tooling.
ANSIBLE_METADATA = {'metadata_version': '1.0',
                    'status': ['stableinterface'],
                    'supported_by': 'community'}

# YAML documentation block rendered by `ansible-doc`.
DOCUMENTATION = '''
---
module: rds_subnet_group
version_added: "1.5"
short_description: manage RDS database subnet groups
description:
    - Creates, modifies, and deletes RDS database subnet groups. This module has a dependency on python-boto >= 2.5.
options:
  state:
    description:
      - Specifies whether the subnet should be present or absent.
    required: true
    default: present
    aliases: []
    choices: [ 'present' , 'absent' ]
  name:
    description:
      - Database subnet group identifier.
    required: true
    default: null
    aliases: []
  description:
    description:
      - Database subnet group description. Only set when a new group is added.
    required: false
    default: null
    aliases: []
  subnets:
    description:
      - List of subnet IDs that make up the database subnet group.
    required: false
    default: null
    aliases: []
author: "Scott Anderson (@tastychutney)"
extends_documentation_fragment:
    - aws
    - ec2
'''

EXAMPLES = '''
# Add or change a subnet group
- rds_subnet_group:
    state: present
    name: norwegian-blue
    description: My Fancy Ex Parrot Subnet Group
    subnets:
      - subnet-aaaaaaaa
      - subnet-bbbbbbbb

# Remove a subnet group
- rds_subnet_group:
    state: absent
    name: norwegian-blue
'''

# boto is an optional dependency; record availability and fail gracefully
# from main() instead of crashing at import time.
try:
    import boto.rds
    from boto.exception import BotoServerError
    HAS_BOTO = True
except ImportError:
    HAS_BOTO = False
def main():
    """Entry point: create, modify or delete an RDS DB subnet group.

    Reads module parameters (state/name/description/subnets), validates
    the parameter combination for the requested state, connects to AWS,
    and reconciles the named subnet group, exiting with `changed` status.
    """
    argument_spec = ec2_argument_spec()
    argument_spec.update(dict(
            state = dict(required=True, choices=['present', 'absent']),
            name = dict(required=True),
            description = dict(required=False),
            subnets = dict(required=False, type='list'),
        )
    )
    module = AnsibleModule(argument_spec=argument_spec)

    if not HAS_BOTO:
        module.fail_json(msg='boto required for this module')

    state = module.params.get('state')
    # AWS lowercases subnet group names; normalize so comparisons match.
    group_name = module.params.get('name').lower()
    group_description = module.params.get('description')
    group_subnets = module.params.get('subnets') or {}

    # Cross-parameter validation: 'present' requires the descriptive
    # fields, 'absent' forbids them.
    if state == 'present':
        for required in ['name', 'description', 'subnets']:
            if not module.params.get(required):
                module.fail_json(msg = str("Parameter %s required for state='present'" % required))
    else:
        for not_allowed in ['description', 'subnets']:
            if module.params.get(not_allowed):
                module.fail_json(msg = str("Parameter %s not allowed for state='absent'" % not_allowed))

    # Retrieve any AWS settings from the environment.
    region, ec2_url, aws_connect_kwargs = get_aws_connection_info(module)

    if not region:
        module.fail_json(msg = str("Either region or AWS_REGION or EC2_REGION environment variable or boto config aws_region or ec2_region must be set."))

    try:
        conn = connect_to_aws(boto.rds, region, **aws_connect_kwargs)
    except boto.exception.BotoServerError as e:
        module.fail_json(msg = e.error_message)

    try:
        changed = False
        exists = False

        # Probe for the group; 'not found' is an expected condition, any
        # other AWS error is fatal.
        try:
            matching_groups = conn.get_all_db_subnet_groups(group_name, max_records=100)
            exists = len(matching_groups) > 0
        except BotoServerError as e:
            if e.error_code != 'DBSubnetGroupNotFoundFault':
                module.fail_json(msg = e.error_message)

        if state == 'absent':
            if exists:
                conn.delete_db_subnet_group(group_name)
                changed = True
        else:
            if not exists:
                new_group = conn.create_db_subnet_group(group_name, desc=group_description, subnet_ids=group_subnets)
                changed = True
            else:
                # Sort the subnet groups before we compare them
                matching_groups[0].subnet_ids.sort()
                group_subnets.sort()
                # Only call modify when something actually differs.
                if (matching_groups[0].name != group_name or
                        matching_groups[0].description != group_description or
                        matching_groups[0].subnet_ids != group_subnets):
                    changed_group = conn.modify_db_subnet_group(group_name, description=group_description, subnet_ids=group_subnets)
                    changed = True
    except BotoServerError as e:
        module.fail_json(msg = e.error_message)

    module.exit_json(changed=changed)
# import module snippets
# Legacy Ansible style: the star imports provide AnsibleModule,
# ec2_argument_spec, get_aws_connection_info and connect_to_aws used above.
from ansible.module_utils.basic import *
from ansible.module_utils.ec2 import *

if __name__ == '__main__':
    main()
| gpl-3.0 |
root-mirror/root | bindings/pyroot/pythonizations/python/ROOT/pythonization/_drawables.py | 22 | 2760 | # Author: Enric Tejedor CERN 04/2019
################################################################################
# Copyright (C) 1995-2019, Rene Brun and Fons Rademakers. #
# All rights reserved. #
# #
# For the licensing terms see $ROOTSYS/LICENSE. #
# For the list of contributors see $ROOTSYS/README/CREDITS. #
################################################################################
from ROOT import pythonization
from cppyy.gbl import kCanDelete
from libcppyy import SetOwnership
def _Draw(self, *args):
    """Ownership-aware replacement for Draw on drawable ROOT classes.

    Parameters:
    self: Object being drawn
    args: arguments for Draw
    """
    self._OriginalDraw(*args)

    # When drawing a TPad, it gets added to the list of primititves of its
    # mother TPad (fMother) with kCanDelete == 1. This means that, when
    # fMother is destructed, it will attempt to destroy its child TPad too.
    # To prevent a double delete, here we instruct the Python proxy of the
    # child C++ TPad being drawn not to destroy the latter (ROOT-10060).
    #
    # A similar principle is applied to TButton, TColorWheel, TPolyLine3D,
    # TPolyMarker and TPolyMarker3D, whose kCanDelete bit is set in one of
    # their constructors. Later, when being drawn, they are appended to
    # the list of primitives of gPad.
    if self.TestBit(kCanDelete):
        SetOwnership(self, False)
        # Ownership has been handed over once and for all, so subsequent
        # draws of this instance can go straight to the original method.
        self.Draw = self._OriginalDraw
def _init(self, *args):
    """Ownership-aware replacement for __init__ (used for TSlider).

    Parameters:
    self: Object being initialized
    args: arguments for __init__
    """
    self._original__init__(*args)

    # TSlider is a special case, since it is appended to gPad already
    # in one of its constructors, after setting kCanDelete.
    # Therefore, we need to set the ownership here and not in Draw
    # (TSlider does not need to be drawn). This is ROOT-10095.
    if self.TestBit(kCanDelete):
        SetOwnership(self, False)
        # We have already set the ownership while initializing,
        # so we do not need the custom Draw inherited from TPad to
        # do it again in case it is executed.
        self.Draw = self._OriginalDraw
@pythonization()
def pythonize_drawables(klass, name):
    """Install ownership-fixing hooks on drawable ROOT classes.

    Parameters:
    klass: class to be pythonized
    name: name of the class
    """
    # Classes whose Draw appends them to a pad with kCanDelete set.
    draw_patched = ('TPad', 'TButton', 'TColorWheel',
                    'TPolyLine3D', 'TPolyMarker', 'TPolyMarker3D')

    if name == 'TSlider':
        # TSlider registers itself with gPad during construction, so the
        # ownership fix must run in __init__ rather than in Draw.
        klass._original__init__ = klass.__init__
        klass.__init__ = _init
    elif name in draw_patched:
        # Wrap Draw so Python releases ownership of kCanDelete objects.
        klass._OriginalDraw = klass.Draw
        klass.Draw = _Draw

    return True
| lgpl-2.1 |
anubhav929/eden | modules/tests/hrm/create_volunteer.py | 1 | 2605 | """ Sahana Eden Automated Test - HRM002 Create Volunteer
@copyright: 2011-2012 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
from gluon import current
import unittest
from tests.web2unittest import SeleniumUnitTest
from selenium.common.exceptions import NoSuchElementException
from s3 import s3_debug
from tests import *
#import unittest, re, time
import time
class CreateVolunteer(SeleniumUnitTest):
    """Selenium test creating a volunteer record via the HRM module."""

    def test_hrm002_create_volunteer(self):
        """
        @case: HRM002
        @description: Create Volunteer
        @TestDoc: https://docs.google.com/spreadsheet/ccc?key=0AmB3hMcgB-3idG1XNGhhRG9QWF81dUlKLXpJaFlCMFE
        @Test Wiki: http://eden.sahanafoundation.org/wiki/DeveloperGuidelines/Testing
        """
        print "\n"

        # Log in as admin and open the volunteer-creation form directly.
        self.login(account="admin", nexturl="vol/volunteer/create")

        # Each tuple is (field, value, widget/table hint) consumed by the
        # SeleniumUnitTest.create() form-filling helper.
        self.create("hrm_human_resource",
                    [( "organisation_id",
                       "Acme Suppliers",
                       "autocomplete"),
                     ( "first_name",
                       "John",
                       "pr_person"),
                     ( "last_name",
                       "Thompson",
                       "pr_person"),
                     ( "email",
                       "test8@notavalidemail.com",
                       "pr_person"),
                     ( "job_role_id",
                       "Security",
                       "option"),
                     ]
                    )
| mit |
akretion/project-service | analytic_hours_block/__openerp__.py | 21 | 2052 | # -*- coding: utf-8 -*-
##############################################################################
#
# Author: Vincent Renaville, ported by Joel Grand-Guillaume
# Copyright 2010-2012 Camptocamp SA
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
# Odoo/OpenERP module manifest for the hours-block add-on.
{
    "name": "Project Hours Blocks Management",
    "version": "1.5",
    "category": "Generic Modules/Projects & Services",
    "description": """
Project Hours Blocks Management
===============================

This module allows you to handle hours blocks,
to follow for example the user support contracts.
This means, you sell a product of type "hours block"
then you input the spent hours on the hours block and
you can track and follow how much has been used.

""",
    "author": "Camptocamp,Odoo Community Association (OCA)",
    "license": 'AGPL-3',
    "website": "http://www.camptocamp.com",
    "depends": [
        "account",
        "hr_timesheet_invoice",
        "analytic",
        "project",
    ],
    # Data files loaded at install time; "report.xml" was listed twice in
    # the original manifest, causing it to be processed twice on install.
    "data": [
        "report.xml",
        "hours_block_view.xml",
        "hours_block_data.xml",
        "hours_block_menu.xml",
        "product_view.xml",
        "project_view.xml",
        "security/hours_block_security.xml",
        "security/ir.model.access.csv",
    ],
    "active": False,
    "installable": False
}
| agpl-3.0 |
rocopartners/django-oscar | src/oscar/templatetags/history_tags.py | 27 | 1936 | from django.utils import six
from django.utils.six.moves.urllib import parse
from django import template
from oscar.core.loading import get_model
from django.utils.translation import ugettext_lazy as _
from django.core.urlresolvers import resolve, Resolver404
from oscar.apps.customer import history
Site = get_model('sites', 'Site')
register = template.Library()
@register.inclusion_tag('customer/history/recently_viewed_products.html',
                        takes_context=True)
def recently_viewed_products(context):
    """Render the products the current user viewed most recently."""
    current_request = context['request']
    return {
        'products': history.get(current_request),
        'request': current_request,
    }
@register.assignment_tag(takes_context=True)  # noqa (too complex (11))
def get_back_button(context):
    """
    Show back button, custom title available for different urls, for
    example 'Back to search results', no back button if user came from other
    site.

    Returns a dict with ``url``, ``title`` and ``match`` keys, or ``None``
    when no back button should be shown.
    """
    request = context.get('request', None)
    if not request:
        raise Exception('Cannot get request from context')

    referrer = request.META.get('HTTP_REFERER', None)
    if not referrer:
        return None

    try:
        url = parse.urlparse(referrer)
    except ValueError:
        # urlparse raises ValueError on malformed referrers (e.g. a bad
        # port); a bare ``except:`` here also swallowed KeyboardInterrupt.
        return None

    if request.get_host() != url.netloc:
        try:
            Site.objects.get(domain=url.netloc)
        except Site.DoesNotExist:
            # Came from somewhere else, don't show back button:
            return None

    try:
        match = resolve(url.path)
    except Resolver404:
        return None

    # This dict can be extended to link back to other browsing pages
    titles = {
        'search:search': _('Back to search results'),
    }
    title = titles.get(match.view_name, None)

    if title is None:
        return None

    return {'url': referrer, 'title': six.text_type(title), 'match': match}
| bsd-3-clause |
flipjack/suventa | allauth/socialaccount/providers/__init__.py | 7 | 1349 | from django.conf import settings
from allauth.compat import importlib
class ProviderRegistry(object):
    """Registry mapping social-provider ids to provider instances."""

    def __init__(self):
        self.provider_map = {}
        self.loaded = False

    def get_list(self):
        """Return every registered provider instance."""
        self.load()
        return self.provider_map.values()

    def register(self, cls):
        """Instantiate ``cls`` and file it under its ``id``."""
        self.provider_map[cls.id] = cls()

    def by_id(self, id):
        """Look up a single provider instance by id."""
        self.load()
        return self.provider_map[id]

    def as_choices(self):
        """Yield ``(id, name)`` pairs suitable for form choice fields."""
        self.load()
        for provider in self.get_list():
            yield (provider.id, provider.name)

    def load(self):
        # TODO: Providers register themselves on import; building provider
        # URLs therefore requires force-importing every app's `provider`
        # module first.  This import-order-driven mechanism is too magical
        # and should be revisited.
        if self.loaded:
            return
        for app in settings.INSTALLED_APPS:
            try:
                importlib.import_module(app + '.provider')
            except ImportError:
                pass
        self.loaded = True


registry = ProviderRegistry()
| bsd-3-clause |
roadmapper/ansible | lib/ansible/modules/network/meraki/meraki_syslog.py | 6 | 9145 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Kevin Breit (@kbreit) <kevin.breit@kevinbreit.net>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible module metadata: community supported, and the module's
# interface is still 'preview' (subject to change between releases).
ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'status': ['preview'],
    'supported_by': 'community'
}
DOCUMENTATION = r'''
---
module: meraki_syslog
short_description: Manage syslog server settings in the Meraki cloud.
version_added: "2.8"
description:
- Allows for creation and management of Syslog servers within Meraki.
notes:
- Changes to existing syslog servers replaces existing configuration. If you need to add to an
existing configuration set state to query to gather the existing configuration and then modify or add.
options:
auth_key:
description:
- Authentication key provided by the dashboard. Required if environmental variable MERAKI_KEY is not set.
type: str
state:
description:
- Query or edit syslog servers
- To delete a syslog server, do not include server in list of servers
choices: [present, query]
default: present
type: str
net_name:
description:
- Name of a network.
aliases: [name, network]
type: str
net_id:
description:
- ID number of a network.
type: str
servers:
description:
- List of syslog server settings.
type: list
suboptions:
host:
description:
- IP address or hostname of Syslog server.
type: str
port:
description:
- Port number Syslog server is listening on.
default: "514"
type: int
roles:
description:
- List of applicable Syslog server roles.
choices: ['Wireless Event log',
'Appliance event log',
'Switch event log',
'Air Marshal events',
'Flows',
'URLs',
'IDS alerts',
'Security events']
type: list
author:
- Kevin Breit (@kbreit)
extends_documentation_fragment: meraki
'''
# NOTE: the examples previously used a nonexistent ``status`` option; the
# module's argument spec declares ``state`` (choices: present, query), and
# the two "Add ..." examples must use ``state: present`` to make changes.
EXAMPLES = r'''
- name: Query syslog configurations on network named MyNet in the YourOrg organization
  meraki_syslog:
    auth_key: abc12345
    state: query
    org_name: YourOrg
    net_name: MyNet
  delegate_to: localhost

- name: Add single syslog server with Appliance event log role
  meraki_syslog:
    auth_key: abc12345
    state: present
    org_name: YourOrg
    net_name: MyNet
    servers:
      - host: 192.0.1.2
        port: 514
        roles:
          - Appliance event log
  delegate_to: localhost

- name: Add multiple syslog servers
  meraki_syslog:
    auth_key: abc12345
    state: present
    org_name: YourOrg
    net_name: MyNet
    servers:
      - host: 192.0.1.2
        port: 514
        roles:
          - Appliance event log
      - host: 192.0.1.3
        port: 514
        roles:
          - Appliance event log
          - Flows
  delegate_to: localhost
'''
RETURN = r'''
data:
description: Information about the created or manipulated object.
returned: info
type: complex
contains:
host:
description: Hostname or IP address of syslog server.
returned: success
type: str
sample: 192.0.1.1
port:
description: Port number for syslog communication.
returned: success
type: str
sample: 443
roles:
description: List of roles assigned to syslog server.
returned: success
type: list
sample: "Wireless event log, URLs"
'''
from ansible.module_utils.basic import AnsibleModule, json
from ansible.module_utils.common.dict_transformations import recursive_diff
from ansible.module_utils.network.meraki.meraki import MerakiModule, meraki_argument_spec
def main():
    """Module entry point: query or replace a network's syslog servers."""
    # define the available arguments/parameters that a user can pass to
    # the module
    server_arg_spec = dict(host=dict(type='str'),
                           port=dict(type='int', default="514"),
                           roles=dict(type='list', choices=['Wireless Event log',
                                                            'Appliance event log',
                                                            'Switch event log',
                                                            'Air Marshal events',
                                                            'Flows',
                                                            'URLs',
                                                            'IDS alerts',
                                                            'Security events',
                                                            ]),
                           )

    argument_spec = meraki_argument_spec()
    argument_spec.update(net_id=dict(type='str'),
                         servers=dict(type='list', elements='dict', options=server_arg_spec),
                         state=dict(type='str', choices=['present', 'query'], default='present'),
                         net_name=dict(type='str', aliases=['name', 'network']),
                         )

    # the AnsibleModule object will be our abstraction working with Ansible
    # this includes instantiation, a couple of common attr would be the
    # args/params passed to the execution, as well as if the module
    # supports check mode
    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True,
                           )
    meraki = MerakiModule(module, function='syslog')
    module.params['follow_redirects'] = 'all'
    payload = None

    # Register the syslog endpoint with the shared MerakiModule URL catalog.
    syslog_urls = {'syslog': '/networks/{net_id}/syslogServers'}
    meraki.url_catalog['query_update'] = syslog_urls

    if not meraki.params['org_name'] and not meraki.params['org_id']:
        meraki.fail_json(msg='org_name or org_id parameters are required')
    if meraki.params['state'] != 'query':
        if not meraki.params['net_name'] and not meraki.params['net_id']:
            meraki.fail_json(msg='net_name or net_id is required for present or absent states')
    if meraki.params['net_name'] and meraki.params['net_id']:
        meraki.fail_json(msg='net_name and net_id are mutually exclusive')

    # if the user is working with this module in only check mode we do not
    # want to make any changes to the environment, just return the current
    # state with no modifications
    # manipulate or modify the state as needed (this is going to be the
    # part where your module will do what it needs to do)
    # Resolve organization and network ids from names when necessary.
    org_id = meraki.params['org_id']
    if not org_id:
        org_id = meraki.get_org_id(meraki.params['org_name'])
    net_id = meraki.params['net_id']
    if net_id is None:
        nets = meraki.get_nets(org_id=org_id)
        net_id = meraki.get_net_id(net_name=meraki.params['net_name'], data=nets)

    if meraki.params['state'] == 'query':
        path = meraki.construct_path('query_update', net_id=net_id)
        r = meraki.request(path, method='GET')
        if meraki.status == 200:
            meraki.result['data'] = r
    elif meraki.params['state'] == 'present':
        # Construct payload
        payload = dict()
        payload['servers'] = meraki.params['servers']

        # Convert port numbers to string for idempotency checks
        for server in payload['servers']:
            if server['port']:
                server['port'] = str(server['port'])
        # Fetch the current configuration to compare against the payload.
        path = meraki.construct_path('query_update', net_id=net_id)
        r = meraki.request(path, method='GET')
        if meraki.status == 200:
            original = dict()
            original['servers'] = r
        if meraki.is_update_required(original, payload):
            if meraki.module.check_mode is True:
                # Report the would-be change without calling the API.
                diff = recursive_diff(original, payload)
                original.update(payload)
                meraki.result['diff'] = {'before': diff[0],
                                         'after': diff[1]}
                meraki.result['data'] = original
                meraki.result['changed'] = True
                meraki.exit_json(**meraki.result)
            path = meraki.construct_path('query_update', net_id=net_id)
            r = meraki.request(path, method='PUT', payload=json.dumps(payload))
            if meraki.status == 200:
                meraki.result['data'] = r
                meraki.result['changed'] = True
        else:
            # Already in the desired state; nothing to change.
            if meraki.module.check_mode is True:
                meraki.result['data'] = original
                meraki.exit_json(**meraki.result)
            meraki.result['data'] = original

    # in the event of a successful module execution, you will want to
    # simple AnsibleModule.exit_json(), passing the key/value results
    meraki.exit_json(**meraki.result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
edmorley/django | tests/sites_tests/tests.py | 40 | 13012 | from django.apps import apps
from django.apps.registry import Apps
from django.conf import settings
from django.contrib.sites import models
from django.contrib.sites.management import create_default_site
from django.contrib.sites.middleware import CurrentSiteMiddleware
from django.contrib.sites.models import Site, clear_site_cache
from django.contrib.sites.requests import RequestSite
from django.contrib.sites.shortcuts import get_current_site
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db.models.signals import post_migrate
from django.http import HttpRequest, HttpResponse
from django.test import TestCase, modify_settings, override_settings
from django.test.utils import captured_stdout
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'})
class SitesFrameworkTests(TestCase):
    # Run each test against both configured database aliases.
    multi_db = True

    def setUp(self):
        # A Site matching settings.SITE_ID, used by most tests below.
        self.site = Site(
            id=settings.SITE_ID,
            domain="example.com",
            name="example.com",
        )
        self.site.save()

    def tearDown(self):
        # Drop SITE_CACHE entries so tests stay independent.
        Site.objects.clear_cache()

    def test_site_manager(self):
        # Make sure that get_current() does not return a deleted Site object.
        s = Site.objects.get_current()
        self.assertIsInstance(s, Site)
        s.delete()
        with self.assertRaises(ObjectDoesNotExist):
            Site.objects.get_current()

    def test_site_cache(self):
        # After updating a Site object (e.g. via the admin), we shouldn't return a
        # bogus value from the SITE_CACHE.
        site = Site.objects.get_current()
        self.assertEqual("example.com", site.name)
        s2 = Site.objects.get(id=settings.SITE_ID)
        s2.name = "Example site"
        s2.save()
        site = Site.objects.get_current()
        self.assertEqual("Example site", site.name)

    def test_delete_all_sites_clears_cache(self):
        # When all site objects are deleted the cache should also
        # be cleared and get_current() should raise a DoesNotExist.
        self.assertIsInstance(Site.objects.get_current(), Site)
        Site.objects.all().delete()
        with self.assertRaises(Site.DoesNotExist):
            Site.objects.get_current()

    @override_settings(ALLOWED_HOSTS=['example.com'])
    def test_get_current_site(self):
        # The correct Site object is returned
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "example.com",
            "SERVER_PORT": "80",
        }
        site = get_current_site(request)
        self.assertIsInstance(site, Site)
        self.assertEqual(site.id, settings.SITE_ID)

        # An exception is raised if the sites framework is installed
        # but there is no matching Site
        site.delete()
        with self.assertRaises(ObjectDoesNotExist):
            get_current_site(request)

        # A RequestSite is returned if the sites framework is not installed
        with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}):
            site = get_current_site(request)
            self.assertIsInstance(site, RequestSite)
            self.assertEqual(site.name, "example.com")

    @override_settings(SITE_ID='', ALLOWED_HOSTS=['example.com'])
    def test_get_current_site_no_site_id(self):
        # Without SITE_ID, the site is looked up by the request's host.
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "example.com",
            "SERVER_PORT": "80",
        }
        del settings.SITE_ID
        site = get_current_site(request)
        self.assertEqual(site.name, "example.com")

    @override_settings(SITE_ID='', ALLOWED_HOSTS=['example.com'])
    def test_get_current_site_host_with_trailing_dot(self):
        """
        The site is matched if the name in the request has a trailing dot.
        """
        request = HttpRequest()
        request.META = {
            'SERVER_NAME': 'example.com.',
            'SERVER_PORT': '80',
        }
        site = get_current_site(request)
        self.assertEqual(site.name, 'example.com')

    @override_settings(SITE_ID='', ALLOWED_HOSTS=['example.com', 'example.net'])
    def test_get_current_site_no_site_id_and_handle_port_fallback(self):
        request = HttpRequest()
        s1 = self.site
        s2 = Site.objects.create(domain='example.com:80', name='example.com:80')

        # Host header without port
        request.META = {'HTTP_HOST': 'example.com'}
        site = get_current_site(request)
        self.assertEqual(site, s1)

        # Host header with port - match, no fallback without port
        request.META = {'HTTP_HOST': 'example.com:80'}
        site = get_current_site(request)
        self.assertEqual(site, s2)

        # Host header with port - no match, fallback without port
        request.META = {'HTTP_HOST': 'example.com:81'}
        site = get_current_site(request)
        self.assertEqual(site, s1)

        # Host header with non-matching domain
        request.META = {'HTTP_HOST': 'example.net'}
        with self.assertRaises(ObjectDoesNotExist):
            get_current_site(request)

        # Ensure domain for RequestSite always matches host header
        with self.modify_settings(INSTALLED_APPS={'remove': 'django.contrib.sites'}):
            request.META = {'HTTP_HOST': 'example.com'}
            site = get_current_site(request)
            self.assertEqual(site.name, 'example.com')

            request.META = {'HTTP_HOST': 'example.com:80'}
            site = get_current_site(request)
            self.assertEqual(site.name, 'example.com:80')

    def test_domain_name_with_whitespaces(self):
        # Regression for #17320
        # Domain names are not allowed contain whitespace characters
        site = Site(name="test name", domain="test test")
        with self.assertRaises(ValidationError):
            site.full_clean()
        site.domain = "test\ttest"
        with self.assertRaises(ValidationError):
            site.full_clean()
        site.domain = "test\ntest"
        with self.assertRaises(ValidationError):
            site.full_clean()

    @override_settings(ALLOWED_HOSTS=['example.com'])
    def test_clear_site_cache(self):
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "example.com",
            "SERVER_PORT": "80",
        }
        self.assertEqual(models.SITE_CACHE, {})
        get_current_site(request)
        expected_cache = {self.site.id: self.site}
        self.assertEqual(models.SITE_CACHE, expected_cache)

        # With an empty SITE_ID the cache is additionally keyed by domain.
        with self.settings(SITE_ID=''):
            get_current_site(request)

        expected_cache.update({self.site.domain: self.site})
        self.assertEqual(models.SITE_CACHE, expected_cache)

        clear_site_cache(Site, instance=self.site, using='default')
        self.assertEqual(models.SITE_CACHE, {})

    @override_settings(SITE_ID='', ALLOWED_HOSTS=['example2.com'])
    def test_clear_site_cache_domain(self):
        site = Site.objects.create(name='example2.com', domain='example2.com')
        request = HttpRequest()
        request.META = {
            "SERVER_NAME": "example2.com",
            "SERVER_PORT": "80",
        }
        get_current_site(request)  # prime the models.SITE_CACHE
        expected_cache = {site.domain: site}
        self.assertEqual(models.SITE_CACHE, expected_cache)

        # Site exists in 'default' database so using='other' shouldn't clear.
        clear_site_cache(Site, instance=site, using='other')
        self.assertEqual(models.SITE_CACHE, expected_cache)
        # using='default' should clear.
        clear_site_cache(Site, instance=site, using='default')
        self.assertEqual(models.SITE_CACHE, {})

    def test_unique_domain(self):
        site = Site(domain=self.site.domain)
        msg = 'Site with this Domain name already exists.'
        with self.assertRaisesMessage(ValidationError, msg):
            site.validate_unique()

    def test_site_natural_key(self):
        self.assertEqual(Site.objects.get_by_natural_key(self.site.domain), self.site)
        self.assertEqual(self.site.natural_key(), (self.site.domain,))

    @override_settings(ALLOWED_HOSTS=['example.com'])
    def test_requestsite_save_notimplemented_msg(self):
        # Test response msg for RequestSite.save NotImplementedError
        request = HttpRequest()
        request.META = {
            "HTTP_HOST": "example.com",
        }
        msg = 'RequestSite cannot be saved.'
        with self.assertRaisesMessage(NotImplementedError, msg):
            RequestSite(request).save()

    @override_settings(ALLOWED_HOSTS=['example.com'])
    def test_requestsite_delete_notimplemented_msg(self):
        # Test response msg for RequestSite.delete NotImplementedError
        request = HttpRequest()
        request.META = {
            "HTTP_HOST": "example.com",
        }
        msg = 'RequestSite cannot be deleted.'
        with self.assertRaisesMessage(NotImplementedError, msg):
            RequestSite(request).delete()
class JustOtherRouter:
    """Database router that only permits migrations on the 'other' alias."""

    def allow_migrate(self, db, app_label, **hints):
        # Migrations may run solely against the 'other' database.
        return db == 'other'
@modify_settings(INSTALLED_APPS={'append': 'django.contrib.sites'})
class CreateDefaultSiteTests(TestCase):
    # Run each test against both configured database aliases.
    multi_db = True

    def setUp(self):
        self.app_config = apps.get_app_config('sites')
        # Delete the site created as part of the default migration process.
        Site.objects.all().delete()

    def test_basic(self):
        """
        #15346, #15573 - create_default_site() creates an example site only if
        none exist.
        """
        with captured_stdout() as stdout:
            create_default_site(self.app_config)
        self.assertEqual(Site.objects.count(), 1)
        self.assertIn("Creating example.com", stdout.getvalue())

        # A second call must be a no-op (and stay silent).
        with captured_stdout() as stdout:
            create_default_site(self.app_config)
        self.assertEqual(Site.objects.count(), 1)
        self.assertEqual("", stdout.getvalue())

    @override_settings(DATABASE_ROUTERS=[JustOtherRouter()])
    def test_multi_db_with_router(self):
        """
        #16353, #16828 - The default site creation should respect db routing.
        """
        create_default_site(self.app_config, using='default', verbosity=0)
        create_default_site(self.app_config, using='other', verbosity=0)
        self.assertFalse(Site.objects.using('default').exists())
        self.assertTrue(Site.objects.using('other').exists())

    def test_multi_db(self):
        # Without a router, both databases receive the default site.
        create_default_site(self.app_config, using='default', verbosity=0)
        create_default_site(self.app_config, using='other', verbosity=0)
        self.assertTrue(Site.objects.using('default').exists())
        self.assertTrue(Site.objects.using('other').exists())

    def test_save_another(self):
        """
        #17415 - Another site can be created right after the default one.

        On some backends the sequence needs to be reset after saving with an
        explicit ID. There shouldn't be a sequence collisions by saving another
        site. This test is only meaningful with databases that use sequences
        for automatic primary keys such as PostgreSQL and Oracle.
        """
        create_default_site(self.app_config, verbosity=0)
        Site(domain='example2.com', name='example2.com').save()

    def test_signal(self):
        """
        #23641 - Sending the ``post_migrate`` signal triggers creation of the
        default site.
        """
        post_migrate.send(sender=self.app_config, app_config=self.app_config, verbosity=0)
        self.assertTrue(Site.objects.exists())

    @override_settings(SITE_ID=35696)
    def test_custom_site_id(self):
        """
        #23945 - The configured ``SITE_ID`` should be respected.
        """
        create_default_site(self.app_config, verbosity=0)
        self.assertEqual(Site.objects.get().pk, 35696)

    @override_settings()  # Restore original ``SITE_ID`` afterwards.
    def test_no_site_id(self):
        """
        #24488 - The pk should default to 1 if no ``SITE_ID`` is configured.
        """
        del settings.SITE_ID
        create_default_site(self.app_config, verbosity=0)
        self.assertEqual(Site.objects.get().pk, 1)

    def test_unavailable_site_model(self):
        """
        #24075 - A Site shouldn't be created if the model isn't available.
        """
        # An empty app registry has no Site model available.
        apps = Apps()
        create_default_site(self.app_config, verbosity=0, apps=apps)
        self.assertFalse(Site.objects.exists())
class MiddlewareTest(TestCase):
    # Tests for django.contrib.sites.middleware.CurrentSiteMiddleware.

    def test_old_style_request(self):
        """The request has correct `site` attribute."""
        middleware = CurrentSiteMiddleware()
        request = HttpRequest()
        middleware.process_request(request)
        self.assertEqual(request.site.id, settings.SITE_ID)

    def test_request(self):
        # New-style middleware: called with a get_response callable.
        def get_response(request):
            return HttpResponse(str(request.site.id))

        response = CurrentSiteMiddleware(get_response)(HttpRequest())
        self.assertContains(response, settings.SITE_ID)
| bsd-3-clause |
philoniare/horizon | openstack_dashboard/dashboards/project/stacks/resource_types/views.py | 51 | 2821 | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import yaml
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import tables
from horizon import tabs
from openstack_dashboard import api
import openstack_dashboard.dashboards.project.stacks.resource_types.tables \
as project_tables
import openstack_dashboard.dashboards.project.stacks.resource_types.tabs \
as project_tabs
class ResourceTypesView(tables.DataTableView):
    """Table view listing the Heat resource types available to the user."""

    table_class = project_tables.ResourceTypesTable
    template_name = 'project/stacks.resource_types/index.html'
    page_title = _("Resource Types")

    def get_data(self):
        """Return all resource types sorted by name; [] when the call fails."""
        try:
            return sorted(api.heat.resource_types_list(self.request),
                          key=lambda rt: rt.resource_type)
        except Exception:
            exceptions.handle(self.request,
                              _('Unable to retrieve stack resource types.'))
            return []
class DetailView(tabs.TabView):
    # Tabbed detail page for a single Heat resource type.
    tab_group_class = project_tabs.ResourceTypeDetailsTabs
    template_name = 'project/stacks.resource_types/details.html'
    page_title = _("Resource Type Details")

    def get_resource_type(self, request, **kwargs):
        # Fetch the resource type overview from Heat; on failure, notify the
        # user and redirect back to the resources index.
        try:
            resource_type_overview = api.heat.resource_type_get(
                request,
                kwargs['resource_type'])
            return resource_type_overview
        except Exception:
            msg = _('Unable to retrieve resource type details.')
            exceptions.handle(request, msg, redirect=self.get_redirect_url())

    def get_tabs(self, request, **kwargs):
        # Split the overview into its parts; attributes and properties are
        # rendered as YAML text for display in the tabs.
        resource_type_overview = self.get_resource_type(request, **kwargs)
        r_type = resource_type_overview['resource_type']
        r_type_attributes = resource_type_overview['attributes']
        r_type_properties = resource_type_overview['properties']
        return self.tab_group_class(
            request,
            rt=r_type,
            rt_attributes=yaml.safe_dump(r_type_attributes, indent=2),
            rt_properties=yaml.safe_dump(r_type_properties, indent=2),
            **kwargs)

    @staticmethod
    def get_redirect_url():
        return reverse('horizon:project:stacks.resources:index')
| apache-2.0 |
tequa/ammisoft | ammimain/WinPython-64bit-2.7.13.1Zero/python-2.7.13.amd64/Lib/rfc822.py | 30 | 33542 | """RFC 2822 message manipulation.
Note: This is only a very rough sketch of a full RFC-822 parser; in particular
the tokenizing of addresses does not adhere to all the quoting rules.
Note: RFC 2822 is a long awaited update to RFC 822. This module should
conform to RFC 2822, and is thus mis-named (it's not worth renaming it). Some
effort at RFC 2822 updates have been made, but a thorough audit has not been
performed. Consider any RFC 2822 non-conformance to be a bug.
RFC 2822: http://www.faqs.org/rfcs/rfc2822.html
RFC 822 : http://www.faqs.org/rfcs/rfc822.html (obsolete)
Directions for use:
To create a Message object: first open a file, e.g.:
fp = open(file, 'r')
You can use any other legal way of getting an open file object, e.g. use
sys.stdin or call os.popen(). Then pass the open file object to the Message()
constructor:
m = Message(fp)
This class can work with any input object that supports a readline method. If
the input object has seek and tell capability, the rewindbody method will
work; also illegal lines will be pushed back onto the input stream. If the
input object lacks seek but has an `unread' method that can push back a line
of input, Message will use that to push back illegal lines. Thus this class
can be used to parse messages coming from a buffered stream.
The optional `seekable' argument is provided as a workaround for certain stdio
libraries in which tell() discards buffered data before discovering that the
lseek() system call doesn't work. For maximum portability, you should set the
seekable argument to zero to prevent that initial \code{tell} when passing in
an unseekable object such as a file object created from a socket object. If
it is 1 on entry -- which it is by default -- the tell() method of the open
file object is called once; if this raises an exception, seekable is reset to
0. For other nonzero values of seekable, this test is not made.
To get the text of a particular header there are several methods:
str = m.getheader(name)
str = m.getrawheader(name)
where name is the name of the header, e.g. 'Subject'. The difference is that
getheader() strips the leading and trailing whitespace, while getrawheader()
doesn't. Both functions retain embedded whitespace (including newlines)
exactly as they are specified in the header, and leave the case of the text
unchanged.
For addresses and address lists there are functions
realname, mailaddress = m.getaddr(name)
list = m.getaddrlist(name)
where the latter returns a list of (realname, mailaddr) tuples.
There is also a method
time = m.getdate(name)
which parses a Date-like field and returns a time-compatible tuple,
i.e. a tuple such as returned by time.localtime() or accepted by
time.mktime().
See the class definition for lower level access methods.
There are also some utility functions here.
"""
# Cleanup and extensions by Eric S. Raymond <esr@thyrsus.com>
import time
from warnings import warnpy3k
warnpy3k("in 3.x, rfc822 has been removed in favor of the email package",
stacklevel=2)
__all__ = ["Message","AddressList","parsedate","parsedate_tz","mktime_tz"]
_blanklines = ('\r\n', '\n') # Optimization for islast()
class Message:
"""Represents a single RFC 2822-compliant message."""
    def __init__(self, fp, seekable = 1):
        """Initialize the class instance and read the headers."""
        if seekable == 1:
            # Exercise tell() to make sure it works
            # (and then assume seek() works, too)
            try:
                fp.tell()
            except (AttributeError, IOError):
                seekable = 0
        self.fp = fp
        self.seekable = seekable
        self.startofheaders = None
        self.startofbody = None
        #
        # Record where the headers begin, if the stream can tell us.
        if self.seekable:
            try:
                self.startofheaders = self.fp.tell()
            except IOError:
                self.seekable = 0
        #
        self.readheaders()
        #
        # After readheaders() the stream is positioned at the body.
        if self.seekable:
            try:
                self.startofbody = self.fp.tell()
            except IOError:
                self.seekable = 0
def rewindbody(self):
"""Rewind the file to the start of the body (if seekable)."""
if not self.seekable:
raise IOError, "unseekable file"
self.fp.seek(self.startofbody)
    def readheaders(self):
        """Read header lines.

        Read header lines up to the entirely blank line that terminates them.
        The (normally blank) line that ends the headers is skipped, but not
        included in the returned list.  If a non-header line ends the headers,
        (which is an error), an attempt is made to backspace over it; it is
        never included in the returned list.

        The variable self.status is set to the empty string if all went well,
        otherwise it is an error message.  The variable self.headers is a
        completely uninterpreted list of lines contained in the header (so
        printing them will reproduce the header exactly as it appears in the
        file).
        """
        self.dict = {}
        self.unixfrom = ''
        self.headers = lst = []
        self.status = ''
        headerseen = ""
        firstline = 1
        startofline = unread = tell = None
        # Prefer the stream's own pushback hook; otherwise remember seek
        # positions so an illegal line can be un-read afterwards.
        if hasattr(self.fp, 'unread'):
            unread = self.fp.unread
        elif self.seekable:
            tell = self.fp.tell
        while 1:
            if tell:
                try:
                    startofline = tell()
                except IOError:
                    startofline = tell = None
                    self.seekable = 0
            line = self.fp.readline()
            if not line:
                self.status = 'EOF in headers'
                break
            # Skip unix From name time lines
            if firstline and line.startswith('From '):
                self.unixfrom = self.unixfrom + line
                continue
            firstline = 0
            if headerseen and line[0] in ' \t':
                # It's a continuation line.
                lst.append(line)
                x = (self.dict[headerseen] + "\n " + line.strip())
                self.dict[headerseen] = x.strip()
                continue
            elif self.iscomment(line):
                # It's a comment.  Ignore it.
                continue
            elif self.islast(line):
                # Note! No pushback here!  The delimiter line gets eaten.
                break
            headerseen = self.isheader(line)
            if headerseen:
                # It's a legal header line, save it.
                lst.append(line)
                self.dict[headerseen] = line[len(headerseen)+1:].strip()
                continue
            elif headerseen is not None:
                # An empty header name.  These aren't allowed in HTTP, but it's
                # probably a benign mistake.  Don't add the header, just keep
                # going.
                continue
            else:
                # It's not a header line; throw it back and stop here.
                if not self.dict:
                    self.status = 'No headers'
                else:
                    self.status = 'Non-header line where header expected'
                # Try to undo the read.
                if unread:
                    unread(line)
                elif tell:
                    self.fp.seek(startofline)
                else:
                    self.status = self.status + '; bad seek'
                break
def isheader(self, line):
"""Determine whether a given line is a legal header.
This method should return the header name, suitably canonicalized.
You may override this method in order to use Message parsing on tagged
data in RFC 2822-like formats with special header formats.
"""
i = line.find(':')
if i > -1:
return line[:i].lower()
return None
    def islast(self, line):
        """Determine whether a line is a legal end of RFC 2822 headers.

        You may override this method if your application wants to bend the
        rules, e.g. to strip trailing whitespace, or to recognize MH template
        separators ('--------').  For convenience (e.g. for code reading from
        sockets) a line consisting of \\r\\n also matches.
        """
        # _blanklines is the module-level ('\r\n', '\n') tuple.
        return line in _blanklines
    def iscomment(self, line):
        """Determine whether a line should be skipped entirely.

        You may override this method in order to use Message parsing on tagged
        data in RFC 2822-like formats that support embedded comments or
        free-text data.  The base implementation never skips a line.
        """
        return False
def getallmatchingheaders(self, name):
"""Find all header lines matching a given header name.
Look through the list of headers and find all lines matching a given
header name (and their continuation lines). A list of the lines is
returned, without interpretation. If the header does not occur, an
empty list is returned. If the header occurs multiple times, all
occurrences are returned. Case is not important in the header name.
"""
name = name.lower() + ':'
n = len(name)
lst = []
hit = 0
for line in self.headers:
if line[:n].lower() == name:
hit = 1
elif not line[:1].isspace():
hit = 0
if hit:
lst.append(line)
return lst
def getfirstmatchingheader(self, name):
"""Get the first header line matching name.
This is similar to getallmatchingheaders, but it returns only the
first matching header (and its continuation lines).
"""
name = name.lower() + ':'
n = len(name)
lst = []
hit = 0
for line in self.headers:
if hit:
if not line[:1].isspace():
break
elif line[:n].lower() == name:
hit = 1
if hit:
lst.append(line)
return lst
def getrawheader(self, name):
"""A higher-level interface to getfirstmatchingheader().
Return a string containing the literal text of the header but with the
keyword stripped. All leading, trailing and embedded whitespace is
kept in the string, however. Return None if the header does not
occur.
"""
lst = self.getfirstmatchingheader(name)
if not lst:
return None
lst[0] = lst[0][len(name) + 1:]
return ''.join(lst)
def getheader(self, name, default=None):
"""Get the header value for a name.
This is the normal interface: it returns a stripped version of the
header value for a given header name, or None if it doesn't exist.
This uses the dictionary version which finds the *last* such header.
"""
return self.dict.get(name.lower(), default)
get = getheader
def getheaders(self, name):
"""Get all values for a header.
This returns a list of values for headers given more than once; each
value in the result list is stripped in the same way as the result of
getheader(). If the header is not given, return an empty list.
"""
result = []
current = ''
have_header = 0
for s in self.getallmatchingheaders(name):
if s[0].isspace():
if current:
current = "%s\n %s" % (current, s.strip())
else:
current = s.strip()
else:
if have_header:
result.append(current)
current = s[s.find(":") + 1:].strip()
have_header = 1
if have_header:
result.append(current)
return result
def getaddr(self, name):
"""Get a single address from a header, as a tuple.
An example return value:
('Guido van Rossum', 'guido@cwi.nl')
"""
# New, by Ben Escoto
alist = self.getaddrlist(name)
if alist:
return alist[0]
else:
return (None, None)
def getaddrlist(self, name):
"""Get a list of addresses from a header.
Retrieves a list of addresses from a header, where each address is a
tuple as returned by getaddr(). Scans all named headers, so it works
properly with multiple To: or Cc: headers for example.
"""
raw = []
for h in self.getallmatchingheaders(name):
if h[0] in ' \t':
raw.append(h)
else:
if raw:
raw.append(', ')
i = h.find(':')
if i > 0:
addr = h[i+1:]
raw.append(addr)
alladdrs = ''.join(raw)
a = AddressList(alladdrs)
return a.addresslist
    def getdate(self, name):
        """Retrieve a date field from a header.

        Retrieves a date field from the named header, returning a tuple
        compatible with time.mktime().  Returns None if the header is
        absent or the date cannot be parsed.
        """
        try:
            # Mapping access raises KeyError when the header is missing.
            data = self[name]
        except KeyError:
            return None
        return parsedate(data)
    def getdate_tz(self, name):
        """Retrieve a date field from a header as a 10-tuple.

        The first 9 elements make up a tuple compatible with time.mktime(),
        and the 10th is the offset of the poster's time zone from GMT/UTC
        (in seconds, or None if no timezone was given).
        """
        try:
            data = self[name]
        except KeyError:
            return None
        return parsedate_tz(data)
    # Access as a dictionary (only finds *last* header of each type):
    def __len__(self):
        """Get the number of headers in a message."""
        return len(self.dict)
    def __getitem__(self, name):
        """Get a specific header, as from a dictionary.

        Lookup is case-insensitive; raises KeyError when the header is
        absent.
        """
        return self.dict[name.lower()]
    def __setitem__(self, name, value):
        """Set the value of a header.

        Note: This is not a perfect inversion of __getitem__, because any
        changed headers get stuck at the end of the raw-headers list rather
        than where the altered header was.
        """
        del self[name] # Won't fail if it doesn't exist
        self.dict[name.lower()] = value
        text = name + ": " + value
        # A value containing newlines is stored as one header line plus
        # raw follow-on lines; each gets its own trailing newline.
        for line in text.split("\n"):
            self.headers.append(line + "\n")
def __delitem__(self, name):
"""Delete all occurrences of a specific header, if it is present."""
name = name.lower()
if not name in self.dict:
return
del self.dict[name]
name = name + ':'
n = len(name)
lst = []
hit = 0
for i in range(len(self.headers)):
line = self.headers[i]
if line[:n].lower() == name:
hit = 1
elif not line[:1].isspace():
hit = 0
if hit:
lst.append(i)
for i in reversed(lst):
del self.headers[i]
def setdefault(self, name, default=""):
lowername = name.lower()
if lowername in self.dict:
return self.dict[lowername]
else:
text = name + ": " + default
for line in text.split("\n"):
self.headers.append(line + "\n")
self.dict[lowername] = default
return default
    def has_key(self, name):
        """Determine whether a message contains the named header."""
        # Python 2 style membership test, kept alongside __contains__.
        return name.lower() in self.dict
    def __contains__(self, name):
        """Determine whether a message contains the named header."""
        return name.lower() in self.dict
    def __iter__(self):
        # Iterating a message yields its (lowercased) header names.
        return iter(self.dict)
    def keys(self):
        """Get all of a message's header field names."""
        return self.dict.keys()
    def values(self):
        """Get all of a message's header field values.

        Only the last value of a header that occurs multiple times is
        included, since self.dict keeps one entry per name.
        """
        return self.dict.values()
    def items(self):
        """Get all of a message's headers.

        Returns a list of name, value tuples.
        """
        return self.dict.items()
    def __str__(self):
        # Reconstitute the raw header block exactly as it was read.
        return ''.join(self.headers)
# Utility functions
# -----------------
# XXX Should fix unquote() and quote() to be really conformant.
# XXX The inverses of the parse functions may also be useful.
def unquote(s):
    """Strip surrounding double quotes or angle brackets from a string.

    For a double-quoted string, backslash escapes for backslash and for
    the double-quote character are undone as well.  Any other input is
    returned unchanged.
    """
    if len(s) <= 1:
        return s
    first, last = s[0], s[-1]
    if first == '"' and last == '"':
        inner = s[1:-1]
        return inner.replace('\\\\', '\\').replace('\\"', '"')
    if first == '<' and last == '>':
        return s[1:-1]
    return s
def quote(s):
    """Backslash-escape every backslash and double quote in a string."""
    escaped = []
    for ch in s:
        if ch in '\\"':
            escaped.append('\\')
        escaped.append(ch)
    return ''.join(escaped)
def parseaddr(address):
    """Parse an address into a (realname, mailaddr) tuple.

    Only the first address found is returned; (None, None) signals that
    no address could be parsed from the input.
    """
    a = AddressList(address)
    lst = a.addresslist
    if not lst:
        return (None, None)
    return lst[0]
class AddrlistClass:
    """Address parser class by Ben Escoto.

    To understand what this class does, it helps to have a copy of
    RFC 2822 in front of you.
    http://www.faqs.org/rfcs/rfc2822.html
    Note: this class interface is deprecated and may be removed in the future.
    Use rfc822.AddressList instead.

    The parser is a hand-written recursive-descent scanner: self.pos is
    the current cursor into self.field and is advanced by every get*()
    method; self.commentlist accumulates RFC 2822 comments seen while
    scanning the current address.
    """
    def __init__(self, field):
        """Initialize a new instance.

        `field' is an unparsed address header field, containing one or more
        addresses.
        """
        # Character classes used by the scanner: specials delimit tokens,
        # LWS is linear whitespace, CR covers line breaks.
        self.specials = '()<>@,:;.\"[]'
        self.pos = 0
        self.LWS = ' \t'
        self.CR = '\r\n'
        self.atomends = self.specials + self.LWS + self.CR
        # Note that RFC 2822 now specifies `.' as obs-phrase, meaning that it
        # is obsolete syntax. RFC 2822 requires that we recognize obsolete
        # syntax, so allow dots in phrases.
        self.phraseends = self.atomends.replace('.', '')
        self.field = field
        self.commentlist = []
    def gotonext(self):
        """Parse up to the start of the next address."""
        # Skip whitespace/newlines and collect any comments on the way.
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS + '\n\r':
                self.pos = self.pos + 1
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            else: break
    def getaddrlist(self):
        """Parse all addresses.

        Returns a list containing all of the addresses.
        """
        result = []
        ad = self.getaddress()
        while ad:
            result += ad
            ad = self.getaddress()
        return result
    def getaddress(self):
        """Parse the next address.

        Returns a list of (realname, mailaddr) tuples: usually one, but a
        group produces several and an unparsable fragment produces none.
        """
        self.commentlist = []
        self.gotonext()
        # Remember where we started so an addrspec-only address can be
        # re-scanned from the beginning.
        oldpos = self.pos
        oldcl = self.commentlist
        plist = self.getphraselist()
        self.gotonext()
        returnlist = []
        if self.pos >= len(self.field):
            # Bad email address technically, no domain.
            if plist:
                returnlist = [(' '.join(self.commentlist), plist[0])]
        elif self.field[self.pos] in '.@':
            # email address is just an addrspec
            # this isn't very efficient since we start over
            self.pos = oldpos
            self.commentlist = oldcl
            addrspec = self.getaddrspec()
            returnlist = [(' '.join(self.commentlist), addrspec)]
        elif self.field[self.pos] == ':':
            # address is a group
            returnlist = []
            fieldlen = len(self.field)
            self.pos += 1
            while self.pos < len(self.field):
                self.gotonext()
                if self.pos < fieldlen and self.field[self.pos] == ';':
                    self.pos += 1
                    break
                returnlist = returnlist + self.getaddress()
        elif self.field[self.pos] == '<':
            # Address is a phrase then a route addr
            routeaddr = self.getrouteaddr()
            if self.commentlist:
                returnlist = [(' '.join(plist) + ' (' + \
                               ' '.join(self.commentlist) + ')', routeaddr)]
            else: returnlist = [(' '.join(plist), routeaddr)]
        else:
            if plist:
                returnlist = [(' '.join(self.commentlist), plist[0])]
            elif self.field[self.pos] in self.specials:
                # Skip a stray special character so we make progress.
                self.pos += 1
        self.gotonext()
        if self.pos < len(self.field) and self.field[self.pos] == ',':
            self.pos += 1
        return returnlist
    def getrouteaddr(self):
        """Parse a route address (Return-path value).

        This method just skips all the route stuff and returns the addrspec.
        """
        if self.field[self.pos] != '<':
            return
        # expectroute is set after '@' so the following domain (the
        # route hop) is consumed and discarded.
        expectroute = 0
        self.pos += 1
        self.gotonext()
        adlist = ""
        while self.pos < len(self.field):
            if expectroute:
                self.getdomain()
                expectroute = 0
            elif self.field[self.pos] == '>':
                self.pos += 1
                break
            elif self.field[self.pos] == '@':
                self.pos += 1
                expectroute = 1
            elif self.field[self.pos] == ':':
                self.pos += 1
            else:
                adlist = self.getaddrspec()
                self.pos += 1
                break
            self.gotonext()
        return adlist
    def getaddrspec(self):
        """Parse an RFC 2822 addr-spec (local-part@domain)."""
        aslist = []
        self.gotonext()
        while self.pos < len(self.field):
            if self.field[self.pos] == '.':
                aslist.append('.')
                self.pos += 1
            elif self.field[self.pos] == '"':
                aslist.append('"%s"' % self.getquote())
            elif self.field[self.pos] in self.atomends:
                break
            else: aslist.append(self.getatom())
        self.gotonext()
        if self.pos >= len(self.field) or self.field[self.pos] != '@':
            # No domain part: return whatever local-part we collected.
            return ''.join(aslist)
        aslist.append('@')
        self.pos += 1
        self.gotonext()
        return ''.join(aslist) + self.getdomain()
    def getdomain(self):
        """Get the complete domain name from an address."""
        sdlist = []
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS:
                self.pos += 1
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] == '[':
                sdlist.append(self.getdomainliteral())
            elif self.field[self.pos] == '.':
                self.pos += 1
                sdlist.append('.')
            elif self.field[self.pos] in self.atomends:
                break
            else: sdlist.append(self.getatom())
        return ''.join(sdlist)
    def getdelimited(self, beginchar, endchars, allowcomments = 1):
        """Parse a header fragment delimited by special characters.

        `beginchar' is the start character for the fragment. If self is not
        looking at an instance of `beginchar' then getdelimited returns the
        empty string.
        `endchars' is a sequence of allowable end-delimiting characters.
        Parsing stops when one of these is encountered.
        If `allowcomments' is non-zero, embedded RFC 2822 comments are allowed
        within the parsed fragment.
        """
        if self.field[self.pos] != beginchar:
            return ''
        slist = ['']
        # quote==1 means the previous character was a backslash, so the
        # next character is taken literally.
        quote = 0
        self.pos += 1
        while self.pos < len(self.field):
            if quote == 1:
                slist.append(self.field[self.pos])
                quote = 0
            elif self.field[self.pos] in endchars:
                self.pos += 1
                break
            elif allowcomments and self.field[self.pos] == '(':
                slist.append(self.getcomment())
                continue # have already advanced pos from getcomment
            elif self.field[self.pos] == '\\':
                quote = 1
            else:
                slist.append(self.field[self.pos])
            self.pos += 1
        return ''.join(slist)
    def getquote(self):
        """Get a quote-delimited fragment from self's field."""
        return self.getdelimited('"', '"\r', 0)
    def getcomment(self):
        """Get a parenthesis-delimited fragment from self's field."""
        return self.getdelimited('(', ')\r', 1)
    def getdomainliteral(self):
        """Parse an RFC 2822 domain-literal."""
        return '[%s]' % self.getdelimited('[', ']\r', 0)
    def getatom(self, atomends=None):
        """Parse an RFC 2822 atom.

        Optional atomends specifies a different set of end token delimiters
        (the default is to use self.atomends). This is used e.g. in
        getphraselist() since phrase endings must not include the `.' (which
        is legal in phrases)."""
        atomlist = ['']
        if atomends is None:
            atomends = self.atomends
        while self.pos < len(self.field):
            if self.field[self.pos] in atomends:
                break
            else: atomlist.append(self.field[self.pos])
            self.pos += 1
        return ''.join(atomlist)
    def getphraselist(self):
        """Parse a sequence of RFC 2822 phrases.

        A phrase is a sequence of words, which are in turn either RFC 2822
        atoms or quoted-strings. Phrases are canonicalized by squeezing all
        runs of continuous whitespace into one space.
        """
        plist = []
        while self.pos < len(self.field):
            if self.field[self.pos] in self.LWS:
                self.pos += 1
            elif self.field[self.pos] == '"':
                plist.append(self.getquote())
            elif self.field[self.pos] == '(':
                self.commentlist.append(self.getcomment())
            elif self.field[self.pos] in self.phraseends:
                break
            else:
                plist.append(self.getatom(self.phraseends))
        return plist
class AddressList(AddrlistClass):
    """An AddressList encapsulates a list of parsed RFC 2822 addresses.

    The parsed (realname, mailaddr) tuples live in self.addresslist;
    the arithmetic operators below treat the list as an ordered set.
    """
    def __init__(self, field):
        AddrlistClass.__init__(self, field)
        if field:
            self.addresslist = self.getaddrlist()
        else:
            # Passing a false value (e.g. None) yields an empty list;
            # the operators below rely on this to build fresh results.
            self.addresslist = []
    def __len__(self):
        return len(self.addresslist)
    def __str__(self):
        return ", ".join(map(dump_address_pair, self.addresslist))
    def __add__(self, other):
        # Set union
        newaddr = AddressList(None)
        newaddr.addresslist = self.addresslist[:]
        for x in other.addresslist:
            if not x in self.addresslist:
                newaddr.addresslist.append(x)
        return newaddr
    def __iadd__(self, other):
        # Set union, in-place
        for x in other.addresslist:
            if not x in self.addresslist:
                self.addresslist.append(x)
        return self
    def __sub__(self, other):
        # Set difference
        newaddr = AddressList(None)
        for x in self.addresslist:
            if not x in other.addresslist:
                newaddr.addresslist.append(x)
        return newaddr
    def __isub__(self, other):
        # Set difference, in-place
        for x in other.addresslist:
            if x in self.addresslist:
                self.addresslist.remove(x)
        return self
    def __getitem__(self, index):
        # Make indexing, slices, and 'in' work
        return self.addresslist[index]
def dump_address_pair(pair):
    """Dump a (name, address) pair in a canonicalized form.

    A non-empty realname yields '"name" <addr>'; otherwise the bare
    address is returned.  (The realname is not re-escaped here.)
    """
    realname, mailaddr = pair[0], pair[1]
    if not realname:
        return mailaddr
    return '"' + realname + '" <' + mailaddr + '>'
# Parse a date field
# Month names are matched lowercased; both 3-letter and full names are
# accepted, so index()+1 below in parsedate_tz can exceed 12 and is
# folded back with "mm - 12".
_monthnames = ['jan', 'feb', 'mar', 'apr', 'may', 'jun', 'jul',
               'aug', 'sep', 'oct', 'nov', 'dec',
               'january', 'february', 'march', 'april', 'may', 'june', 'july',
               'august', 'september', 'october', 'november', 'december']
_daynames = ['mon', 'tue', 'wed', 'thu', 'fri', 'sat', 'sun']
# The timezone table does not include the military time zones defined
# in RFC822, other than Z. According to RFC1123, the description in
# RFC822 gets the signs wrong, so we can't rely on any such time
# zones. RFC1123 recommends that numeric timezone indicators be used
# instead of timezone names.
# Offsets are in +-HHMM form (e.g. -400 is UTC-4); parsedate_tz
# converts them to seconds.
_timezones = {'UT':0, 'UTC':0, 'GMT':0, 'Z':0,
              'AST': -400, 'ADT': -300, # Atlantic (used in Canada)
              'EST': -500, 'EDT': -400, # Eastern
              'CST': -600, 'CDT': -500, # Central
              'MST': -700, 'MDT': -600, # Mountain
              'PST': -800, 'PDT': -700 # Pacific
              }
def parsedate_tz(data):
    """Convert a date string to a time tuple.

    Returns a 10-tuple: 9 elements as for time.mktime() (with tm_wday=0,
    tm_yday=1, tm_isdst=0 as placeholders) plus the timezone offset from
    UTC in seconds (None if no recognizable zone was given).  Returns
    None if the string cannot be parsed.  Accounts for military timezones.
    """
    if not data:
        return None
    data = data.split()
    if data[0][-1] in (',', '.') or data[0].lower() in _daynames:
        # There's a dayname here. Skip it
        del data[0]
    else:
        # no space after the "weekday,"?
        i = data[0].rfind(',')
        if i >= 0:
            data[0] = data[0][i+1:]
    if len(data) == 3: # RFC 850 date, deprecated
        # e.g. "06-Nov-94 08:49:37 GMT" -> split the dashed date apart.
        stuff = data[0].split('-')
        if len(stuff) == 3:
            data = stuff + data[1:]
    if len(data) == 4:
        s = data[3]
        i = s.find('+')
        if i > 0:
            # Timezone glued to the time, e.g. "08:49:37+0100".
            data[3:] = [s[:i], s[i+1:]]
        else:
            data.append('') # Dummy tz
    if len(data) < 5:
        return None
    data = data[:5]
    [dd, mm, yy, tm, tz] = data
    # The assignments below assume "dd mon yy hh:mm:ss zzz"; each check
    # swaps fields back into place when the input used another ordering.
    mm = mm.lower()
    if not mm in _monthnames:
        dd, mm = mm, dd.lower()
        if not mm in _monthnames:
            return None
    mm = _monthnames.index(mm)+1
    if mm > 12: mm = mm - 12
    if dd[-1] == ',':
        dd = dd[:-1]
    i = yy.find(':')
    if i > 0:
        # Year and time were swapped ("hh:mm:ss yyyy" ordering).
        yy, tm = tm, yy
    if yy[-1] == ',':
        yy = yy[:-1]
    if not yy[0].isdigit():
        # Year and timezone were swapped.
        yy, tz = tz, yy
    if tm[-1] == ',':
        tm = tm[:-1]
    tm = tm.split(':')
    if len(tm) == 2:
        [thh, tmm] = tm
        tss = '0'
    elif len(tm) == 3:
        [thh, tmm, tss] = tm
    else:
        return None
    try:
        yy = int(yy)
        dd = int(dd)
        thh = int(thh)
        tmm = int(tmm)
        tss = int(tss)
    except ValueError:
        return None
    tzoffset = None
    tz = tz.upper()
    if tz in _timezones:
        tzoffset = _timezones[tz]
    else:
        try:
            tzoffset = int(tz)
        except ValueError:
            pass
    # Convert a timezone offset into seconds ; -0500 -> -18000
    if tzoffset:
        if tzoffset < 0:
            tzsign = -1
            tzoffset = -tzoffset
        else:
            tzsign = 1
        tzoffset = tzsign * ( (tzoffset//100)*3600 + (tzoffset % 100)*60)
    return (yy, mm, dd, thh, tmm, tss, 0, 1, 0, tzoffset)
def parsedate(data):
    """Convert a time string to a 9-tuple compatible with time.mktime().

    Returns None when the string cannot be parsed; otherwise drops the
    timezone element from the parsedate_tz() result.
    """
    parsed = parsedate_tz(data)
    return None if parsed is None else parsed[:9]
def mktime_tz(data):
    """Turn a 10-tuple as returned by parsedate_tz() into a UTC timestamp.

    data[9] is the zone offset from UTC in seconds, or None when the
    parsed date carried no timezone information.
    """
    if data[9] is None:
        # No zone info, so localtime is better assumption than GMT
        return time.mktime(data[:8] + (-1,))
    else:
        # Interpret the stamp as local time with DST off, then undo both
        # the local zone (time.timezone) and the parsed offset to get UTC.
        t = time.mktime(data[:8] + (0,))
        return t - data[9] - time.timezone
def formatdate(timeval=None):
    """Returns time format preferred for Internet standards.

    E.g. 'Sun, 06 Nov 1994 08:49:37 GMT' (RFC 822, updated by RFC 1123).
    `timeval` is a POSIX timestamp; the default is the current time.
    According to RFC 1123, day and month names must always be in
    English, so locale-honoring strftime() cannot be used here.
    """
    if timeval is None:
        timeval = time.time()
    tt = time.gmtime(timeval)
    days = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
    months = ("Jan", "Feb", "Mar", "Apr", "May", "Jun",
              "Jul", "Aug", "Sep", "Oct", "Nov", "Dec")
    return "%s, %02d %s %04d %02d:%02d:%02d GMT" % (
        days[tt[6]], tt[2], months[tt[1] - 1], tt[0], tt[3], tt[4], tt[5])
# When used as script, run a small test program.
# The first command line argument must be a filename containing one
# message in RFC-822 format.
# (Python 2 demo driver: parses the message, prints the common headers,
# the parsed date, and the body line count.)
if __name__ == '__main__':
    import sys, os
    file = os.path.join(os.environ['HOME'], 'Mail/inbox/1')
    if sys.argv[1:]: file = sys.argv[1]
    f = open(file, 'r')
    m = Message(f)
    print 'From:', m.getaddr('from')
    print 'To:', m.getaddrlist('to')
    print 'Subject:', m.getheader('subject')
    print 'Date:', m.getheader('date')
    date = m.getdate_tz('date')
    # NOTE(review): if the Date header is missing or has no timezone,
    # date is None / date[-1] is None and the lines below raise --
    # pre-existing behavior of this demo, acceptable for a test driver.
    tz = date[-1]
    date = time.localtime(mktime_tz(date))
    if date:
        print 'ParsedDate:', time.asctime(date),
        hhmmss = tz
        hhmm, ss = divmod(hhmmss, 60)
        hh, mm = divmod(hhmm, 60)
        print "%+03d%02d" % (hh, mm),
        if ss: print ".%02d" % ss,
        print
    else:
        print 'ParsedDate:', None
    m.rewindbody()
    n = 0
    while f.readline():
        n += 1
    print 'Lines:', n
    print '-'*70
    print 'len =', len(m)
    if 'Date' in m: print 'Date =', m['Date']
    if 'X-Nonsense' in m: pass
    print 'keys =', m.keys()
    print 'values =', m.values()
    print 'items =', m.items()
| bsd-3-clause |
0Chencc/CTFCrackTools | Lib/Lib/markupbase.py | 173 | 14643 | """Shared support for scanning document type declarations in HTML and XHTML.
This module is used as a foundation for the HTMLParser and sgmllib
modules (indirectly, for htmllib as well). It has no documented
public API and should not be used directly.
"""
import re
# Pre-bound matchers for declaration name tokens and quoted string
# literals, plus the closing delimiters for comments and marked sections.
_declname_match = re.compile(r'[a-zA-Z][-_.a-zA-Z0-9]*\s*').match
_declstringlit_match = re.compile(r'(\'[^\']*\'|"[^"]*")\s*').match
_commentclose = re.compile(r'--\s*>')
_markedsectionclose = re.compile(r']\s*]\s*>')
# An analysis of the MS-Word extensions is available at
# http://www.planetpublish.com/xmlarena/xap/Thursday/WordtoXML.pdf
_msmarkedsectionclose = re.compile(r']\s*>')
# Keep the module namespace clean; only the bound matchers are needed.
del re
class ParserBase:
    """Parser base class which provides some common support methods used
    by the SGML/HTML and XHTML parsers.

    Convention used throughout: parse methods take an index `i` into
    self.rawdata and return the index just past the construct, or -1
    when the buffer ends before the construct is complete (caller should
    wait for more data).
    """
    def __init__(self):
        if self.__class__ is ParserBase:
            raise RuntimeError(
                "markupbase.ParserBase must be subclassed")
    def error(self, message):
        raise NotImplementedError(
            "subclasses of ParserBase must override error()")
    def reset(self):
        # (lineno, offset) track the position reported by getpos().
        self.lineno = 1
        self.offset = 0
    def getpos(self):
        """Return current line number and offset."""
        return self.lineno, self.offset
    # Internal -- update line number and offset.  This should be
    # called for each piece of data exactly once, in order -- in other
    # words the concatenation of all the input strings to this
    # function should be exactly the entire input.
    def updatepos(self, i, j):
        if i >= j:
            return j
        rawdata = self.rawdata
        nlines = rawdata.count("\n", i, j)
        if nlines:
            self.lineno = self.lineno + nlines
            pos = rawdata.rindex("\n", i, j) # Should not fail
            self.offset = j-(pos+1)
        else:
            self.offset = self.offset + j-i
        return j
    _decl_otherchars = ''
    # Internal -- parse declaration (for use by subclasses).
    def parse_declaration(self, i):
        """Parse a markup declaration beginning at rawdata[i] ("<!...").

        Returns the index past the declaration, or -1 if incomplete.
        """
        # This is some sort of declaration; in "HTML as
        # deployed," this should only be the document type
        # declaration ("<!DOCTYPE html...>").
        # ISO 8879:1986, however, has more complex
        # declaration syntax for elements in <!...>, including:
        # --comment--
        # [marked section]
        # name in the following list: ENTITY, DOCTYPE, ELEMENT,
        # ATTLIST, NOTATION, SHORTREF, USEMAP,
        # LINKTYPE, LINK, IDLINK, USELINK, SYSTEM
        rawdata = self.rawdata
        j = i + 2
        assert rawdata[i:j] == "<!", "unexpected call to parse_declaration"
        if rawdata[j:j+1] == ">":
            # the empty comment <!>
            return j + 1
        if rawdata[j:j+1] in ("-", ""):
            # Start of comment followed by buffer boundary,
            # or just a buffer boundary.
            return -1
        # A simple, practical version could look like: ((name|stringlit) S*) + '>'
        n = len(rawdata)
        if rawdata[j:j+2] == '--': #comment
            # Locate --.*-- as the body of the comment
            return self.parse_comment(i)
        elif rawdata[j] == '[': #marked section
            # Locate [statusWord [...arbitrary SGML...]] as the body of the marked section
            # Where statusWord is one of TEMP, CDATA, IGNORE, INCLUDE, RCDATA
            # Note that this is extended by Microsoft Office "Save as Web" function
            # to include [if...] and [endif].
            return self.parse_marked_section(i)
        else: #all other declaration elements
            decltype, j = self._scan_name(j, i)
            if j < 0:
                return j
            if decltype == "doctype":
                self._decl_otherchars = ''
            while j < n:
                c = rawdata[j]
                if c == ">":
                    # end of declaration syntax
                    data = rawdata[i+2:j]
                    if decltype == "doctype":
                        self.handle_decl(data)
                    else:
                        # According to the HTML5 specs sections "8.2.4.44 Bogus
                        # comment state" and "8.2.4.45 Markup declaration open
                        # state", a comment token should be emitted.
                        # Calling unknown_decl provides more flexibility though.
                        self.unknown_decl(data)
                    return j + 1
                if c in "\"'":
                    m = _declstringlit_match(rawdata, j)
                    if not m:
                        return -1 # incomplete
                    j = m.end()
                elif c in "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ":
                    name, j = self._scan_name(j, i)
                elif c in self._decl_otherchars:
                    j = j + 1
                elif c == "[":
                    # this could be handled in a separate doctype parser
                    if decltype == "doctype":
                        j = self._parse_doctype_subset(j + 1, i)
                    elif decltype in ("attlist", "linktype", "link", "element"):
                        # must tolerate []'d groups in a content model in an element declaration
                        # also in data attribute specifications of attlist declaration
                        # also link type declaration subsets in linktype declarations
                        # also link attribute specification lists in link declarations
                        self.error("unsupported '[' char in %s declaration" % decltype)
                    else:
                        self.error("unexpected '[' char in declaration")
                else:
                    self.error(
                        "unexpected %r char in declaration" % rawdata[j])
                if j < 0:
                    return j
            return -1 # incomplete
    # Internal -- parse a marked section
    # Override this to handle MS-word extension syntax <![if word]>content<![endif]>
    def parse_marked_section(self, i, report=1):
        rawdata= self.rawdata
        assert rawdata[i:i+3] == '<![', "unexpected call to parse_marked_section()"
        sectName, j = self._scan_name( i+3, i )
        if j < 0:
            return j
        if sectName in ("temp", "cdata", "ignore", "include", "rcdata"):
            # look for standard ]]> ending
            match= _markedsectionclose.search(rawdata, i+3)
        elif sectName in ("if", "else", "endif"):
            # look for MS Office ]> ending
            match= _msmarkedsectionclose.search(rawdata, i+3)
        else:
            self.error('unknown status keyword %r in marked section' % rawdata[i+3:j])
        if not match:
            return -1
        if report:
            j = match.start(0)
            self.unknown_decl(rawdata[i+3: j])
        return match.end(0)
    # Internal -- parse comment, return length or -1 if not terminated
    def parse_comment(self, i, report=1):
        rawdata = self.rawdata
        if rawdata[i:i+4] != '<!--':
            self.error('unexpected call to parse_comment()')
        match = _commentclose.search(rawdata, i+4)
        if not match:
            return -1
        if report:
            j = match.start(0)
            self.handle_comment(rawdata[i+4: j])
        return match.end(0)
    # Internal -- scan past the internal subset in a <!DOCTYPE declaration,
    # returning the index just past any whitespace following the trailing ']'.
    def _parse_doctype_subset(self, i, declstartpos):
        rawdata = self.rawdata
        n = len(rawdata)
        j = i
        while j < n:
            c = rawdata[j]
            if c == "<":
                s = rawdata[j:j+2]
                if s == "<":
                    # end of buffer; incomplete
                    return -1
                if s != "<!":
                    self.updatepos(declstartpos, j + 1)
                    self.error("unexpected char in internal subset (in %r)" % s)
                if (j + 2) == n:
                    # end of buffer; incomplete
                    return -1
                if (j + 4) > n:
                    # end of buffer; incomplete
                    return -1
                if rawdata[j:j+4] == "<!--":
                    j = self.parse_comment(j, report=0)
                    if j < 0:
                        return j
                    continue
                name, j = self._scan_name(j + 2, declstartpos)
                if j == -1:
                    return -1
                if name not in ("attlist", "element", "entity", "notation"):
                    self.updatepos(declstartpos, j + 2)
                    self.error(
                        "unknown declaration %r in internal subset" % name)
                # handle the individual names
                meth = getattr(self, "_parse_doctype_" + name)
                j = meth(j, declstartpos)
                if j < 0:
                    return j
            elif c == "%":
                # parameter entity reference
                if (j + 1) == n:
                    # end of buffer; incomplete
                    return -1
                s, j = self._scan_name(j + 1, declstartpos)
                if j < 0:
                    return j
                if rawdata[j] == ";":
                    j = j + 1
            elif c == "]":
                j = j + 1
                while j < n and rawdata[j].isspace():
                    j = j + 1
                if j < n:
                    if rawdata[j] == ">":
                        return j
                    self.updatepos(declstartpos, j)
                    self.error("unexpected char after internal subset")
                else:
                    return -1
            elif c.isspace():
                j = j + 1
            else:
                self.updatepos(declstartpos, j)
                self.error("unexpected char %r in internal subset" % c)
        # end of buffer reached
        return -1
    # Internal -- scan past <!ELEMENT declarations
    def _parse_doctype_element(self, i, declstartpos):
        name, j = self._scan_name(i, declstartpos)
        if j == -1:
            return -1
        # style content model; just skip until '>'
        rawdata = self.rawdata
        if '>' in rawdata[j:]:
            return rawdata.find(">", j) + 1
        return -1
    # Internal -- scan past <!ATTLIST declarations
    def _parse_doctype_attlist(self, i, declstartpos):
        rawdata = self.rawdata
        name, j = self._scan_name(i, declstartpos)
        c = rawdata[j:j+1]
        if c == "":
            return -1
        if c == ">":
            return j + 1
        while 1:
            # scan a series of attribute descriptions; simplified:
            #   name type [value] [#constraint]
            name, j = self._scan_name(j, declstartpos)
            if j < 0:
                return j
            c = rawdata[j:j+1]
            if c == "":
                return -1
            if c == "(":
                # an enumerated type; look for ')'
                if ")" in rawdata[j:]:
                    j = rawdata.find(")", j) + 1
                else:
                    return -1
                while rawdata[j:j+1].isspace():
                    j = j + 1
                if not rawdata[j:]:
                    # end of buffer, incomplete
                    return -1
            else:
                name, j = self._scan_name(j, declstartpos)
            c = rawdata[j:j+1]
            if not c:
                return -1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if m:
                    j = m.end()
                else:
                    return -1
                c = rawdata[j:j+1]
                if not c:
                    return -1
            if c == "#":
                if rawdata[j:] == "#":
                    # end of buffer
                    return -1
                name, j = self._scan_name(j + 1, declstartpos)
                if j < 0:
                    return j
                c = rawdata[j:j+1]
                if not c:
                    return -1
            if c == '>':
                # all done
                return j + 1
    # Internal -- scan past <!NOTATION declarations
    def _parse_doctype_notation(self, i, declstartpos):
        name, j = self._scan_name(i, declstartpos)
        if j < 0:
            return j
        rawdata = self.rawdata
        while 1:
            c = rawdata[j:j+1]
            if not c:
                # end of buffer; incomplete
                return -1
            if c == '>':
                return j + 1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if not m:
                    return -1
                j = m.end()
            else:
                name, j = self._scan_name(j, declstartpos)
                if j < 0:
                    return j
    # Internal -- scan past <!ENTITY declarations
    def _parse_doctype_entity(self, i, declstartpos):
        rawdata = self.rawdata
        if rawdata[i:i+1] == "%":
            # parameter entity declaration: skip '%' and whitespace
            j = i + 1
            while 1:
                c = rawdata[j:j+1]
                if not c:
                    return -1
                if c.isspace():
                    j = j + 1
                else:
                    break
        else:
            j = i
        name, j = self._scan_name(j, declstartpos)
        if j < 0:
            return j
        while 1:
            c = self.rawdata[j:j+1]
            if not c:
                return -1
            if c in "'\"":
                m = _declstringlit_match(rawdata, j)
                if m:
                    j = m.end()
                else:
                    return -1 # incomplete
            elif c == ">":
                return j + 1
            else:
                name, j = self._scan_name(j, declstartpos)
                if j < 0:
                    return j
    # Internal -- scan a name token and the new position and the token, or
    # return -1 if we've reached the end of the buffer.  Returns a
    # (name, newpos) pair; name is lowercased, or None on buffer end.
    def _scan_name(self, i, declstartpos):
        rawdata = self.rawdata
        n = len(rawdata)
        if i == n:
            return None, -1
        m = _declname_match(rawdata, i)
        if m:
            s = m.group()
            name = s.strip()
            if (i + len(s)) == n:
                return None, -1 # end of buffer
            return name.lower(), m.end()
        else:
            self.updatepos(declstartpos, i)
            self.error("expected name token at %r"
                       % rawdata[declstartpos:declstartpos+20])
    # To be overridden -- handlers for unknown objects
    def unknown_decl(self, data):
        pass
| gpl-3.0 |
j00bar/ansible | contrib/inventory/zabbix.py | 47 | 4204 | #!/usr/bin/env python
# (c) 2013, Greg Buehler
#
# This file is part of Ansible,
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
######################################################################
"""
Zabbix Server external inventory script.
========================================
Returns hosts and hostgroups from Zabbix Server.
Configuration is read from `zabbix.ini`.
Tested with Zabbix Server 2.0.6.
"""
from __future__ import print_function
import os
import sys
import argparse
import ConfigParser
try:
    from zabbix_api import ZabbixAPI
except ImportError:
    # Was a bare "except:"; that would also swallow SystemExit and
    # KeyboardInterrupt.  Only a missing module warrants this message.
    print("Error: Zabbix API library must be installed: pip install zabbix-api.",
          file=sys.stderr)
    sys.exit(1)
try:
    import json
except ImportError:
    # Pre-2.6 interpreters lack the stdlib json module.
    import simplejson as json
class ZabbixInventory(object):
    """Ansible dynamic-inventory provider backed by a Zabbix server.

    Instantiating the class runs the whole program: settings are read
    from zabbix.ini, CLI flags are parsed, and the inventory (or a
    single host's variables) is printed as JSON.
    """
    def read_settings(self):
        """Load server/credentials from ./zabbix.ini or the script's dir."""
        config = ConfigParser.SafeConfigParser()
        conf_path = './zabbix.ini'
        if not os.path.exists(conf_path):
            conf_path = os.path.dirname(os.path.realpath(__file__)) + '/zabbix.ini'
        if os.path.exists(conf_path):
            config.read(conf_path)
        # server
        if config.has_option('zabbix', 'server'):
            self.zabbix_server = config.get('zabbix', 'server')
        # login
        if config.has_option('zabbix', 'username'):
            self.zabbix_username = config.get('zabbix', 'username')
        if config.has_option('zabbix', 'password'):
            self.zabbix_password = config.get('zabbix', 'password')
    def read_cli(self):
        """Parse the standard dynamic-inventory flags --list / --host."""
        parser = argparse.ArgumentParser()
        parser.add_argument('--host')
        parser.add_argument('--list', action='store_true')
        self.options = parser.parse_args()
    def hoststub(self):
        # Fresh empty group structure; one per group so appends stay local.
        return {
            'hosts': []
        }
    def get_host(self, api, name):
        """Return the hostvars dict for a single host (--host mode)."""
        data = {'ansible_ssh_host': name}
        return data
    def get_list(self, api):
        """Build the full group->hosts inventory from the Zabbix API."""
        hostsData = api.host.get({'output': 'extend', 'selectGroups': 'extend'})
        data = {}
        data[self.defaultgroup] = self.hoststub()
        for host in hostsData:
            hostname = host['name']
            # Every host belongs to the catch-all group plus its own groups.
            data[self.defaultgroup]['hosts'].append(hostname)
            for group in host['groups']:
                groupname = group['name']
                if not groupname in data:
                    data[groupname] = self.hoststub()
                data[groupname]['hosts'].append(hostname)
        return data
    def __init__(self):
        self.defaultgroup = 'group_all'
        self.zabbix_server = None
        self.zabbix_username = None
        self.zabbix_password = None
        self.read_settings()
        self.read_cli()
        if self.zabbix_server and self.zabbix_username:
            try:
                api = ZabbixAPI(server=self.zabbix_server)
                api.login(user=self.zabbix_username, password=self.zabbix_password)
            # NOTE(review): BaseException is overly broad (it also catches
            # KeyboardInterrupt) and `e` is unused -- consider narrowing
            # to Exception; kept as-is to preserve behavior.
            except BaseException as e:
                print("Error: Could not login to Zabbix server. Check your zabbix.ini.", file=sys.stderr)
                sys.exit(1)
            if self.options.host:
                data = self.get_host(api, self.options.host)
                print(json.dumps(data, indent=2))
            elif self.options.list:
                data = self.get_list(api)
                print(json.dumps(data, indent=2))
            else:
                print("usage: --list ..OR.. --host <hostname>", file=sys.stderr)
                sys.exit(1)
        else:
            print("Error: Configuration of server and credentials are required. See zabbix.ini.", file=sys.stderr)
            sys.exit(1)
# Running the module executes the inventory immediately.
ZabbixInventory()
| gpl-3.0 |
ScreamingUdder/mantid | scripts/Muon/MaxentTools/outspec.py | 1 | 1776 |
from __future__ import (absolute_import, division, print_function)
import numpy as np
from Muon.MaxentTools.zft import ZFT
def OUTSPEC(
        datum,
        f,
        sigma,
        datt,
        CHANNELS_itzero,
        CHANNELS_itotal,
        PULSESHAPE_convol,
        FAC_ratio,
        DETECT_a,
        DETECT_b,
        DETECT_d,
        DETECT_e,
        SAVETIME_I2,
        RUNDATA_fnorm,
        mylog):
    """Compare the back-transformed spectrum against the measured data.

    Builds a per-group model `guess` from the frequency spectrum `f`
    (via ZFT and the detector phase factors DETECT_a/DETECT_b), forms
    normalized residuals against `datum`, logs outliers and per-group
    chi contributions, then adds the detector backgrounds and rescales.

    Parameters datt, CHANNELS_itzero and CHANNELS_itotal are currently
    unused but kept for interface compatibility with the other MaxEnt
    routines.

    Returns (test, guess): the scaled residual array and the model with
    backgrounds, converted back to the original normalisation.
    """
    npts, ngroups = datum.shape
    # Back-transform f into time space; real/imag parts weight the
    # per-group detector factors.
    zr, zi = ZFT(f, PULSESHAPE_convol, DETECT_e, SAVETIME_I2)
    guess = np.outer(zr, DETECT_a) + np.outer(zi, DETECT_b)
    test0 = (datum - guess) / sigma
    test = test0 * FAC_ratio
    # NOTE(review): the debug text below says "chi**2", but this sums the
    # raw residuals, not their squares -- preserved as found.
    chi = np.sum(test0, axis=0)
    ibads9 = np.greater_equal(test, 5.0)
    chi = chi / float(npts)
    for j in range(ngroups):
        ibads8 = np.nonzero(ibads9[:, j])[0]
        # Report at most 10 outliers per group to keep the log readable.
        for i in ibads8[0:10]:
            mylog.warning(
                "devn .gt. 5 std devs on point {:4d} of group {:2d}".format(i, j))
        if(len(ibads8) > 10):
            mylog.warning("... lots of baddies in group {:2d}".format(j))
    mylog.debug("contribs to chi**2 from each group:" + str(chi))
    nphalf = int(npts / 2)
    # Sigma values >= 1e3 mark excluded points; they contribute nothing.
    tmp1 = np.where(np.less_equal(sigma[nphalf:, ], 1.E3), guess[nphalf:,:]/sigma[nphalf:,:]**2, 0.0)
    half_sum = np.sum(tmp1)
    nsum = np.count_nonzero(tmp1)
    if(nsum == 0):
        # Bug fix: `bum` was previously unbound on this path, so the
        # debug line below raised NameError.  Report 0 as the message says.
        bum = 0.0
        mylog.debug(' no data in last half, so last half sum = 0')
    else:
        bum = half_sum / nsum
    mylog.debug("Points:{0}".format(npts))
    mylog.debug("Last half sum:{0}".format(bum))
    # add in backgrounds (no dead time adjustment yet)
    guess = ((guess + np.outer(DETECT_e, DETECT_d)) /
             RUNDATA_fnorm)  # convert from 10Mevent normalisation to original stats
    return test, guess
| gpl-3.0 |
cselis86/edx-platform | common/djangoapps/track/shim.py | 13 | 5643 | """Map new event context values to old top-level field values. Ensures events can be parsed by legacy parsers."""
import json
import logging
from opaque_keys import InvalidKeyError
from opaque_keys.edx.keys import UsageKey
log = logging.getLogger(__name__)
CONTEXT_FIELDS_TO_INCLUDE = [
'username',
'session',
'ip',
'agent',
'host',
'referer',
'accept_language'
]
class LegacyFieldMappingProcessor(object):
    """Map new-style event context fields onto the legacy top-level layout.

    Ensures all required legacy fields are present on emitted events so
    that old log parsers keep working.
    """

    def __call__(self, event):
        ctx = event.get('context', {})
        if 'context' in event:
            # Hoist the well-known context fields to the top level, then
            # strip the copies (and other shim-only fields) from context.
            for legacy_field in CONTEXT_FIELDS_TO_INCLUDE:
                self.move_from_context(legacy_field, event)
            remove_shim_context(event)

        # Legacy consumers expect the payload under 'event', never 'data'.
        event['event'] = event.pop('data', {})

        # Prefer the context timestamp; fall back to a top-level one.
        if 'timestamp' in ctx:
            event['time'] = ctx.pop('timestamp')
        elif 'timestamp' in event:
            event['time'] = event['timestamp']
        event.pop('timestamp', None)

        self.move_from_context('event_type', event, event.get('name', ''))
        self.move_from_context('event_source', event, 'server')
        self.move_from_context('page', event, None)

    def move_from_context(self, field, event, default_value=''):
        """Move *field* from the context dict to the top level of *event*.

        If the field is absent from the context, *default_value* is used.
        """
        ctx = event.get('context', {})
        if field in ctx:
            event[field] = ctx.pop(field)
        else:
            event[field] = default_value
def remove_shim_context(event):
    """Strip context fields that have already been hoisted to the top level.

    Also removes 'client_id', which exists only for Segment.io web
    analytics and is of no interest to researchers.
    """
    if 'context' not in event:
        return
    ctx = event['context']
    doomed = set(CONTEXT_FIELDS_TO_INCLUDE)
    doomed.add('client_id')
    for field in doomed.intersection(ctx):
        del ctx[field]
# Maps new-style (mobile) event names onto the legacy browser `event_type`
# values expected by downstream analytics.  Note that both
# 'edx.video.position.changed' and 'edx.video.seeked' collapse to the
# single legacy type 'seek_video'.
NAME_TO_EVENT_TYPE_MAP = {
    'edx.video.played': 'play_video',
    'edx.video.paused': 'pause_video',
    'edx.video.stopped': 'stop_video',
    'edx.video.loaded': 'load_video',
    'edx.video.position.changed': 'seek_video',
    'edx.video.seeked': 'seek_video',
    'edx.video.transcript.shown': 'show_transcript',
    'edx.video.transcript.hidden': 'hide_transcript',
}
class VideoEventProcessor(object):
    """
    Converts new format video events into the legacy video event format.

    Mobile devices cannot actually emit events that exactly match their counterparts emitted by the LMS javascript
    video player. Instead of attempting to get them to do that, we instead insert a shim here that converts the events
    they *can* easily emit and converts them into the legacy format.

    TODO: Remove this shim and perform the conversion as part of some batch canonicalization process.

    """

    def __call__(self, event):
        # Mutates *event* in place; events without a recognized name are
        # left untouched.
        name = event.get('name')
        if not name:
            return

        if name not in NAME_TO_EVENT_TYPE_MAP:
            return

        # Convert edx.video.seeked to edx.video.position.changed
        if name == "edx.video.seeked":
            event['name'] = "edx.video.position.changed"

        event['event_type'] = NAME_TO_EVENT_TYPE_MAP[name]

        if 'event' not in event:
            return
        payload = event['event']

        # Legacy events carry an html-style element id rather than a
        # module usage key.
        if 'module_id' in payload:
            module_id = payload['module_id']
            try:
                usage_key = UsageKey.from_string(module_id)
            except InvalidKeyError:
                log.warning('Unable to parse module_id "%s"', module_id, exc_info=True)
            else:
                payload['id'] = usage_key.html_id()

            del payload['module_id']

        # Legacy JS player uses camelCase.
        if 'current_time' in payload:
            payload['currentTime'] = payload.pop('current_time')

        if 'context' in event:
            context = event['context']

            # Converts seek_type to seek and skip|slide to onSlideSeek|onSkipSeek
            if 'seek_type' in payload:
                seek_type = payload['seek_type']
                if seek_type == 'slide':
                    payload['type'] = "onSlideSeek"
                elif seek_type == 'skip':
                    payload['type'] = "onSkipSeek"
                del payload['seek_type']

            # For the iOS build that is returning a +30 for back skip 30
            # NOTE(review): assumes context['application'] always exists with
            # 'version' and 'name' keys for these events -- confirm upstream.
            if (
                    context['application']['version'] == "1.0.02" and
                    context['application']['name'] == "edx.mobileapp.iOS"
            ):
                if 'requested_skip_interval' in payload and 'type' in payload:
                    if (
                            payload['requested_skip_interval'] == 30 and
                            payload['type'] == "onSkipSeek"
                    ):
                        payload['requested_skip_interval'] = -30

            # For the Android build that isn't distinguishing between skip/seek
            if 'requested_skip_interval' in payload:
                if abs(payload['requested_skip_interval']) != 30:
                    if 'type' in payload:
                        payload['type'] = 'onSlideSeek'

            # Legacy 'page' is the video URL with its last path segment cut off.
            if 'open_in_browser_url' in context:
                page, _sep, _tail = context.pop('open_in_browser_url').rpartition('/')
                event['page'] = page

        # Legacy consumers expect the payload serialized as a JSON string.
        event['event'] = json.dumps(payload)
| agpl-3.0 |
kvar/ansible | test/units/modules/network/vyos/test_vyos_config.py | 68 | 5136 | #
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
from units.compat.mock import patch, MagicMock
from ansible.modules.network.vyos import vyos_config
from ansible.plugins.cliconf.vyos import Cliconf
from units.modules.utils import set_module_args
from .vyos_module import TestVyosModule, load_fixture
class TestVyosConfigModule(TestVyosModule):
    """Unit tests for the vyos_config Ansible module.

    All network-facing helpers (get_config / load_config / run_commands /
    get_connection) are patched out in setUp; diffs are produced by a real
    Cliconf instance driven against fixture configuration files.
    """

    module = vyos_config

    def setUp(self):
        super(TestVyosConfigModule, self).setUp()

        self.mock_get_config = patch('ansible.modules.network.vyos.vyos_config.get_config')
        self.get_config = self.mock_get_config.start()

        self.mock_load_config = patch('ansible.modules.network.vyos.vyos_config.load_config')
        self.load_config = self.mock_load_config.start()

        self.mock_run_commands = patch('ansible.modules.network.vyos.vyos_config.run_commands')
        self.run_commands = self.mock_run_commands.start()

        self.mock_get_connection = patch('ansible.modules.network.vyos.vyos_config.get_connection')
        self.get_connection = self.mock_get_connection.start()

        self.cliconf_obj = Cliconf(MagicMock())
        self.running_config = load_fixture('vyos_config_config.cfg')
        self.conn = self.get_connection()
        self.conn.edit_config = MagicMock()
        # NOTE(review): running_config is loaded twice; the second assignment
        # is redundant but harmless.
        self.running_config = load_fixture('vyos_config_config.cfg')

    def tearDown(self):
        super(TestVyosConfigModule, self).tearDown()
        self.mock_get_config.stop()
        self.mock_load_config.stop()
        self.mock_run_commands.stop()
        self.mock_get_connection.stop()

    def load_fixtures(self, commands=None):
        # Device "running config" is always the same fixture; load_config
        # is a no-op so no device is touched.
        config_file = 'vyos_config_config.cfg'
        self.get_config.return_value = load_fixture(config_file)
        self.load_config.return_value = None

    def test_vyos_config_unchanged(self):
        # Identical src and running config: no diff, no change reported.
        src = load_fixture('vyos_config_config.cfg')
        self.conn.get_diff = MagicMock(return_value=self.cliconf_obj.get_diff(src, src))
        set_module_args(dict(src=src))
        self.execute_module()

    def test_vyos_config_src(self):
        # A differing src config should produce exactly these set/delete lines.
        src = load_fixture('vyos_config_src.cfg')
        set_module_args(dict(src=src))
        candidate = '\n'.join(self.module.format_commands(src.splitlines()))
        commands = ['set system host-name foo', 'delete interfaces ethernet eth0 address']
        self.conn.get_diff = MagicMock(return_value=self.cliconf_obj.get_diff(candidate, self.running_config))
        self.execute_module(changed=True, commands=commands)

    def test_vyos_config_src_brackets(self):
        # Bracketed (hierarchical) src syntax must be flattened into set commands.
        src = load_fixture('vyos_config_src_brackets.cfg')
        set_module_args(dict(src=src))
        candidate = '\n'.join(self.module.format_commands(src.splitlines()))
        commands = ['set interfaces ethernet eth0 address 10.10.10.10/24', 'set system host-name foo']
        self.conn.get_diff = MagicMock(return_value=self.cliconf_obj.get_diff(candidate, self.running_config))
        self.execute_module(changed=True, commands=commands)

    def test_vyos_config_backup(self):
        # backup=True must expose the running config under '__backup__'.
        set_module_args(dict(backup=True))
        result = self.execute_module()
        self.assertIn('__backup__', result)

    def test_vyos_config_lines(self):
        commands = ['set system host-name foo']
        set_module_args(dict(lines=commands))
        candidate = '\n'.join(commands)
        self.conn.get_diff = MagicMock(return_value=self.cliconf_obj.get_diff(candidate, self.running_config))
        self.execute_module(changed=True, commands=commands)

    def test_vyos_config_config(self):
        # Explicit 'config' argument overrides the fetched running config as
        # the diff base.
        config = 'set system host-name localhost'
        new_config = ['set system host-name router']
        set_module_args(dict(lines=new_config, config=config))
        candidate = '\n'.join(new_config)
        self.conn.get_diff = MagicMock(return_value=self.cliconf_obj.get_diff(candidate, config))
        self.execute_module(changed=True, commands=new_config)

    def test_vyos_config_match_none(self):
        # match='none' pushes all lines regardless of the running config,
        # preserving their order (sort=False).
        lines = ['set system interfaces ethernet eth0 address 1.2.3.4/24',
                 'set system interfaces ethernet eth0 description test string']
        set_module_args(dict(lines=lines, match='none'))
        candidate = '\n'.join(lines)
        self.conn.get_diff = MagicMock(return_value=self.cliconf_obj.get_diff(candidate, None, diff_match='none'))
        self.execute_module(changed=True, commands=lines, sort=False)
| gpl-3.0 |
dominicelse/scipy | scipy/signal/_upfirdn.py | 40 | 6587 | # Code adapted from "upfirdn" python library with permission:
#
# Copyright (c) 2009, Motorola, Inc
#
# All Rights Reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Motorola nor the names of its contributors may be
# used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numpy as np
from ._upfirdn_apply import _output_len, _apply
__all__ = ['upfirdn', '_output_len']
def _pad_h(h, up):
"""Store coefficients in a transposed, flipped arrangement.
For example, suppose upRate is 3, and the
input number of coefficients is 10, represented as h[0], ..., h[9].
Then the internal buffer will look like this::
h[9], h[6], h[3], h[0], // flipped phase 0 coefs
0, h[7], h[4], h[1], // flipped phase 1 coefs (zero-padded)
0, h[8], h[5], h[2], // flipped phase 2 coefs (zero-padded)
"""
h_padlen = len(h) + (-len(h) % up)
h_full = np.zeros(h_padlen, h.dtype)
h_full[:len(h)] = h
h_full = h_full.reshape(-1, up).T[:, ::-1].ravel()
return h_full
class _UpFIRDn(object):
    """Precomputed polyphase-filter state for repeated upfirdn resampling."""

    def __init__(self, h, x_dtype, up, down):
        """Validate parameters and prepare the transposed/flipped filter."""
        coefs = np.asarray(h)
        if coefs.ndim != 1 or coefs.size == 0:
            raise ValueError('h must be 1D with non-zero length')
        # Work in at least float32 precision, widened by h and x dtypes.
        self._output_type = np.result_type(coefs.dtype, x_dtype, np.float32)
        coefs = np.asarray(coefs, self._output_type)
        self._up = int(up)
        self._down = int(down)
        if self._up < 1 or self._down < 1:
            raise ValueError('Both up and down must be >= 1')
        # This both transposes, and "flips" each phase for filtering;
        # the C code requires a contiguous buffer.
        self._h_trans_flip = np.ascontiguousarray(_pad_h(coefs, self._up))

    def apply_filter(self, x, axis=-1):
        """Apply the prepared filter to the specified axis of a nD signal x"""
        n_out = _output_len(len(self._h_trans_flip), x.shape[axis],
                            self._up, self._down)
        out_shape = np.asarray(x.shape)
        out_shape[axis] = n_out
        out = np.zeros(out_shape, dtype=self._output_type, order='C')
        axis = axis % x.ndim  # normalize negative axes for the C routine
        _apply(np.asarray(x, self._output_type),
               self._h_trans_flip, out,
               self._up, self._down, axis)
        return out
def upfirdn(h, x, up=1, down=1, axis=-1):
    """Upsample, FIR filter, and downsample.

    Parameters
    ----------
    h : array_like
        1-dimensional FIR (finite-impulse response) filter coefficients.
    x : array_like
        Input signal array.
    up : int, optional
        Upsampling rate. Default is 1.
    down : int, optional
        Downsampling rate. Default is 1.
    axis : int, optional
        The axis of the input data array along which to apply the
        linear filter. The filter is applied to each subarray along
        this axis. Default is -1.

    Returns
    -------
    y : ndarray
        The output signal array. Dimensions will be the same as `x` except
        for along `axis`, which will change size according to the `h`,
        `up`, and `down` parameters.

    Notes
    -----
    The algorithm is an implementation of the block diagram shown on page 129
    of the Vaidyanathan text [1]_ (Figure 4.3-8d).

    .. [1] P. P. Vaidyanathan, Multirate Systems and Filter Banks,
       Prentice Hall, 1993.

    The direct approach of upsampling by factor of P with zero insertion,
    FIR filtering of length ``N``, and downsampling by factor of Q is
    O(N*Q) per output sample. The polyphase implementation used here is
    O(N/P).

    .. versionadded:: 0.18

    Examples
    --------
    Simple operations:

    >>> from scipy.signal import upfirdn
    >>> upfirdn([1, 1, 1], [1, 1, 1])   # FIR filter
    array([ 1.,  2.,  3.,  2.,  1.])
    >>> upfirdn([1], [1, 2, 3], 3)  # upsampling with zeros insertion
    array([ 1.,  0.,  0.,  2.,  0.,  0.,  3.,  0.,  0.])
    >>> upfirdn([1, 1, 1], [1, 2, 3], 3)  # upsampling with sample-and-hold
    array([ 1.,  1.,  1.,  2.,  2.,  2.,  3.,  3.,  3.])
    >>> upfirdn([.5, 1, .5], [1, 1, 1], 2)  # linear interpolation
    array([ 0.5,  1. ,  1. ,  1. ,  1. ,  1. ,  0.5,  0. ])
    >>> upfirdn([1], np.arange(10), 1, 3)  # decimation by 3
    array([ 0.,  3.,  6.,  9.])
    >>> upfirdn([.5, 1, .5], np.arange(10), 2, 3)  # linear interp, rate 2/3
    array([ 0. ,  1. ,  2.5,  4. ,  5.5,  7. ,  8.5,  0. ])

    Apply a single filter to multiple signals:

    >>> x = np.reshape(np.arange(8), (4, 2))
    >>> x
    array([[0, 1],
           [2, 3],
           [4, 5],
           [6, 7]])

    Apply along the last dimension of ``x``:

    >>> h = [1, 1]
    >>> upfirdn(h, x, 2)
    array([[ 0.,  0.,  1.,  1.],
           [ 2.,  2.,  3.,  3.],
           [ 4.,  4.,  5.,  5.],
           [ 6.,  6.,  7.,  7.]])

    Apply along the 0th dimension of ``x``:

    >>> upfirdn(h, x, 2, axis=0)
    array([[ 0.,  1.],
           [ 0.,  1.],
           [ 2.,  3.],
           [ 2.,  3.],
           [ 4.,  5.],
           [ 4.,  5.],
           [ 6.,  7.],
           [ 6.,  7.]])
    """
    x = np.asarray(x)
    # Build the polyphase filter state once, then run it along `axis`
    # (equivalent to, but faster than, np.apply_along_axis).
    resampler = _UpFIRDn(h, x.dtype, up, down)
    return resampler.apply_filter(x, axis)
| bsd-3-clause |
mluscon/ci-dnf-stack | dnf-docker-test/features/steps/command_utils.py | 1 | 2910 | from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals
import codecs
import shlex
import subprocess
import six
__all__ = ["run"]
class CommandResult(object):
    """Outcome of one shell command: command line, exit status and output."""

    def __init__(self, **kwargs):
        # Accept only the four known attributes; anything else is an error.
        for attr, default in (("command", None), ("returncode", 0),
                              ("stdout", ""), ("stderr", "")):
            setattr(self, attr, kwargs.pop(attr, default))
        if kwargs:
            names = ", ".join("{!r}".format(x) for x in kwargs)
            raise ValueError("Unexpected: {!s}".format(names))

    @property
    def failed(self):
        """True when the command exited with a non-zero status."""
        return self.returncode != 0

    def clear(self):
        """Reset to the initial (no command run) state."""
        self.command = None
        self.returncode = 0
        self.stdout = ""
        self.stderr = ""
class Command(object):
    """Class-level subprocess runner with optional command substitution."""

    # Mapping of command name -> replacement command line; injected by
    # run() below from the behave context (ctx.command_map).
    COMMAND_MAP = {}

    @classmethod
    def run(cls, command, **kwargs):
        """
        Make a subprocess call, collect its output and return code.
        """
        assert isinstance(command, six.string_types)
        cmd_result = CommandResult()
        cmd_result.command = command
        if six.PY2 and isinstance(command, six.text_type):
            # In PY2, shlex.split() requires bytes string (non-unicode).
            # In PY3, shlex.split() accepts unicode string.
            command = codecs.encode(command, "utf-8")
        cmdargs = shlex.split(command)
        command0 = cmdargs[0]
        # Substitute the executable if a mapping was registered for it,
        # keeping the original arguments.
        real_command = cls.COMMAND_MAP.get(command0, None)
        if real_command:
            cmdargs0 = real_command.split()
            cmdargs = cmdargs0 + cmdargs[1:]
        try:
            process = subprocess.Popen(cmdargs,
                                       stdout=subprocess.PIPE,
                                       stderr=subprocess.PIPE,
                                       universal_newlines=True,
                                       **kwargs)
            out, err = process.communicate()
            if six.PY2:  # py3: we get unicode strings, py2 not
                out = six.text_type(out, process.stdout.encoding or "utf-8")
                err = six.text_type(err, process.stderr.encoding or "utf-8")
            # communicate() already waited for the process; poll() just
            # refreshes returncode.
            process.poll()
            assert process.returncode is not None
            cmd_result.stdout = out
            cmd_result.stderr = err
            cmd_result.returncode = process.returncode
            # Echo everything for the behave test logs.
            print("shell.command: {!r}".format(cmdargs))
            print("shell.command.stdout:\n{!s}".format(cmd_result.stdout))
            print("shell.command.stderr:\n{!s}".format(cmd_result.stderr))
        except OSError as e:
            # Executable missing / not runnable: report via the result
            # object instead of raising.
            cmd_result.stderr = "OSError: {!r}".format(e)
            cmd_result.returncode = int(e.errno)
            assert cmd_result.returncode != 0
        return cmd_result
def run(ctx, *args, **kwargs):
    """Run a command via :class:`Command`, honouring ctx.command_map."""
    Command.COMMAND_MAP = ctx.command_map
    result = Command.run(*args, **kwargs)
    return result
| gpl-3.0 |
nguyenphanhuynh/metasyntactic | protobuf-2.2.0/python/google/protobuf/internal/message_listener.py | 17 | 2635 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Defines a listener interface for observing certain
state transitions on Message objects.
Also defines a null implementation of this interface.
"""
__author__ = 'robinson@google.com (Will Robinson)'
class MessageListener(object):
    """Listens for transitions to nonempty and for invalidations of cached
    byte sizes.  Meant to be registered via Message._SetListener().

    Both methods are abstract; subclasses must override them.
    """

    def TransitionToNonempty(self):
        """Called the *first* time that this message becomes nonempty.

        Implementations are free (but not required) to call this method multiple
        times after the message has become nonempty.
        """
        raise NotImplementedError

    def ByteSizeDirty(self):
        """Called *every* time the cached byte size value
        for this object is invalidated (transitions from being
        "clean" to "dirty").
        """
        raise NotImplementedError
class NullMessageListener(object):
    """No-op MessageListener implementation."""

    def TransitionToNonempty(self):
        # Intentionally ignore the transition.
        pass

    def ByteSizeDirty(self):
        # Intentionally ignore the invalidation.
        pass
| apache-2.0 |
RedHat-Eng-PGM/python-schedules-tools | schedules_tools/tests/test_check_tasks.py | 1 | 2681 | import os
import pytest
from . import create_test_schedule
# Subdirectory holding test fixture data.
DATA_DIR = 'data'

# Name of this file's directory, e.g. 'tests'.
PARENT_DIRNAME = os.path.basename(os.path.dirname(os.path.realpath(__file__)))
# Package root: schedules_tools
BASE_DIR = os.path.dirname(os.path.realpath(
    os.path.join(__file__, os.pardir)))
# Absolute path of this test directory: schedules_tools/tests
CURR_DIR = os.path.join(BASE_DIR, PARENT_DIRNAME)
class TestCheckTaskExistence(object):
    """Tests for Schedule.check_for_taskname.

    check_for_taskname takes a dict of {task name: match_from_beginning}
    and returns the names that were NOT found in the schedule.
    """

    schedule = None

    @pytest.fixture(autouse=True)
    def setUp(self):
        # Fresh schedule for every test.
        self.schedule = create_test_schedule()

    def test_empty_check_list(self):
        # Nothing asked for -> nothing can be missing.
        check_tasks = dict()
        missing_tasks = self.schedule.check_for_taskname(check_tasks)
        assert len(missing_tasks) == 0

    def test_exact_match(self):
        # key = task name, value = match from beginning
        check_tasks = {
            'Planning': False,
            'Development': False,
        }
        missing_tasks = self.schedule.check_for_taskname(check_tasks)
        assert len(missing_tasks) == 0

    def test_exact_not_match(self):
        # key = task name, value = match from beginning
        check_tasks = {
            'Releaseeee': False,
            'Development': False,
        }
        missing_tasks = self.schedule.check_for_taskname(check_tasks)
        assert len(missing_tasks) == 1
        assert 'Releaseeee' in missing_tasks

    def test_startswith_match(self):
        # value=True means prefix matching.
        check_tasks = {
            'Devel': True,  # Should match 'Development'
            'P': True,      # Should match 'Planning', ...
        }
        missing_tasks = self.schedule.check_for_taskname(check_tasks)
        assert len(missing_tasks) == 0

    def test_startswith_not_match(self):
        check_tasks = {
            'Developing something completely different': True,
            'Tradada': True,
        }
        missing_tasks = self.schedule.check_for_taskname(check_tasks)
        assert len(missing_tasks) == 2
        assert 'Developing something completely different' in missing_tasks
        assert 'Tradada' in missing_tasks

    def test_combine_exact_startswith_match(self):
        # Exact and prefix modes can be mixed in one call.
        check_tasks = {
            'Planning': False,
            'Dev': True
        }
        missing_tasks = self.schedule.check_for_taskname(check_tasks)
        assert len(missing_tasks) == 0

    def test_combine_exact_startswith_not_matchs(self):
        check_tasks = {
            'Releaseeee': False,
            'Planning': False,
            'Dev': True,
            'Testnothing': True
        }
        missing_tasks = self.schedule.check_for_taskname(check_tasks)
        assert len(missing_tasks) == 2
        assert 'Releaseeee' in missing_tasks
        assert 'Testnothing' in missing_tasks
| gpl-3.0 |
blackzw/openwrt_sdk_dev1 | staging_dir/target-mips_r2_uClibc-0.9.33.2/usr/lib/python2.7/lib2to3/tests/pytree_idempotency.py | 129 | 2385 | #!/usr/bin/env python
# Copyright 2006 Google, Inc. All Rights Reserved.
# Licensed to PSF under a Contributor Agreement.
"""Main program for testing the infrastructure."""
__author__ = "Guido van Rossum <guido@python.org>"
# Support imports (need to be imported first)
from . import support
# Python imports
import os
import sys
import logging
# Local imports
from .. import pytree
import pgen2
from pgen2 import driver
logging.basicConfig()
def main():
    """Parse many Python files and verify str(tree) round-trips the source.

    Python 2 only (print statements).  With no CLI arguments, only the
    'example.py' smoke test runs; any argument triggers the full sweep of
    imported modules and everything on sys.path.
    """
    gr = driver.load_grammar("Grammar.txt")
    dr = driver.Driver(gr, convert=pytree.convert)

    fn = "example.py"
    tree = dr.parse_file(fn, debug=True)
    if not diff(fn, tree):
        print "No diffs."
    if not sys.argv[1:]:
        return  # Pass a dummy argument to run the complete test suite below

    problems = []

    # Process every imported module
    for name in sys.modules:
        mod = sys.modules[name]
        if mod is None or not hasattr(mod, "__file__"):
            continue
        fn = mod.__file__
        # Map compiled files back to their source.
        if fn.endswith(".pyc"):
            fn = fn[:-1]
        if not fn.endswith(".py"):
            continue
        print >>sys.stderr, "Parsing", fn
        tree = dr.parse_file(fn, debug=True)
        if diff(fn, tree):
            problems.append(fn)

    # Process every single module on sys.path (but not in packages)
    for dir in sys.path:
        try:
            names = os.listdir(dir)
        except os.error:
            # Nonexistent / unreadable sys.path entries are skipped.
            continue
        print >>sys.stderr, "Scanning", dir, "..."
        for name in names:
            if not name.endswith(".py"):
                continue
            print >>sys.stderr, "Parsing", name
            fn = os.path.join(dir, name)
            try:
                tree = dr.parse_file(fn, debug=True)
            except pgen2.parse.ParseError, err:
                print "ParseError:", err
            else:
                if diff(fn, tree):
                    problems.append(fn)

    # Show summary of problem files
    if not problems:
        print "No problems.  Congratulations!"
    else:
        print "Problems in following files:"
        for fn in problems:
            print "***", fn
def diff(fn, tree):
    """Diff str(tree) against the original file *fn*.

    Returns the exit status of the external ``diff`` command (0 means the
    round-trip is byte-identical).  NOTE(review): uses a literal scratch
    file named "@" in the current directory, removed afterwards.
    """
    f = open("@", "w")
    try:
        f.write(str(tree))
    finally:
        f.close()
    try:
        return os.system("diff -u %s @" % fn)
    finally:
        os.remove("@")
if __name__ == "__main__":
main()
| gpl-2.0 |
adobecs5/urp2015 | lib/python3.4/site-packages/pip/_vendor/distlib/scripts.py | 203 | 12894 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2014 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from io import BytesIO
import logging
import os
import re
import struct
import sys
from .compat import sysconfig, detect_encoding, ZipFile
from .resources import finder
from .util import (FileOperator, get_export_entry, convert_path,
get_executable, in_venv)
logger = logging.getLogger(__name__)
_DEFAULT_MANIFEST = '''
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" manifestVersion="1.0">
<assemblyIdentity version="1.0.0.0"
processorArchitecture="X86"
name="%s"
type="win32"/>
<!-- Identify the application security requirements. -->
<trustInfo xmlns="urn:schemas-microsoft-com:asm.v3">
<security>
<requestedPrivileges>
<requestedExecutionLevel level="asInvoker" uiAccess="false"/>
</requestedPrivileges>
</security>
</trustInfo>
</assembly>'''.strip()
# check if Python is called on the first line with this expression
FIRST_LINE_RE = re.compile(b'^#!.*pythonw?[0-9.]*([ \t].*)?$')
SCRIPT_TEMPLATE = '''# -*- coding: utf-8 -*-
if __name__ == '__main__':
import sys, re
def _resolve(module, func):
__import__(module)
mod = sys.modules[module]
parts = func.split('.')
result = getattr(mod, parts.pop(0))
for p in parts:
result = getattr(result, p)
return result
try:
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
func = _resolve('%(module)s', '%(func)s')
rc = func() # None interpreted as 0
except Exception as e: # only supporting Python >= 2.6
sys.stderr.write('%%s\\n' %% e)
rc = 1
sys.exit(rc)
'''
class ScriptMaker(object):
"""
A class to copy or create scripts from source scripts or callable
specifications.
"""
script_template = SCRIPT_TEMPLATE
executable = None # for shebangs
def __init__(self, source_dir, target_dir, add_launchers=True,
dry_run=False, fileop=None):
self.source_dir = source_dir
self.target_dir = target_dir
self.add_launchers = add_launchers
self.force = False
self.clobber = False
# It only makes sense to set mode bits on POSIX.
self.set_mode = (os.name == 'posix')
self.variants = set(('', 'X.Y'))
self._fileop = fileop or FileOperator(dry_run)
def _get_alternate_executable(self, executable, options):
if options.get('gui', False) and os.name == 'nt':
dn, fn = os.path.split(executable)
fn = fn.replace('python', 'pythonw')
executable = os.path.join(dn, fn)
return executable
def _get_shebang(self, encoding, post_interp=b'', options=None):
enquote = True
if self.executable:
executable = self.executable
enquote = False # assume this will be taken care of
elif not sysconfig.is_python_build():
executable = get_executable()
elif in_venv():
executable = os.path.join(sysconfig.get_path('scripts'),
'python%s' % sysconfig.get_config_var('EXE'))
else:
executable = os.path.join(
sysconfig.get_config_var('BINDIR'),
'python%s%s' % (sysconfig.get_config_var('VERSION'),
sysconfig.get_config_var('EXE')))
if options:
executable = self._get_alternate_executable(executable, options)
# If the user didn't specify an executable, it may be necessary to
# cater for executable paths with spaces (not uncommon on Windows)
if enquote and ' ' in executable:
executable = '"%s"' % executable
# Issue #51: don't use fsencode, since we later try to
# check that the shebang is decodable using utf-8.
executable = executable.encode('utf-8')
# in case of IronPython, play safe and enable frames support
if (sys.platform == 'cli' and '-X:Frames' not in post_interp
and '-X:FullFrames' not in post_interp):
post_interp += b' -X:Frames'
shebang = b'#!' + executable + post_interp + b'\n'
# Python parser starts to read a script using UTF-8 until
# it gets a #coding:xxx cookie. The shebang has to be the
# first line of a file, the #coding:xxx cookie cannot be
# written before. So the shebang has to be decodable from
# UTF-8.
try:
shebang.decode('utf-8')
except UnicodeDecodeError:
raise ValueError(
'The shebang (%r) is not decodable from utf-8' % shebang)
# If the script is encoded to a custom encoding (use a
# #coding:xxx cookie), the shebang has to be decodable from
# the script encoding too.
if encoding != 'utf-8':
try:
shebang.decode(encoding)
except UnicodeDecodeError:
raise ValueError(
'The shebang (%r) is not decodable '
'from the script encoding (%r)' % (shebang, encoding))
return shebang
def _get_script_text(self, entry):
return self.script_template % dict(module=entry.prefix,
func=entry.suffix)
manifest = _DEFAULT_MANIFEST
def get_manifest(self, exename):
base = os.path.basename(exename)
return self.manifest % base
def _write_script(self, names, shebang, script_bytes, filenames, ext):
use_launcher = self.add_launchers and os.name == 'nt'
linesep = os.linesep.encode('utf-8')
if not use_launcher:
script_bytes = shebang + linesep + script_bytes
else:
if ext == 'py':
launcher = self._get_launcher('t')
else:
launcher = self._get_launcher('w')
stream = BytesIO()
with ZipFile(stream, 'w') as zf:
zf.writestr('__main__.py', script_bytes)
zip_data = stream.getvalue()
script_bytes = launcher + shebang + linesep + zip_data
for name in names:
outname = os.path.join(self.target_dir, name)
if use_launcher:
n, e = os.path.splitext(outname)
if e.startswith('.py'):
outname = n
outname = '%s.exe' % outname
try:
self._fileop.write_binary_file(outname, script_bytes)
except Exception:
# Failed writing an executable - it might be in use.
logger.warning('Failed to write executable - trying to '
'use .deleteme logic')
dfname = '%s.deleteme' % outname
if os.path.exists(dfname):
os.remove(dfname) # Not allowed to fail here
os.rename(outname, dfname) # nor here
self._fileop.write_binary_file(outname, script_bytes)
logger.debug('Able to replace executable using '
'.deleteme logic')
try:
os.remove(dfname)
except Exception:
pass # still in use - ignore error
else:
if os.name == 'nt' and not outname.endswith('.' + ext):
outname = '%s.%s' % (outname, ext)
if os.path.exists(outname) and not self.clobber:
logger.warning('Skipping existing file %s', outname)
continue
self._fileop.write_binary_file(outname, script_bytes)
if self.set_mode:
self._fileop.set_executable_mode([outname])
filenames.append(outname)
def _make_script(self, entry, filenames, options=None):
post_interp = b''
if options:
args = options.get('interpreter_args', [])
if args:
args = ' %s' % ' '.join(args)
post_interp = args.encode('utf-8')
shebang = self._get_shebang('utf-8', post_interp, options=options)
script = self._get_script_text(entry).encode('utf-8')
name = entry.name
scriptnames = set()
if '' in self.variants:
scriptnames.add(name)
if 'X' in self.variants:
scriptnames.add('%s%s' % (name, sys.version[0]))
if 'X.Y' in self.variants:
scriptnames.add('%s-%s' % (name, sys.version[:3]))
if options and options.get('gui', False):
ext = 'pyw'
else:
ext = 'py'
self._write_script(scriptnames, shebang, script, filenames, ext)
def _copy_script(self, script, filenames):
adjust = False
script = os.path.join(self.source_dir, convert_path(script))
outname = os.path.join(self.target_dir, os.path.basename(script))
if not self.force and not self._fileop.newer(script, outname):
logger.debug('not copying %s (up-to-date)', script)
return
# Always open the file, but ignore failures in dry-run mode --
# that way, we'll get accurate feedback if we can read the
# script.
try:
f = open(script, 'rb')
except IOError:
if not self.dry_run:
raise
f = None
else:
encoding, lines = detect_encoding(f.readline)
f.seek(0)
first_line = f.readline()
if not first_line:
logger.warning('%s: %s is an empty file (skipping)',
self.get_command_name(), script)
return
match = FIRST_LINE_RE.match(first_line.replace(b'\r\n', b'\n'))
if match:
adjust = True
post_interp = match.group(1) or b''
if not adjust:
if f:
f.close()
self._fileop.copy_file(script, outname)
if self.set_mode:
self._fileop.set_executable_mode([outname])
filenames.append(outname)
else:
logger.info('copying and adjusting %s -> %s', script,
self.target_dir)
if not self._fileop.dry_run:
shebang = self._get_shebang(encoding, post_interp)
if b'pythonw' in first_line:
ext = 'pyw'
else:
ext = 'py'
n = os.path.basename(outname)
self._write_script([n], shebang, f.read(), filenames, ext)
if f:
f.close()
    @property
    def dry_run(self):
        # Delegate to the underlying file operator so both stay in sync.
        return self._fileop.dry_run
    @dry_run.setter
    def dry_run(self, value):
        self._fileop.dry_run = value
    if os.name == 'nt':
        # Executable launcher support.
        # Launchers are from https://bitbucket.org/vinay.sajip/simple_launcher/
        def _get_launcher(self, kind):
            """Return the binary launcher stub of the given ``kind`` that
            matches this interpreter's pointer size (32- or 64-bit)."""
            if struct.calcsize('P') == 8:   # 64-bit
                bits = '64'
            else:
                bits = '32'
            name = '%s%s.exe' % (kind, bits)
            # Issue 31: don't hardcode an absolute package name, but
            # determine it relative to the current package
            distlib_package = __name__.rsplit('.', 1)[0]
            # NOTE(review): 'finder' is presumably distlib's resource finder
            # imported above; '.bytes' yields the raw launcher payload — confirm.
            result = finder(distlib_package).find(name).bytes
            return result
# Public API follows
def make(self, specification, options=None):
"""
Make a script.
:param specification: The specification, which is either a valid export
entry specification (to make a script from a
callable) or a filename (to make a script by
copying from a source location).
:param options: A dictionary of options controlling script generation.
:return: A list of all absolute pathnames written to.
"""
filenames = []
entry = get_export_entry(specification)
if entry is None:
self._copy_script(specification, filenames)
else:
self._make_script(entry, filenames, options=options)
return filenames
def make_multiple(self, specifications, options=None):
"""
Take a list of specifications and make scripts from them,
:param specifications: A list of specifications.
:return: A list of all absolute pathnames written to,
"""
filenames = []
for specification in specifications:
filenames.extend(self.make(specification, options))
return filenames
| apache-2.0 |
acsone/account-financial-tools | account_auto_fy_sequence/models/account_journal.py | 38 | 1795 | # coding=utf-8
##############################################################################
#
# account_auto_fy_sequence module for Odoo
# Copyright (C) 2014 ACSONE SA/NV (<http://acsone.eu>)
# @author Laetitia Gangloff <laetitia.gangloff@acsone.eu>
#
# account_auto_fy_sequence is free software:
# you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License v3 or later
# as published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# account_auto_fy_sequence is distributed
# in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License v3 or later for more details.
#
# You should have received a copy of the GNU Affero General Public License
# v3 or later along with this program.
# If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import orm
class account_journal(orm.Model):
    _inherit = "account.journal"
    def create_sequence(self, cr, uid, vals, context=None):
        """ Create a new no_gap entry sequence for every new Journal,
        with a fiscal year prefix: the '%(year)s' placeholder produced
        by the standard implementation is replaced by '%(fy)s'.
        """
        # Let the standard addon create the sequence first.
        seq_id = super(account_journal, self).create_sequence(cr, uid, vals,
                                                              context=context)
        seq_obj = self.pool['ir.sequence']
        seq = seq_obj.browse(cr, uid, seq_id, context=context)
        # Swap the calendar-year placeholder for the fiscal-year one.
        prefix = seq.prefix.replace('%(year)s', '%(fy)s')
        seq_obj.write(cr, uid, seq_id, {'prefix': prefix}, context=context)
        return seq_id
| agpl-3.0 |
darkleons/lama | addons/knowledge/__init__.py | 436 | 1064 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import res_config
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
thurt/arangodb | 3rdParty/V8-4.3.61/third_party/python_26/Lib/sqlite3/dbapi2.py | 161 | 2615 | #-*- coding: ISO-8859-1 -*-
# pysqlite2/dbapi2.py: the DB-API 2.0 interface
#
# Copyright (C) 2004-2005 Gerhard Häring <gh@ghaering.de>
#
# This file is part of pysqlite.
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
# 3. This notice may not be removed or altered from any source distribution.
import datetime
import time
from _sqlite3 import *
paramstyle = "qmark"
threadsafety = 1
apilevel = "2.0"
Date = datetime.date
Time = datetime.time
Timestamp = datetime.datetime
def DateFromTicks(ticks):
    """Construct a date from a POSIX timestamp (DB-API 2.0)."""
    year, month, day = time.localtime(ticks)[:3]
    return datetime.date(year, month, day)
def TimeFromTicks(ticks):
    """Construct a time from a POSIX timestamp (DB-API 2.0)."""
    hour, minute, second = time.localtime(ticks)[3:6]
    return datetime.time(hour, minute, second)
def TimestampFromTicks(ticks):
    """Construct a timestamp from a POSIX timestamp (DB-API 2.0)."""
    parts = time.localtime(ticks)[:6]
    return datetime.datetime(*parts)
version_info = tuple([int(x) for x in version.split(".")])
sqlite_version_info = tuple([int(x) for x in sqlite_version.split(".")])
Binary = buffer
def register_adapters_and_converters():
    """Register default adapters (Python -> SQLite text) and converters
    (SQLite text -> Python) for date and datetime values."""
    def adapt_date(val):
        # datetime.date -> 'YYYY-MM-DD'
        return val.isoformat()
    def adapt_datetime(val):
        # datetime.datetime -> 'YYYY-MM-DD HH:MM:SS[.ffffff]'
        return val.isoformat(" ")
    def convert_date(val):
        return datetime.date(*map(int, val.split("-")))
    def convert_timestamp(val):
        datepart, timepart = val.split(" ")
        year, month, day = map(int, datepart.split("-"))
        timepart_full = timepart.split(".")
        hours, minutes, seconds = map(int, timepart_full[0].split(":"))
        if len(timepart_full) == 2:
            # NOTE(review): int() of the fractional digits assumes exactly
            # six digits (as adapt_datetime's isoformat emits); a shorter
            # fraction like '.5' would be misread as 5 microseconds — confirm
            # that only adapter-produced values reach this converter.
            microseconds = int(timepart_full[1])
        else:
            microseconds = 0
        val = datetime.datetime(year, month, day, hours, minutes, seconds, microseconds)
        return val
    register_adapter(datetime.date, adapt_date)
    register_adapter(datetime.datetime, adapt_datetime)
    register_converter("date", convert_date)
    register_converter("timestamp", convert_timestamp)
register_adapters_and_converters()
# Clean up namespace
del(register_adapters_and_converters)
| apache-2.0 |
madhurrajn/samashthi | lib/gevent/_ssl2.py | 9 | 17240 | # Wrapper module for _ssl. Written by Bill Janssen.
# Ported to gevent by Denis Bilenko.
"""SSL wrapper for socket objects on Python 2.7.8 and below.
For the documentation, refer to :mod:`ssl` module manual.
This module implements cooperative SSL socket wrappers.
"""
from __future__ import absolute_import
import ssl as __ssl__
try:
_ssl = __ssl__._ssl
except AttributeError:
_ssl = __ssl__._ssl2
import sys
import errno
from gevent._socket2 import socket
from gevent.socket import _fileobject, timeout_default
from gevent.socket import error as socket_error, EWOULDBLOCK
from gevent.socket import timeout as _socket_timeout
from gevent.hub import string_types, PYPY
try:
long
except NameError:
# Make us importable under Py3k for documentation
long = int
__implements__ = ['SSLSocket',
'wrap_socket',
'get_server_certificate',
'sslwrap_simple']
__imports__ = ['SSLError',
'RAND_status',
'RAND_egd',
'RAND_add',
'cert_time_to_seconds',
'get_protocol_name',
'DER_cert_to_PEM_cert',
'PEM_cert_to_DER_cert']
for name in __imports__[:]:
try:
value = getattr(__ssl__, name)
globals()[name] = value
except AttributeError:
__imports__.remove(name)
for name in dir(__ssl__):
if not name.startswith('_'):
value = getattr(__ssl__, name)
if isinstance(value, (int, long, tuple)) or isinstance(value, string_types):
globals()[name] = value
__imports__.append(name)
del name, value
# Py2.6 can get RAND_status added twice
__all__ = list(set(__implements__) | set(__imports__))
class SSLSocket(socket):
    """
    gevent `ssl.SSLSocket <https://docs.python.org/2.6/library/ssl.html#sslsocket-objects>`_
    for Pythons < 2.7.9.

    Blocking SSL operations are made cooperative: whenever the underlying
    ``_ssl`` object reports SSL_ERROR_WANT_READ/WANT_WRITE, the method waits
    on the corresponding gevent event instead of blocking the whole process.
    """
    def __init__(self, sock, keyfile=None, certfile=None,
                 server_side=False, cert_reqs=CERT_NONE,
                 ssl_version=PROTOCOL_SSLv23, ca_certs=None,
                 do_handshake_on_connect=True,
                 suppress_ragged_eofs=True,
                 ciphers=None):
        socket.__init__(self, _sock=sock)
        if PYPY:
            # NOTE(review): on PyPy gevent sockets track wrapper references
            # explicitly; presumably _drop releases the reference taken by
            # socket.__init__ above — confirm against gevent._socket2.
            sock._drop()
        if certfile and not keyfile:
            keyfile = certfile
        # see if it's connected
        try:
            socket.getpeername(self)
        except socket_error as e:
            if e.args[0] != errno.ENOTCONN:
                raise
            # no, no connection yet
            self._sslobj = None
        else:
            # yes, create the SSL object
            if ciphers is None:
                self._sslobj = _ssl.sslwrap(self._sock, server_side,
                                            keyfile, certfile,
                                            cert_reqs, ssl_version, ca_certs)
            else:
                self._sslobj = _ssl.sslwrap(self._sock, server_side,
                                            keyfile, certfile,
                                            cert_reqs, ssl_version, ca_certs,
                                            ciphers)
            if do_handshake_on_connect:
                self.do_handshake()
        self.keyfile = keyfile
        self.certfile = certfile
        self.cert_reqs = cert_reqs
        self.ssl_version = ssl_version
        self.ca_certs = ca_certs
        self.ciphers = ciphers
        self.do_handshake_on_connect = do_handshake_on_connect
        self.suppress_ragged_eofs = suppress_ragged_eofs
        # Counts live makefile() objects; close() is deferred until it drops.
        self._makefile_refs = 0
    def read(self, len=1024):
        """Read up to LEN bytes and return them.
        Return zero-length string on EOF."""
        while True:
            try:
                return self._sslobj.read(len)
            except SSLError as ex:
                # A ragged EOF (peer closed without close_notify) is reported
                # as normal end-of-stream ('') when suppression is enabled.
                if ex.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs:
                    return ''
                elif ex.args[0] == SSL_ERROR_WANT_READ:
                    if self.timeout == 0.0:
                        raise
                    sys.exc_clear()
                    self._wait(self._read_event, timeout_exc=_SSLErrorReadTimeout)
                elif ex.args[0] == SSL_ERROR_WANT_WRITE:
                    if self.timeout == 0.0:
                        raise
                    sys.exc_clear()
                    # note: using _SSLErrorReadTimeout rather than _SSLErrorWriteTimeout below is intentional
                    self._wait(self._write_event, timeout_exc=_SSLErrorReadTimeout)
                else:
                    raise
    def write(self, data):
        """Write DATA to the underlying SSL channel.  Returns
        number of bytes of DATA actually transmitted."""
        while True:
            try:
                return self._sslobj.write(data)
            except SSLError as ex:
                if ex.args[0] == SSL_ERROR_WANT_READ:
                    if self.timeout == 0.0:
                        raise
                    sys.exc_clear()
                    self._wait(self._read_event, timeout_exc=_SSLErrorWriteTimeout)
                elif ex.args[0] == SSL_ERROR_WANT_WRITE:
                    if self.timeout == 0.0:
                        raise
                    sys.exc_clear()
                    self._wait(self._write_event, timeout_exc=_SSLErrorWriteTimeout)
                else:
                    raise
    def getpeercert(self, binary_form=False):
        """Returns a formatted version of the data in the
        certificate provided by the other end of the SSL channel.
        Return None if no certificate was provided, {} if a
        certificate was provided, but not validated."""
        return self._sslobj.peer_certificate(binary_form)
    def cipher(self):
        # Returns (name, protocol, bits) or None before the handshake.
        if not self._sslobj:
            return None
        else:
            return self._sslobj.cipher()
    def send(self, data, flags=0, timeout=timeout_default):
        if timeout is timeout_default:
            timeout = self.timeout
        if self._sslobj:
            if flags != 0:
                raise ValueError(
                    "non-zero flags not allowed in calls to send() on %s" %
                    self.__class__)
            while True:
                try:
                    v = self._sslobj.write(data)
                except SSLError as x:
                    if x.args[0] == SSL_ERROR_WANT_READ:
                        if self.timeout == 0.0:
                            return 0
                        sys.exc_clear()
                        self._wait(self._read_event)
                    elif x.args[0] == SSL_ERROR_WANT_WRITE:
                        if self.timeout == 0.0:
                            return 0
                        sys.exc_clear()
                        self._wait(self._write_event)
                    else:
                        raise
                else:
                    return v
        else:
            # Not wrapped yet: behave like a plain gevent socket.
            return socket.send(self, data, flags, timeout)
    # is it possible for sendall() to send some data without encryption if another end shut down SSL?
    def sendall(self, data, flags=0):
        try:
            socket.sendall(self, data)
        except _socket_timeout as ex:
            if self.timeout == 0.0:
                # Python 2 simply *hangs* in this case, which is bad, but
                # Python 3 raises SSLWantWriteError. We do the same.
                raise SSLError(SSL_ERROR_WANT_WRITE)
            # Convert the socket.timeout back to the sslerror
            raise SSLError(*ex.args)
    def sendto(self, *args):
        if self._sslobj:
            raise ValueError("sendto not allowed on instances of %s" %
                             self.__class__)
        else:
            return socket.sendto(self, *args)
    def recv(self, buflen=1024, flags=0):
        if self._sslobj:
            if flags != 0:
                raise ValueError(
                    "non-zero flags not allowed in calls to recv() on %s" %
                    self.__class__)
            # QQQ Shouldn't we wrap the SSL_WANT_READ errors as socket.timeout errors to match socket.recv's behavior?
            return self.read(buflen)
        else:
            return socket.recv(self, buflen, flags)
    def recv_into(self, buffer, nbytes=None, flags=0):
        if buffer and (nbytes is None):
            nbytes = len(buffer)
        elif nbytes is None:
            nbytes = 1024
        if self._sslobj:
            if flags != 0:
                raise ValueError(
                    "non-zero flags not allowed in calls to recv_into() on %s" %
                    self.__class__)
            while True:
                try:
                    tmp_buffer = self.read(nbytes)
                    v = len(tmp_buffer)
                    buffer[:v] = tmp_buffer
                    return v
                except SSLError as x:
                    if x.args[0] == SSL_ERROR_WANT_READ:
                        if self.timeout == 0.0:
                            raise
                        sys.exc_clear()
                        self._wait(self._read_event)
                        continue
                    else:
                        raise
        else:
            return socket.recv_into(self, buffer, nbytes, flags)
    def recvfrom(self, *args):
        if self._sslobj:
            raise ValueError("recvfrom not allowed on instances of %s" %
                             self.__class__)
        else:
            return socket.recvfrom(self, *args)
    def recvfrom_into(self, *args):
        if self._sslobj:
            raise ValueError("recvfrom_into not allowed on instances of %s" %
                             self.__class__)
        else:
            return socket.recvfrom_into(self, *args)
    def pending(self):
        # Bytes already decrypted and buffered inside the SSL object.
        if self._sslobj:
            return self._sslobj.pending()
        else:
            return 0
    def _sslobj_shutdown(self):
        while True:
            try:
                return self._sslobj.shutdown()
            except SSLError as ex:
                if ex.args[0] == SSL_ERROR_EOF and self.suppress_ragged_eofs:
                    return ''
                elif ex.args[0] == SSL_ERROR_WANT_READ:
                    if self.timeout == 0.0:
                        raise
                    sys.exc_clear()
                    self._wait(self._read_event, timeout_exc=_SSLErrorReadTimeout)
                elif ex.args[0] == SSL_ERROR_WANT_WRITE:
                    if self.timeout == 0.0:
                        raise
                    sys.exc_clear()
                    self._wait(self._write_event, timeout_exc=_SSLErrorWriteTimeout)
                else:
                    raise
    def unwrap(self):
        # Gracefully shut SSL down and return the plain underlying socket.
        if self._sslobj:
            s = self._sslobj_shutdown()
            self._sslobj = None
            return socket(_sock=s)
        else:
            raise ValueError("No SSL wrapper around " + str(self))
    def shutdown(self, how):
        self._sslobj = None
        socket.shutdown(self, how)
    def close(self):
        # makefile() bumps _makefile_refs; only really close once no
        # file-like objects remain outstanding.
        if self._makefile_refs < 1:
            self._sslobj = None
            socket.close(self)
        else:
            self._makefile_refs -= 1
    if PYPY:
        def _reuse(self):
            self._makefile_refs += 1
        def _drop(self):
            if self._makefile_refs < 1:
                self.close()
            else:
                self._makefile_refs -= 1
    def do_handshake(self):
        """Perform a TLS/SSL handshake."""
        while True:
            try:
                return self._sslobj.do_handshake()
            except SSLError as ex:
                if ex.args[0] == SSL_ERROR_WANT_READ:
                    if self.timeout == 0.0:
                        raise
                    sys.exc_clear()
                    self._wait(self._read_event, timeout_exc=_SSLErrorHandshakeTimeout)
                elif ex.args[0] == SSL_ERROR_WANT_WRITE:
                    if self.timeout == 0.0:
                        raise
                    sys.exc_clear()
                    self._wait(self._write_event, timeout_exc=_SSLErrorHandshakeTimeout)
                else:
                    raise
    def connect(self, addr):
        """Connects to remote ADDR, and then wraps the connection in
        an SSL channel."""
        # Here we assume that the socket is client-side, and not
        # connected at the time of the call.  We connect it, then wrap it.
        if self._sslobj:
            raise ValueError("attempt to connect already-connected SSLSocket!")
        socket.connect(self, addr)
        if self.ciphers is None:
            self._sslobj = _ssl.sslwrap(self._sock, False, self.keyfile, self.certfile,
                                        self.cert_reqs, self.ssl_version,
                                        self.ca_certs)
        else:
            self._sslobj = _ssl.sslwrap(self._sock, False, self.keyfile, self.certfile,
                                        self.cert_reqs, self.ssl_version,
                                        self.ca_certs, self.ciphers)
        if self.do_handshake_on_connect:
            self.do_handshake()
    def accept(self):
        """Accepts a new connection from a remote client, and returns
        a tuple containing that new connection wrapped with a server-side
        SSL channel, and the address of the remote client."""
        sock = self._sock
        while True:
            try:
                client_socket, address = sock.accept()
                break
            except socket_error as ex:
                if ex.args[0] != EWOULDBLOCK or self.timeout == 0.0:
                    raise
                sys.exc_clear()
                self._wait(self._read_event)
        # Wrap the accepted connection with the same SSL configuration.
        sslobj = SSLSocket(client_socket,
                           keyfile=self.keyfile,
                           certfile=self.certfile,
                           server_side=True,
                           cert_reqs=self.cert_reqs,
                           ssl_version=self.ssl_version,
                           ca_certs=self.ca_certs,
                           do_handshake_on_connect=self.do_handshake_on_connect,
                           suppress_ragged_eofs=self.suppress_ragged_eofs,
                           ciphers=self.ciphers)
        return sslobj, address
    def makefile(self, mode='r', bufsize=-1):
        """Make and return a file-like object that
        works with the SSL connection.  Just use the code
        from the socket module."""
        if not PYPY:
            self._makefile_refs += 1
        # close=True so as to decrement the reference count when done with
        # the file-like object.
        return _fileobject(self, mode, bufsize, close=True)
if PYPY or not hasattr(SSLSocket, 'timeout'):
    # PyPy (and certain versions of CPython) doesn't have a direct
    # 'timeout' property on raw sockets, because that's not part of
    # the documented specification. We may wind up wrapping a raw
    # socket (when ssl is used with PyWSGI) or a gevent socket, which
    # does have a read/write timeout property as an alias for
    # get/settimeout, so make sure that's always the case because
    # pywsgi can depend on that.
    SSLSocket.timeout = property(lambda self: self.gettimeout(),
                                 lambda self, value: self.settimeout(value))
# Shared timeout exceptions raised by the cooperative _wait() calls above.
_SSLErrorReadTimeout = SSLError('The read operation timed out')
_SSLErrorWriteTimeout = SSLError('The write operation timed out')
_SSLErrorHandshakeTimeout = SSLError('The handshake operation timed out')
def wrap_socket(sock, keyfile=None, certfile=None,
                server_side=False, cert_reqs=CERT_NONE,
                ssl_version=PROTOCOL_SSLv23, ca_certs=None,
                do_handshake_on_connect=True,
                suppress_ragged_eofs=True, ciphers=None):
    """Wrap *sock* in a new :class:`SSLSocket` and return it."""
    return SSLSocket(
        sock,
        keyfile=keyfile,
        certfile=certfile,
        server_side=server_side,
        cert_reqs=cert_reqs,
        ssl_version=ssl_version,
        ca_certs=ca_certs,
        do_handshake_on_connect=do_handshake_on_connect,
        suppress_ragged_eofs=suppress_ragged_eofs,
        ciphers=ciphers,
    )
def get_server_certificate(addr, ssl_version=PROTOCOL_SSLv23, ca_certs=None):
    """Retrieve the certificate from the server at the specified address,
    and return it as a PEM-encoded string.
    If 'ca_certs' is specified, validate the server cert against it.
    If 'ssl_version' is specified, use it in the connection attempt.

    :param addr: A ``(host, port)`` tuple.
    :raises SSLError: if the handshake or certificate validation fails.
    """
    host, port = addr
    if (ca_certs is not None):
        # Only require (and verify) a certificate when CAs were supplied.
        cert_reqs = CERT_REQUIRED
    else:
        cert_reqs = CERT_NONE
    s = wrap_socket(socket(), ssl_version=ssl_version,
                    cert_reqs=cert_reqs, ca_certs=ca_certs)
    # Ensure the socket is closed even if connect()/getpeercert() raises;
    # the original leaked it on the error path.
    try:
        s.connect(addr)
        dercert = s.getpeercert(True)
    finally:
        s.close()
    return DER_cert_to_PEM_cert(dercert)
def sslwrap_simple(sock, keyfile=None, certfile=None):
    """A replacement for the old socket.ssl function.  Designed
    for compatability with Python 2.5 and earlier.  Will disappear in
    Python 3.0."""
    wrapped = SSLSocket(sock, keyfile=keyfile, certfile=certfile)
    return wrapped
| bsd-3-clause |
gcanal/Mono | mono/test/ex_start_stop.py | 1 | 1214 | #!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Fri Mar 10 15:28:50 2017
@author: robert
"""
from scapy.all import IP,sendrecv, ICMP, utils, STP, Ether, Dot3, Dot1Q, ARP, IPv6, TCP
import mono
import MySQLdb
import threading
import sys
import mono_config as mc
#Test the start/stop functions of the MONO api mono.run_session mono.give_stop_order
# Connect to the MONO database using the credentials from mono_config.
db = MySQLdb.connect(mc.db_host,mc.db_user_name, mc.db_password,mc.db_db_name)
print "#### Add Session"
# Create a capture session on wlan0 and start it.
id_session = mono.add_session({"name":"Robert Session", "iface":"wlan0"}, db)
print "created session with id " + str(id_session)
res = mono.run_session(id_session, db)
print (" Successfully started session" if res else " Failed to start session")
def check_session_active():
print "is_session active " + str(mono.is_session_active())
print "current session is " + str(mono.get_current_session_id())
def give_stop_order():
print "give stop order"
mono.set_run_order(False)
check_session_active()
return
# Schedule the stop order after 15 s, and a state check 2 s later to
# verify the session actually shut down.
print "waiting 15 second before giving stop order"
threading.Timer(15, give_stop_order, ()).start()
print "waiting 17 second before checking if session is active"
threading.Timer(17, check_session_active, ()).start()
o3project/ryu-oe | ryu/lib/packet/icmp.py | 42 | 9194 | # Copyright (C) 2012 Nippon Telegraph and Telephone Corporation.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import struct
from . import packet_base
from . import packet_utils
from ryu.lib import stringify
ICMP_ECHO_REPLY = 0
ICMP_DEST_UNREACH = 3
ICMP_SRC_QUENCH = 4
ICMP_REDIRECT = 5
ICMP_ECHO_REQUEST = 8
ICMP_TIME_EXCEEDED = 11
ICMP_ECHO_REPLY_CODE = 0
ICMP_HOST_UNREACH_CODE = 1
ICMP_PORT_UNREACH_CODE = 3
ICMP_TTL_EXPIRED_CODE = 0
class icmp(packet_base.PacketBase):
    """ICMP (RFC 792) header encoder/decoder class.
    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte order.
    __init__ takes the corresponding args in this order.
    .. tabularcolumns:: |l|L|
    ============== ====================
    Attribute      Description
    ============== ====================
    type           Type
    code           Code
    csum           CheckSum \
                   (0 means automatically-calculate when encoding)
    data           Payload. \
                   Either a bytearray, or \
                   ryu.lib.packet.icmp.echo or \
                   ryu.lib.packet.icmp.dest_unreach or \
                   ryu.lib.packet.icmp.TimeExceeded object \
                   NOTE for icmp.echo: \
                   This includes "unused" 16 bits and the following \
                   "Internet Header + 64 bits of Original Data Datagram" of \
                   the ICMP header. \
                   NOTE for icmp.dest_unreach and icmp.TimeExceeded: \
                   This includes "unused" 8 or 24 bits and the following \
                   "Internet Header + leading octets of original datagram" \
                   of the original packet.
    ============== ====================
    """
    # Fixed header: type (1 byte), code (1 byte), checksum (2 bytes).
    _PACK_STR = '!BBH'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    # Maps ICMP type numbers to the payload codec class registered for them.
    _ICMP_TYPES = {}
    @staticmethod
    def register_icmp_type(*args):
        """Class decorator registering a payload codec for the given
        ICMP type number(s)."""
        def _register_icmp_type(cls):
            for type_ in args:
                icmp._ICMP_TYPES[type_] = cls
            return cls
        return _register_icmp_type
    def __init__(self, type_=ICMP_ECHO_REQUEST, code=0, csum=0, data=None):
        super(icmp, self).__init__()
        self.type = type_
        self.code = code
        self.csum = csum
        self.data = data
    @classmethod
    def parser(cls, buf):
        (type_, code, csum) = struct.unpack_from(cls._PACK_STR, buf)
        msg = cls(type_, code, csum)
        offset = cls._MIN_LEN
        if len(buf) > offset:
            cls_ = cls._ICMP_TYPES.get(type_, None)
            if cls_:
                # A codec is registered for this type: decode the payload.
                msg.data = cls_.parser(buf, offset)
            else:
                # Unknown type: keep the raw payload bytes.
                msg.data = buf[offset:]
        return msg, None, None
    def serialize(self, payload, prev):
        hdr = bytearray(struct.pack(icmp._PACK_STR, self.type,
                                    self.code, self.csum))
        if self.data is not None:
            if self.type in icmp._ICMP_TYPES:
                hdr += self.data.serialize()
            else:
                hdr += self.data
        else:
            # No payload given: default to an empty Echo body.
            self.data = echo()
            hdr += self.data.serialize()
        if self.csum == 0:
            # Zero checksum means "compute now"; write it back at offset 2.
            self.csum = packet_utils.checksum(hdr)
            struct.pack_into('!H', hdr, 2, self.csum)
        return hdr
    def __len__(self):
        return self._MIN_LEN + len(self.data)
@icmp.register_icmp_type(ICMP_ECHO_REPLY, ICMP_ECHO_REQUEST)
class echo(stringify.StringifyMixin):
    """ICMP Echo / Echo Reply payload codec.
    Used together with ryu.lib.packet.icmp.icmp to encode and decode
    the body of ICMP Echo and Echo Reply messages.
    Attributes are in host byte order and double as the ``__init__``
    argument order.
    .. tabularcolumns:: |l|L|
    ============== ====================
    Attribute      Description
    ============== ====================
    id             Identifier
    seq            Sequence Number
    data           Internet Header + 64 bits of Original Data Datagram
    ============== ====================
    """
    # Identifier (2 bytes) followed by sequence number (2 bytes).
    _PACK_STR = '!HH'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    def __init__(self, id_=0, seq=0, data=None):
        super(echo, self).__init__()
        self.id = id_
        self.seq = seq
        self.data = data
    @classmethod
    def parser(cls, buf, offset):
        id_, seq = struct.unpack_from(cls._PACK_STR, buf, offset)
        msg = cls(id_, seq)
        rest = offset + cls._MIN_LEN
        # Anything beyond the fixed header is the echoed payload.
        if len(buf) > rest:
            msg.data = buf[rest:]
        return msg
    def serialize(self):
        packed = struct.pack(echo._PACK_STR, self.id, self.seq)
        out = bytearray(packed)
        if self.data is not None:
            out += self.data
        return out
    def __len__(self):
        # Fixed header plus the optional payload.
        if self.data is None:
            return self._MIN_LEN
        return self._MIN_LEN + len(self.data)
@icmp.register_icmp_type(ICMP_DEST_UNREACH)
class dest_unreach(stringify.StringifyMixin):
    """ICMP sub encoder/decoder class for Destination Unreachable Message.
    This is used with ryu.lib.packet.icmp.icmp for
    ICMP Destination Unreachable Message.
    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte order.
    __init__ takes the corresponding args in this order.
    [RFC1191] reserves bits for the "Next-Hop MTU" field.
    [RFC4884] introduced 8-bit data length attribute.
    .. tabularcolumns:: |l|p{35em}|
    ============== =====================================================
    Attribute      Description
    ============== =====================================================
    data_len       data length
    mtu            Next-Hop MTU
                   NOTE: This field is required when icmp code is 4
                   code 4 = fragmentation needed and DF set
    data           Internet Header + leading octets of original datagram
    ============== =====================================================
    """
    # One unused pad byte, 8-bit data length (RFC 4884), 16-bit MTU (RFC 1191).
    _PACK_STR = '!xBH'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    def __init__(self, data_len=0, mtu=0, data=None):
        super(dest_unreach, self).__init__()
        self.data_len = data_len
        self.mtu = mtu
        self.data = data
    @classmethod
    def parser(cls, buf, offset):
        (data_len, mtu) = struct.unpack_from(cls._PACK_STR,
                                             buf, offset)
        msg = cls(data_len, mtu)
        offset += cls._MIN_LEN
        # Remaining bytes carry the quoted original datagram.
        if len(buf) > offset:
            msg.data = buf[offset:]
        return msg
    def serialize(self):
        hdr = bytearray(struct.pack(dest_unreach._PACK_STR,
                                    self.data_len, self.mtu))
        if self.data is not None:
            hdr += self.data
        return hdr
    def __len__(self):
        length = self._MIN_LEN
        if self.data is not None:
            length += len(self.data)
        return length
@icmp.register_icmp_type(ICMP_TIME_EXCEEDED)
class TimeExceeded(stringify.StringifyMixin):
    """ICMP sub encoder/decoder class for Time Exceeded Message.
    This is used with ryu.lib.packet.icmp.icmp for
    ICMP Time Exceeded Message.
    An instance has the following attributes at least.
    Most of them are same to the on-wire counterparts but in host byte order.
    __init__ takes the corresponding args in this order.
    [RFC4884] introduced 8-bit data length attribute.
    .. tabularcolumns:: |l|L|
    ============== ====================
    Attribute      Description
    ============== ====================
    data_len       data length
    data           Internet Header + leading octets of original datagram
    ============== ====================
    """
    # One unused pad byte, 8-bit data length (RFC 4884), two unused bytes.
    _PACK_STR = '!xBxx'
    _MIN_LEN = struct.calcsize(_PACK_STR)
    def __init__(self, data_len=0, data=None):
        # Initialize the StringifyMixin base, for consistency with
        # echo/dest_unreach; the original implementation omitted this call.
        super(TimeExceeded, self).__init__()
        self.data_len = data_len
        self.data = data
    @classmethod
    def parser(cls, buf, offset):
        """Decode a Time Exceeded body starting at ``offset`` in ``buf``."""
        (data_len, ) = struct.unpack_from(cls._PACK_STR, buf, offset)
        msg = cls(data_len)
        offset += cls._MIN_LEN
        # Remaining bytes carry the quoted original datagram.
        if len(buf) > offset:
            msg.data = buf[offset:]
        return msg
    def serialize(self):
        """Encode this message body and return it as a bytearray."""
        hdr = bytearray(struct.pack(TimeExceeded._PACK_STR, self.data_len))
        if self.data is not None:
            hdr += self.data
        return hdr
    def __len__(self):
        length = self._MIN_LEN
        if self.data is not None:
            length += len(self.data)
        return length
icmp.set_classes(icmp._ICMP_TYPES)
| apache-2.0 |
ResearchSoftwareInstitute/MyHPOM | myhpom/tests/test_documentreview_task.py | 1 | 1045 | from django.test import TestCase
from django.core import mail
from myhpom.models import CloudFactoryDocumentRun, DocumentUrl
from myhpom.tasks import EmailUserDocumentReviewCompleted
from myhpom.tests.factories import AdvanceDirectiveFactory
class EmailUserDocumentReviewCompletedTestCase(TestCase):
    """
    * If the corresponding DocumentUrl has been deleted, no email should be sent.
    * Otherwise, a generic email should be sent to the user who owns the DocumentUrl
    """
    def setUp(self):
        # One CloudFactory run tied to a freshly created advance directive.
        self.run = CloudFactoryDocumentRun.objects.create(
            document_url=DocumentUrl.objects.create(advancedirective=AdvanceDirectiveFactory())
        )
        self.task = EmailUserDocumentReviewCompleted
    def test_email_sent(self):
        # NOTE(review): 'http'/'localhost' are presumably the scheme/host the
        # task uses when rendering the email — confirm against the task.
        self.task(self.run.id, 'http', 'localhost')
        self.assertEqual(len(mail.outbox), 1)
    def test_document_deleted_email_not_sent(self):
        # Deleting the DocumentUrl first must suppress the notification.
        self.run.document_url.delete()
        self.task(self.run.id, 'http', 'localhost')
        self.assertEqual(len(mail.outbox), 0)
| bsd-3-clause |
chennan47/osf.io | addons/wiki/routes.py | 22 | 4851 | """
Routes associated with the wiki page
"""
from framework.routing import Rule, json_renderer
from website.routes import OsfWebRenderer
from . import views
TEMPLATE_DIR = './addons/wiki/templates/'
# No settings endpoints are registered for the wiki addon; the empty
# rule list keeps the structure expected by the addon framework.
settings_routes = {
    'rules': [],
    'prefix': '/api/v1',
}
# NOTE: <wname> refers to a wiki page's key, e.g. 'Home'
# HTML page routes (rendered with edit.mako). Each rule lists both the
# project-level and the component ('node') URL form.
page_routes = {
    'rules': [
        # Home (Base) | GET
        Rule(
            [
                '/project/<pid>/wiki/',
                '/project/<pid>/node/<nid>/wiki/',
            ],
            'get',
            views.project_wiki_home,
            OsfWebRenderer('edit.mako', trust=False, template_dir=TEMPLATE_DIR)
        ),
        # View (Id) | GET
        Rule(
            [
                '/project/<pid>/wiki/id/<wid>/',
                '/project/<pid>/node/<nid>/wiki/id/<wid>/',
            ],
            'get',
            views.project_wiki_id_page,
            OsfWebRenderer('edit.mako', trust=False, template_dir=TEMPLATE_DIR)
        ),
        # Wiki | GET
        Rule(
            [
                '/project/<pid>/wiki/<wname>/',
                '/project/<pid>/node/<nid>/wiki/<wname>/',
            ],
            'get',
            views.project_wiki_view,
            OsfWebRenderer('edit.mako', trust=False, template_dir=TEMPLATE_DIR)
        ),
        # Edit | GET (legacy url, trigger redirect)
        Rule(
            [
                '/project/<pid>/wiki/<wname>/edit/',
                '/project/<pid>/node/<nid>/wiki/<wname>/edit/',
            ],
            'get',
            views.project_wiki_edit,
            OsfWebRenderer('edit.mako', trust=False, template_dir=TEMPLATE_DIR)
        ),
        # Compare | GET (legacy url, trigger redirect)
        Rule(
            [
                '/project/<pid>/wiki/<wname>/compare/<int:wver>/',
                '/project/<pid>/node/<nid>/wiki/<wname>/compare/<int:wver>/',
            ],
            'get',
            views.project_wiki_compare,
            OsfWebRenderer('edit.mako', trust=False, template_dir=TEMPLATE_DIR)
        ),
        # Edit | POST
        Rule(
            [
                '/project/<pid>/wiki/<wname>/',
                '/project/<pid>/node/<nid>/wiki/<wname>/',
            ],
            'post',
            views.project_wiki_edit_post,
            OsfWebRenderer('edit.mako', trust=False, template_dir=TEMPLATE_DIR)
        ),
    ]
}
# JSON API routes under /api/v1, rendered with json_renderer.
api_routes = {
    'rules': [
        # Home (Base) : GET
        Rule([
            '/project/<pid>/wiki/',
            '/project/<pid>/node/<nid>/wiki/',
        ], 'get', views.project_wiki_home, json_renderer),
        # Draft : GET
        Rule([
            '/project/<pid>/wiki/<wname>/draft/',
            '/project/<pid>/node/<nid>/wiki/<wname>/draft/',
        ], 'get', views.wiki_page_draft, json_renderer),
        # Content : GET
        # <wver> refers to a wiki page's version number
        Rule([
            '/project/<pid>/wiki/<wname>/content/',
            '/project/<pid>/node/<nid>/wiki/<wname>/content/',
            '/project/<pid>/wiki/<wname>/content/<wver>/',
            '/project/<pid>/node/<nid>/wiki/<wname>/content/<wver>/',
        ], 'get', views.wiki_page_content, json_renderer),
        # Validate | GET
        Rule([
            '/project/<pid>/wiki/<wname>/validate/',
            '/project/<pid>/node/<nid>/wiki/<wname>/validate/',
        ], 'get', views.project_wiki_validate_name, json_renderer),
        # Edit | POST
        Rule([
            '/project/<pid>/wiki/<wname>/edit/',
            '/project/<pid>/node/<nid>/wiki/<wname>/edit/',
        ], 'post', views.project_wiki_edit_post, json_renderer),
        # Rename : PUT
        Rule([
            '/project/<pid>/wiki/<wname>/rename/',
            '/project/<pid>/node/<nid>/wiki/<wname>/rename/',
        ], 'put', views.project_wiki_rename, json_renderer),
        # Delete : DELETE
        Rule([
            '/project/<pid>/wiki/<wname>/',
            '/project/<pid>/node/<nid>/wiki/<wname>/',
        ], 'delete', views.project_wiki_delete, json_renderer),
        # Change Wiki Settings | PUT
        Rule([
            '/project/<pid>/wiki/settings/',
            '/project/<pid>/node/<nid>/wiki/settings/',
        ], 'put', views.edit_wiki_settings, json_renderer),
        #Permissions Info for Settings Page | GET
        Rule(
            [
                '/project/<pid>/wiki/settings/',
                '/project/<pid>/node/<nid>/wiki/settings/'
            ],
            'get',
            views.get_node_wiki_permissions,
            json_renderer,
        ),
        # Wiki Menu : GET
        Rule([
            '/project/<pid>/wiki/<wname>/grid/',
            '/project/<pid>/node/<nid>/wiki/<wname>/grid/'
        ], 'get', views.project_wiki_grid_data, json_renderer),
    ],
    'prefix': '/api/v1',
}
| apache-2.0 |
google/starthinker | starthinker_ui/website/management/commands/scripts_to_csv.py | 1 | 1997 | ###########################################################################
#
# Copyright 2021 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
###########################################################################
from django.core.management.base import BaseCommand, CommandError
from starthinker_ui.recipe.scripts import Script
from starthinker.util.csv import rows_to_csv
class Command(BaseCommand):
    """Management command that dumps every StartThinker script as a CSV row.

    Quick one-off report; nothing depends on its output format. Columns are:
    name, description, region, products, owners, years active, status and
    link (the link is local and may need rewriting for production).

    Call without arguments using: python manage.py scripts_to_csv
    """

    help = 'Generate CSV Of Scripts'

    def get_scripts(self):
        # One tuple per script; tuple order defines the CSV column order.
        for entry in Script.get_scripts():
            owners = ', '.join(
                author.replace('@google.com', '') for author in entry.get_authors()
            )
            yield (
                entry.get_name(),                           # solution
                entry.get_description().replace('"', '\''), # description
                'Global',                                   # region
                ', '.join(entry.get_products()),            # entity
                owners,                                     # POC
                '%s - current' % entry.get_released().year, # year
                entry.get_status() or 'Live',               # status
                entry.get_link(),                           # link
            )

    def handle(self, *args, **kwargs):
        # rows_to_csv returns a file-like buffer; drain it to stdout.
        print(rows_to_csv(self.get_scripts()).read())
| apache-2.0 |
Jmainguy/ansible-modules-extras | cloud/centurylink/clc_group.py | 25 | 17067 | #!/usr/bin/python
#
# Copyright (c) 2015 CenturyLink
#
# This file is part of Ansible.
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>
#
DOCUMENTATION = '''
module: clc_group
short_description: Create/delete Server Groups at Centurylink Cloud
description:
- Create or delete Server Groups at Centurylink Centurylink Cloud
version_added: "2.0"
options:
name:
description:
- The name of the Server Group
required: True
description:
description:
- A description of the Server Group
required: False
parent:
description:
- The parent group of the server group. If parent is not provided, it creates the group at top level.
required: False
location:
description:
- Datacenter to create the group in. If location is not provided, the group gets created in the default datacenter
associated with the account
required: False
state:
description:
- Whether to create or delete the group
default: present
choices: ['present', 'absent']
wait:
description:
- Whether to wait for the tasks to finish before returning.
choices: [ True, False ]
default: True
required: False
requirements:
- python = 2.7
- requests >= 2.5.0
- clc-sdk
author: "CLC Runner (@clc-runner)"
notes:
- To use this module, it is required to set the below environment variables which enables access to the
Centurylink Cloud
- CLC_V2_API_USERNAME, the account login id for the centurylink cloud
- CLC_V2_API_PASSWORD, the account password for the centurylink cloud
- Alternatively, the module accepts the API token and account alias. The API token can be generated using the
CLC account login and password via the HTTP api call @ https://api.ctl.io/v2/authentication/login
- CLC_V2_API_TOKEN, the API token generated from https://api.ctl.io/v2/authentication/login
- CLC_ACCT_ALIAS, the account alias associated with the centurylink cloud
- Users can set CLC_V2_API_URL to specify an endpoint for pointing to a different CLC environment.
'''
EXAMPLES = '''
# Create a Server Group
---
- name: Create Server Group
hosts: localhost
gather_facts: False
connection: local
tasks:
- name: Create / Verify a Server Group at CenturyLink Cloud
clc_group:
name: 'My Cool Server Group'
parent: 'Default Group'
state: present
register: clc
- name: debug
debug: var=clc
# Delete a Server Group
---
- name: Delete Server Group
hosts: localhost
gather_facts: False
connection: local
tasks:
- name: Delete / Verify Absent a Server Group at CenturyLink Cloud
clc_group:
name: 'My Cool Server Group'
parent: 'Default Group'
state: absent
register: clc
- name: debug
debug: var=clc
'''
RETURN = '''
group:
description: The group information
returned: success
type: dict
sample:
{
"changeInfo":{
"createdBy":"service.wfad",
"createdDate":"2015-07-29T18:52:47Z",
"modifiedBy":"service.wfad",
"modifiedDate":"2015-07-29T18:52:47Z"
},
"customFields":[
],
"description":"test group",
"groups":[
],
"id":"bb5f12a3c6044ae4ad0a03e73ae12cd1",
"links":[
{
"href":"/v2/groups/wfad",
"rel":"createGroup",
"verbs":[
"POST"
]
},
{
"href":"/v2/servers/wfad",
"rel":"createServer",
"verbs":[
"POST"
]
},
{
"href":"/v2/groups/wfad/bb5f12a3c6044ae4ad0a03e73ae12cd1",
"rel":"self",
"verbs":[
"GET",
"PATCH",
"DELETE"
]
},
{
"href":"/v2/groups/wfad/086ac1dfe0b6411989e8d1b77c4065f0",
"id":"086ac1dfe0b6411989e8d1b77c4065f0",
"rel":"parentGroup"
},
{
"href":"/v2/groups/wfad/bb5f12a3c6044ae4ad0a03e73ae12cd1/defaults",
"rel":"defaults",
"verbs":[
"GET",
"POST"
]
},
{
"href":"/v2/groups/wfad/bb5f12a3c6044ae4ad0a03e73ae12cd1/billing",
"rel":"billing"
},
{
"href":"/v2/groups/wfad/bb5f12a3c6044ae4ad0a03e73ae12cd1/archive",
"rel":"archiveGroupAction"
},
{
"href":"/v2/groups/wfad/bb5f12a3c6044ae4ad0a03e73ae12cd1/statistics",
"rel":"statistics"
},
{
"href":"/v2/groups/wfad/bb5f12a3c6044ae4ad0a03e73ae12cd1/upcomingScheduledActivities",
"rel":"upcomingScheduledActivities"
},
{
"href":"/v2/groups/wfad/bb5f12a3c6044ae4ad0a03e73ae12cd1/horizontalAutoscalePolicy",
"rel":"horizontalAutoscalePolicyMapping",
"verbs":[
"GET",
"PUT",
"DELETE"
]
},
{
"href":"/v2/groups/wfad/bb5f12a3c6044ae4ad0a03e73ae12cd1/scheduledActivities",
"rel":"scheduledActivities",
"verbs":[
"GET",
"POST"
]
}
],
"locationId":"UC1",
"name":"test group",
"status":"active",
"type":"default"
}
'''
__version__ = '${version}'
from distutils.version import LooseVersion
try:
import requests
except ImportError:
REQUESTS_FOUND = False
else:
REQUESTS_FOUND = True
#
# Requires the clc-python-sdk.
# sudo pip install clc-sdk
#
try:
import clc as clc_sdk
from clc import CLCException
except ImportError:
CLC_FOUND = False
clc_sdk = None
else:
CLC_FOUND = True
class ClcGroup(object):
    """Implementation of the clc_group module: ensure a CenturyLink Cloud
    server group is present or absent, honoring Ansible check mode."""

    clc = None          # clc-sdk module handle (also bound per-instance in __init__)
    root_group = None   # root group of the datacenter; set by _get_group_tree_for_datacenter

    def __init__(self, module):
        """
        Construct module
        :param module: the AnsibleModule object driving this run
        """
        self.clc = clc_sdk
        self.module = module
        self.group_dict = {}    # group name -> (group, parent); filled in process_request

        # Fail fast when optional third-party dependencies are missing or too old.
        if not CLC_FOUND:
            self.module.fail_json(
                msg='clc-python-sdk required for this module')
        if not REQUESTS_FOUND:
            self.module.fail_json(
                msg='requests library is required for this module')
        if requests.__version__ and LooseVersion(requests.__version__) < LooseVersion('2.5.0'):
            self.module.fail_json(
                msg='requests library version should be >= 2.5.0')

        self._set_user_agent(self.clc)

    def process_request(self):
        """
        Execute the main code path, and handle the request
        :return: none
        """
        location = self.module.params.get('location')
        group_name = self.module.params.get('name')
        parent_name = self.module.params.get('parent')
        group_description = self.module.params.get('description')
        state = self.module.params.get('state')

        self._set_clc_credentials_from_env()
        self.group_dict = self._get_group_tree_for_datacenter(
            datacenter=location)

        if state == "absent":
            # NOTE(review): the local name ``requests`` shadows the imported
            # requests library within this method. Harmless here (the module
            # is not used below), but easy to trip over when editing.
            changed, group, requests = self._ensure_group_is_absent(
                group_name=group_name, parent_name=parent_name)
            if requests:
                self._wait_for_requests_to_complete(requests)
        else:
            changed, group = self._ensure_group_is_present(
                group_name=group_name, parent_name=parent_name, group_description=group_description)
        try:
            # Group objects carry the raw API payload in .data; fall back to
            # the plain name when only a string is available (e.g. check mode).
            group = group.data
        except AttributeError:
            group = group_name

        self.module.exit_json(changed=changed, group=group)

    @staticmethod
    def _define_module_argument_spec():
        """
        Define the argument spec for the ansible module
        :return: argument spec dictionary
        """
        argument_spec = dict(
            name=dict(required=True),
            description=dict(default=None),
            parent=dict(default=None),
            location=dict(default=None),
            state=dict(default='present', choices=['present', 'absent']),
            wait=dict(type='bool', default=True))
        return argument_spec

    def _set_clc_credentials_from_env(self):
        """
        Set the CLC Credentials on the sdk by reading environment variables
        :return: none
        """
        env = os.environ
        v2_api_token = env.get('CLC_V2_API_TOKEN', False)
        v2_api_username = env.get('CLC_V2_API_USERNAME', False)
        # NOTE(review): reads CLC_V2_API_PASSWD while the DOCUMENTATION block
        # above advertises CLC_V2_API_PASSWORD -- confirm which is intended.
        v2_api_passwd = env.get('CLC_V2_API_PASSWD', False)
        clc_alias = env.get('CLC_ACCT_ALIAS', False)
        api_url = env.get('CLC_V2_API_URL', False)

        if api_url:
            self.clc.defaults.ENDPOINT_URL_V2 = api_url

        # Token+alias takes precedence over username/password credentials.
        if v2_api_token and clc_alias:
            self.clc._LOGIN_TOKEN_V2 = v2_api_token
            self.clc._V2_ENABLED = True
            self.clc.ALIAS = clc_alias
        elif v2_api_username and v2_api_passwd:
            self.clc.v2.SetCredentials(
                api_username=v2_api_username,
                api_passwd=v2_api_passwd)
        else:
            return self.module.fail_json(
                msg="You must set the CLC_V2_API_USERNAME and CLC_V2_API_PASSWD "
                    "environment variables")

    def _ensure_group_is_absent(self, group_name, parent_name):
        """
        Ensure that group_name is absent by deleting it if necessary
        :param group_name: string - the name of the clc server group to delete
        :param parent_name: string - the name of the parent group for group_name
        :return: changed, group, results - whether a change was (or would be)
                 made, the affected group names, and the delete request objects
        """
        changed = False
        group = []
        results = []

        if self._group_exists(group_name=group_name, parent_name=parent_name):
            # In check mode the change is reported but no delete is issued.
            if not self.module.check_mode:
                group.append(group_name)
                result = self._delete_group(group_name)
                results.append(result)
            changed = True
        return changed, group, results

    def _delete_group(self, group_name):
        """
        Delete the provided server group
        :param group_name: string - the server group to delete
        :return: the CLC request object for the delete, or None
        """
        response = None
        group, parent = self.group_dict.get(group_name)
        try:
            response = group.Delete()
        except CLCException, ex:
            self.module.fail_json(msg='Failed to delete group :{0}. {1}'.format(
                group_name, ex.response_text
            ))
        return response

    def _ensure_group_is_present(
            self,
            group_name,
            parent_name,
            group_description):
        """
        Checks to see if a server group exists, creates it if it doesn't.
        :param group_name: the name of the group to validate/create
        :param parent_name: the name of the parent group for group_name
        :param group_description: a short description of the server group (used when creating)
        :return: (changed, group) -
            changed:  Boolean- whether a change was made,
            group:  A clc group object for the group
        """
        assert self.root_group, "Implementation Error: Root Group not set"
        # A missing parent name means "create directly under the root group".
        parent = parent_name if parent_name is not None else self.root_group.name
        description = group_description
        changed = False
        group = group_name

        parent_exists = self._group_exists(group_name=parent, parent_name=None)
        child_exists = self._group_exists(
            group_name=group_name,
            parent_name=parent)

        if parent_exists and child_exists:
            # Already in the desired state; return the existing group object.
            group, parent = self.group_dict[group_name]
            changed = False
        elif parent_exists and not child_exists:
            if not self.module.check_mode:
                group = self._create_group(
                    group=group,
                    parent=parent,
                    description=description)
            changed = True
        else:
            self.module.fail_json(
                msg="parent group: " +
                parent +
                " does not exist")

        return changed, group

    def _create_group(self, group, parent, description):
        """
        Create the provided server group
        :param group: clc_sdk.Group - the group to create
        :param parent: clc_sdk.Parent - the parent group for {group}
        :param description: string - a text description of the group
        :return: clc_sdk.Group - the created group
        """
        response = None
        (parent, grandparent) = self.group_dict[parent]
        try:
            response = parent.Create(name=group, description=description)
        except CLCException, ex:
            self.module.fail_json(msg='Failed to create group :{0}. {1}'.format(
                group, ex.response_text))
        return response

    def _group_exists(self, group_name, parent_name):
        """
        Check to see if a group exists
        :param group_name: string - the group to check
        :param parent_name: string - the parent of group_name; None matches any parent
        :return: boolean - whether the group exists
        """
        result = False
        if group_name in self.group_dict:
            (group, parent) = self.group_dict[group_name]
            if parent_name is None or parent_name == parent.name:
                result = True
        return result

    def _get_group_tree_for_datacenter(self, datacenter=None):
        """
        Walk the tree of groups for a datacenter
        :param datacenter: string - the datacenter to walk (ex: 'UC1')
        :return: a dictionary of groups and parents
        """
        self.root_group = self.clc.v2.Datacenter(
            location=datacenter).RootGroup()
        return self._walk_groups_recursive(
            parent_group=None,
            child_group=self.root_group)

    def _walk_groups_recursive(self, parent_group, child_group):
        """
        Walk a parent-child tree of groups, starting with the provided child group
        :param parent_group: clc_sdk.Group - the parent group to start the walk
        :param child_group: clc_sdk.Group - the child group to start the walk
        :return: a dictionary of groups and parents
        """
        result = {str(child_group): (child_group, parent_group)}
        groups = child_group.Subgroups().groups
        if len(groups) > 0:
            for group in groups:
                # Skip non-default (e.g. archive/template) group types.
                if group.type != 'default':
                    continue

                result.update(self._walk_groups_recursive(child_group, group))
        return result

    def _wait_for_requests_to_complete(self, requests_lst):
        """
        Waits until the CLC requests are complete if the wait argument is True
        :param requests_lst: The list of CLC request objects
        :return: none
        """
        if not self.module.params['wait']:
            return
        for request in requests_lst:
            request.WaitUntilComplete()
            for request_details in request.requests:
                if request_details.Status() != 'succeeded':
                    self.module.fail_json(
                        msg='Unable to process group request')

    @staticmethod
    def _set_user_agent(clc):
        # Tag all SDK HTTP traffic with this module's identity/version.
        if hasattr(clc, 'SetRequestsSession'):
            agent_string = "ClcAnsibleModule/" + __version__
            ses = requests.Session()
            ses.headers.update({"Api-Client": agent_string})
            ses.headers['User-Agent'] += " " + agent_string
            clc.SetRequestsSession(ses)
def main():
    """Module entry point: build the AnsibleModule and process the request."""
    spec = ClcGroup._define_module_argument_spec()
    module = AnsibleModule(argument_spec=spec, supports_check_mode=True)
    ClcGroup(module).process_request()
from ansible.module_utils.basic import * # pylint: disable=W0614
if __name__ == '__main__':
main()
| gpl-3.0 |
k3nnyfr/s2a_fr-nsis | s2a/Python/Lib/weakref.py | 187 | 10693 | """Weak reference support for Python.
This module is an implementation of PEP 205:
http://www.python.org/dev/peps/pep-0205/
"""
# Naming convention: Variables named "wr" are weak reference objects;
# they are called this instead of "ref" to avoid name collisions with
# the module-global ref() function imported from _weakref.
import UserDict
from _weakref import (
getweakrefcount,
getweakrefs,
ref,
proxy,
CallableProxyType,
ProxyType,
ReferenceType)
from _weakrefset import WeakSet
from exceptions import ReferenceError
ProxyTypes = (ProxyType, CallableProxyType)
__all__ = ["ref", "proxy", "getweakrefcount", "getweakrefs",
"WeakKeyDictionary", "ReferenceError", "ReferenceType", "ProxyType",
"CallableProxyType", "ProxyTypes", "WeakValueDictionary", 'WeakSet']
class WeakValueDictionary(UserDict.UserDict):
    """Mapping class that references values weakly.

    Entries in the dictionary will be discarded when no strong
    reference to the value exists anymore
    """
    # We inherit the constructor without worrying about the input
    # dictionary; since it uses our .update() method, we get the right
    # checks (if the other dictionary is a WeakValueDictionary,
    # objects are unwrapped on the way out, and we always wrap on the
    # way in).

    def __init__(self, *args, **kw):
        # ``remove`` is the shared weakref callback: when a referent dies it
        # purges the corresponding entry.  ``selfref`` is itself weak so the
        # callback does not keep this dictionary alive.
        def remove(wr, selfref=ref(self)):
            self = selfref()
            if self is not None:
                del self.data[wr.key]
        self._remove = remove
        UserDict.UserDict.__init__(self, *args, **kw)

    def __getitem__(self, key):
        o = self.data[key]()    # dereference the stored weakref
        if o is None:
            # Referent died but the callback has not purged the entry yet.
            raise KeyError, key
        else:
            return o

    def __contains__(self, key):
        try:
            o = self.data[key]()
        except KeyError:
            return False
        # A dead reference counts as "not present".
        return o is not None

    def has_key(self, key):
        try:
            o = self.data[key]()
        except KeyError:
            return False
        return o is not None

    def __repr__(self):
        return "<WeakValueDictionary at %s>" % id(self)

    def __setitem__(self, key, value):
        # KeyedRef remembers the key so the shared callback knows which
        # entry to delete when the value dies.
        self.data[key] = KeyedRef(value, self._remove, key)

    def copy(self):
        new = WeakValueDictionary()
        for key, wr in self.data.items():
            o = wr()
            if o is not None:
                new[key] = o
        return new

    __copy__ = copy

    def __deepcopy__(self, memo):
        from copy import deepcopy
        new = self.__class__()
        for key, wr in self.data.items():
            o = wr()
            if o is not None:
                # Keys are deep-copied; values are re-wrapped weakly.
                new[deepcopy(key, memo)] = o
        return new

    def get(self, key, default=None):
        try:
            wr = self.data[key]
        except KeyError:
            return default
        else:
            o = wr()
            if o is None:
                # The referent died between insertion and this lookup.
                return default
            else:
                return o

    def items(self):
        L = []
        for key, wr in self.data.items():
            o = wr()
            if o is not None:
                L.append((key, o))
        return L

    def iteritems(self):
        for wr in self.data.itervalues():
            value = wr()
            if value is not None:
                yield wr.key, value

    def iterkeys(self):
        return self.data.iterkeys()

    def __iter__(self):
        return self.data.iterkeys()

    def itervaluerefs(self):
        """Return an iterator that yields the weak references to the values.

        The references are not guaranteed to be 'live' at the time
        they are used, so the result of calling the references needs
        to be checked before being used.  This can be used to avoid
        creating references that will cause the garbage collector to
        keep the values around longer than needed.

        """
        return self.data.itervalues()

    def itervalues(self):
        for wr in self.data.itervalues():
            obj = wr()
            if obj is not None:
                yield obj

    def popitem(self):
        # Loop until an entry with a live referent turns up.
        while 1:
            key, wr = self.data.popitem()
            o = wr()
            if o is not None:
                return key, o

    def pop(self, key, *args):
        try:
            o = self.data.pop(key)()
        except KeyError:
            if args:
                return args[0]
            raise
        if o is None:
            # Entry existed but its referent is dead: treat as missing.
            raise KeyError, key
        else:
            return o

    def setdefault(self, key, default=None):
        try:
            wr = self.data[key]
        except KeyError:
            self.data[key] = KeyedRef(default, self._remove, key)
            return default
        else:
            return wr()

    def update(self, dict=None, **kwargs):
        d = self.data
        if dict is not None:
            if not hasattr(dict, "items"):
                dict = type({})(dict)
            for key, o in dict.items():
                d[key] = KeyedRef(o, self._remove, key)
        if len(kwargs):
            self.update(kwargs)

    def valuerefs(self):
        """Return a list of weak references to the values.

        The references are not guaranteed to be 'live' at the time
        they are used, so the result of calling the references needs
        to be checked before being used.  This can be used to avoid
        creating references that will cause the garbage collector to
        keep the values around longer than needed.

        """
        return self.data.values()

    def values(self):
        L = []
        for wr in self.data.values():
            o = wr()
            if o is not None:
                L.append(o)
        return L
class KeyedRef(ref):
    """Weak reference that remembers the mapping key it was stored under.

    WeakValueDictionary stores one of these per entry so that a single
    shared callback can read ``wr.key`` instead of closing over the key,
    avoiding one function object per stored entry.
    """

    __slots__ = ("key",)

    def __new__(type, ob, callback, key):
        # ref objects are created in __new__, so the extra slot is
        # populated here rather than in __init__.
        self = ref.__new__(type, ob, callback)
        self.key = key
        return self

    def __init__(self, ob, callback, key):
        # Drop ``key`` before delegating; ref.__init__ expects (ob, callback).
        super(KeyedRef, self).__init__(ob, callback)
class WeakKeyDictionary(UserDict.UserDict):
    """ Mapping class that references keys weakly.

    Entries in the dictionary will be discarded when there is no
    longer a strong reference to the key. This can be used to
    associate additional data with an object owned by other parts of
    an application without adding attributes to those objects. This
    can be especially useful with objects that override attribute
    accesses.
    """

    def __init__(self, dict=None):
        self.data = {}
        # Shared weakref callback: purge the entry whose (weak) key died.
        # ``selfref`` is weak so the callback does not keep us alive.
        def remove(k, selfref=ref(self)):
            self = selfref()
            if self is not None:
                del self.data[k]
        self._remove = remove
        if dict is not None: self.update(dict)

    def __delitem__(self, key):
        # A fresh ref(key) (no callback) hashes and compares equal to the
        # stored reference, so plain dict deletion works.
        del self.data[ref(key)]

    def __getitem__(self, key):
        return self.data[ref(key)]

    def __repr__(self):
        return "<WeakKeyDictionary at %s>" % id(self)

    def __setitem__(self, key, value):
        self.data[ref(key, self._remove)] = value

    def copy(self):
        new = WeakKeyDictionary()
        for key, value in self.data.items():
            o = key()
            if o is not None:
                new[o] = value
        return new

    __copy__ = copy

    def __deepcopy__(self, memo):
        from copy import deepcopy
        new = self.__class__()
        for key, value in self.data.items():
            o = key()
            if o is not None:
                # Keys stay weakly referenced; only values are deep-copied.
                new[o] = deepcopy(value, memo)
        return new

    def get(self, key, default=None):
        return self.data.get(ref(key),default)

    def has_key(self, key):
        try:
            wr = ref(key)
        except TypeError:
            # Objects that cannot be weakly referenced are never keys here.
            return 0
        return wr in self.data

    def __contains__(self, key):
        try:
            wr = ref(key)
        except TypeError:
            return 0
        return wr in self.data

    def items(self):
        L = []
        for key, value in self.data.items():
            o = key()
            if o is not None:
                L.append((o, value))
        return L

    def iteritems(self):
        for wr, value in self.data.iteritems():
            key = wr()
            if key is not None:
                yield key, value

    def iterkeyrefs(self):
        """Return an iterator that yields the weak references to the keys.

        The references are not guaranteed to be 'live' at the time
        they are used, so the result of calling the references needs
        to be checked before being used.  This can be used to avoid
        creating references that will cause the garbage collector to
        keep the keys around longer than needed.

        """
        return self.data.iterkeys()

    def iterkeys(self):
        for wr in self.data.iterkeys():
            obj = wr()
            if obj is not None:
                yield obj

    def __iter__(self):
        return self.iterkeys()

    def itervalues(self):
        return self.data.itervalues()

    def keyrefs(self):
        """Return a list of weak references to the keys.

        The references are not guaranteed to be 'live' at the time
        they are used, so the result of calling the references needs
        to be checked before being used.  This can be used to avoid
        creating references that will cause the garbage collector to
        keep the keys around longer than needed.

        """
        return self.data.keys()

    def keys(self):
        L = []
        for wr in self.data.keys():
            o = wr()
            if o is not None:
                L.append(o)
        return L

    def popitem(self):
        # Skip entries whose key has already died.
        while 1:
            key, value = self.data.popitem()
            o = key()
            if o is not None:
                return o, value

    def pop(self, key, *args):
        return self.data.pop(ref(key), *args)

    def setdefault(self, key, default=None):
        return self.data.setdefault(ref(key, self._remove),default)

    def update(self, dict=None, **kwargs):
        d = self.data
        if dict is not None:
            if not hasattr(dict, "items"):
                dict = type({})(dict)
            for key, value in dict.items():
                d[ref(key, self._remove)] = value
        if len(kwargs):
            self.update(kwargs)
| gpl-3.0 |
JeremyAgost/gemrb | gemrb/GUIScripts/iwd2/GUIMA.py | 5 | 11109 | # -*-python-*-
# GemRB - Infinity Engine Emulator
# Copyright (C) 2003-2004 The GemRB Project
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# GUIMA.py - scripts to control map windows from GUIMA and GUIWMAP winpacks
###################################################
import GemRB
import GUICommon
import GUICommonWindows
from GUIDefines import *
# Window/control handles shared by the handlers below; None while closed.
MapWindow = None          # area map screen
NoteWindow = None         # map-note editor popup
WorldMapWindow = None     # world map screen
OptionsWindow = None      # menu/options pane shown alongside the map
OldOptionsWindow = None   # saved GUICommonWindows.OptionsWindow, restored on close
PortraitWindow = None     # party portrait pane shown alongside the map
OldPortraitWindow = None  # saved GUICommonWindows.PortraitWindow, restored on close
def RevealMap ():
    """Click handler for the map in reveal mode (farsight effect).

    Closes the map screen opened by ShowMap, reveals an area around the
    clicked map position and unpauses the game.
    """
    global MapWindow
    global OldPortraitWindow, OldOptionsWindow

    # Tear down the map screen and restore the saved option/portrait windows.
    if GUICommon.CloseOtherWindow (ShowMap):
        if MapWindow:
            MapWindow.Unload ()
        if OptionsWindow:
            OptionsWindow.Unload ()
        if PortraitWindow:
            PortraitWindow.Unload ()

        MapWindow = None
        #this window type should block the game
        GemRB.SetVar ("OtherWindow", -1)
        GUICommon.GameWindow.SetVisible(WINDOW_VISIBLE)
        GemRB.UnhideGUI ()
        GUICommonWindows.PortraitWindow = OldPortraitWindow
        OldPortraitWindow = None
        GUICommonWindows.OptionsWindow = OldOptionsWindow
        OldOptionsWindow = None

    # Coordinates of the click, stored by the map control.
    PosX = GemRB.GetVar ("MapControlX")
    PosY = GemRB.GetVar ("MapControlY")

    GemRB.RevealArea (PosX, PosY, 30, 1)
    GemRB.GamePause (0,0)
    return
###################################################
# for farsight effect
###################################################
def ShowMap ():
    """Open the area map in 'reveal' mode (used by the farsight effect).

    The map control is put into IE_GUI_MAP_REVEAL_MAP mode so a click calls
    RevealMap instead of editing notes; the regular map buttons are locked
    and the side windows are shown grayed out.  Calling it again while open
    closes the screen.
    """
    global MapWindow, OptionsWindow, PortraitWindow
    global OldPortraitWindow, OldOptionsWindow

    # Second invocation while open: close everything and restore state.
    if GUICommon.CloseOtherWindow (ShowMap):
        if MapWindow:
            MapWindow.Unload ()
        if OptionsWindow:
            OptionsWindow.Unload ()
        if PortraitWindow:
            PortraitWindow.Unload ()

        MapWindow = None
        #this window type should block the game
        GemRB.SetVar ("OtherWindow", -1)
        GUICommon.GameWindow.SetVisible(WINDOW_VISIBLE)
        GemRB.UnhideGUI ()
        GUICommonWindows.PortraitWindow = OldPortraitWindow
        OldPortraitWindow = None
        GUICommonWindows.OptionsWindow = OldOptionsWindow
        OldOptionsWindow = None
        return

    GemRB.HideGUI ()
    GUICommon.GameWindow.SetVisible(WINDOW_INVISIBLE)

    GemRB.LoadWindowPack ("GUIMAP", 640, 480)
    MapWindow = Window = GemRB.LoadWindow (2)
    #this window type blocks the game normally, but map window doesn't
    GemRB.SetVar ("OtherWindow", MapWindow.ID)
    #saving the original portrait window
    OldOptionsWindow = GUICommonWindows.OptionsWindow
    OptionsWindow = GemRB.LoadWindow (0)
    GUICommonWindows.SetupMenuWindowControls (OptionsWindow, 0, ShowMap)
    OldPortraitWindow = GUICommonWindows.PortraitWindow
    PortraitWindow = GUICommonWindows.OpenPortraitWindow ()
    OptionsWindow.SetFrame ()

    # World Map button (locked while revealing)
    Button = Window.GetControl (1)
    Button.SetState (IE_GUI_BUTTON_LOCKED)

    # Hide or Show mapnotes button (locked while revealing)
    Button = Window.GetControl (3)
    Button.SetState (IE_GUI_BUTTON_LOCKED)

    Label = Window.GetControl (0x10000003)
    Label.SetText ("")

    # Map Control, wired so a press reveals around the clicked spot
    Window.CreateMapControl (2, 0, 0, 0, 0, 0x10000003, "FLAG1")
    Map = Window.GetControl (2)
    GemRB.SetVar ("ShowMapNotes",IE_GUI_MAP_REVEAL_MAP)
    Map.SetVarAssoc ("ShowMapNotes", IE_GUI_MAP_REVEAL_MAP)
    Map.SetEvent (IE_GUI_MAP_ON_PRESS, RevealMap)

    Window.SetVisible (WINDOW_VISIBLE)
    OptionsWindow.SetVisible (WINDOW_GRAYED)
    PortraitWindow.SetVisible (WINDOW_GRAYED)
    OptionsWindow.SetVisible (WINDOW_FRONT)
    PortraitWindow.SetVisible (WINDOW_FRONT)
    Window.SetVisible (WINDOW_FRONT)
    Map.SetStatus(IE_GUI_CONTROL_FOCUSED)
    GemRB.GamePause (0,0)
    return
###################################################
def OpenMapWindow ():
    """Open (or, when already open, close) the regular area map screen.

    Right click edits map notes, double click closes the screen, and the
    world-map button switches to the world map.
    """
    global MapWindow, OptionsWindow, PortraitWindow
    global OldPortraitWindow, OldOptionsWindow

    # Already open: unload our windows and restore the saved ones.
    if GUICommon.CloseOtherWindow (OpenMapWindow):
        if MapWindow:
            MapWindow.Unload ()
        if OptionsWindow:
            OptionsWindow.Unload ()
        if PortraitWindow:
            PortraitWindow.Unload ()

        MapWindow = None
        GemRB.SetVar ("OtherWindow", -1)
        GUICommon.GameWindow.SetVisible(WINDOW_VISIBLE)
        GemRB.UnhideGUI ()
        GUICommonWindows.PortraitWindow = OldPortraitWindow
        OldPortraitWindow = None
        GUICommonWindows.OptionsWindow = OldOptionsWindow
        OldOptionsWindow = None
        GUICommonWindows.SetSelectionChangeHandler (None)
        return

    GemRB.HideGUI ()
    GUICommon.GameWindow.SetVisible(WINDOW_INVISIBLE)
    GemRB.LoadWindowPack ("GUIMAP", 800, 600)
    MapWindow = Window = GemRB.LoadWindow (2)
    GemRB.SetVar ("OtherWindow", MapWindow.ID)
    #saving the original portrait window
    OldPortraitWindow = GUICommonWindows.PortraitWindow
    PortraitWindow = GUICommonWindows.OpenPortraitWindow ()
    OldOptionsWindow = GUICommonWindows.OptionsWindow
    OptionsWindow = GemRB.LoadWindow (0)
    GUICommonWindows.SetupMenuWindowControls (OptionsWindow, 0, OpenMapWindow)
    OptionsWindow.SetFrame ()

    # World Map button
    Button = Window.GetControl (1)
    Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenWorldMapWindowInside)

    # Hide or Show mapnotes toggle
    Button = Window.GetControl (3)
    Button.SetFlags (IE_GUI_BUTTON_CHECKBOX, OP_OR)
    # Is this an option?
    GemRB.SetVar ("ShowMapNotes", IE_GUI_MAP_VIEW_NOTES)
    Button.SetVarAssoc ("ShowMapNotes", IE_GUI_MAP_VIEW_NOTES)

    Label = Window.GetControl (0x10000003)
    Label.SetText ("")

    # Map Control
    Window.CreateMapControl (2, 0, 0, 0, 0, 0x10000003, "FLAG1")
    Map = Window.GetControl (2)
    Map.SetVarAssoc ("ShowMapNotes", IE_GUI_MAP_VIEW_NOTES)
    Map.SetEvent (IE_GUI_MAP_ON_RIGHT_PRESS, AddNoteWindow)
    Map.SetEvent (IE_GUI_MAP_ON_DOUBLE_PRESS, LeftDoublePressMap)
    OptionsWindow.SetVisible(WINDOW_VISIBLE)
    PortraitWindow.SetVisible(WINDOW_VISIBLE)
    Window.SetVisible(WINDOW_VISIBLE)
    GUICommonWindows.SetSelectionChangeHandler(None)
def LeftDoublePressMap ():
    """Double click on the map control closes the map screen."""
    OpenMapWindow ()
def CloseNoteWindow ():
    """Dismiss the map-note editor and re-enable the map window."""
    if NoteWindow:
        NoteWindow.Unload ()
    MapWindow.SetVisible (WINDOW_VISIBLE)
def RemoveMapNote ():
    """Delete the note at the clicked map position, then close the editor."""
    x = GemRB.GetVar ("MapControlX")
    y = GemRB.GetVar ("MapControlY")
    # Empty text with color 0 clears the note at (x, y).
    GemRB.SetMapnote (x, y, 0, "")
    CloseNoteWindow ()
def QueryText ():
    """Collect the note editor's text row by row into one string.

    Rows are fetched via the shared "row" variable until an empty row is
    returned; every fetched row gets a trailing newline.
    """
    collected = []
    row = 0
    while True:
        GemRB.SetVar ("row", row)
        NoteLabel.SetVarAssoc ("row", row)
        text = NoteLabel.QueryText ()
        if not text:
            break
        collected.append (text + "\n")
        row += 1
    return "".join (collected)
def SetMapNote ():
    """Store the edited note at the clicked map position and close the editor.

    Reads the click coordinates saved by the map control, the text collected
    from the note editor and the flag color chosen by the user, then writes
    the note.
    """
    PosX = GemRB.GetVar ("MapControlX")
    PosY = GemRB.GetVar ("MapControlY")
    # Removed an unused ``Label = NoteWindow.GetControl (1)`` lookup; the
    # text comes from QueryText(), not from that control handle.
    Text = QueryText ()
    Color = GemRB.GetVar ("Color")
    GemRB.SetMapnote (PosX, PosY, Color, Text)
    CloseNoteWindow ()
    return
def SetFocusBack ():
    """Give keyboard focus back to the note edit field (after a color click)."""
    NoteLabel.SetStatus (IE_GUI_CONTROL_FOCUSED)
def AddNoteWindow ():
    """Open the map-note editor (right-click handler of the map control).

    Pre-fills the editor with the note text currently shown in the map
    window's label, wires up the eight flag-color radio buttons and the
    Set/Cancel/Remove buttons, then shows the editor over the grayed map.
    """
    global NoteWindow, NoteLabel

    # The map control mirrors the clicked note's text into this label.
    Label = MapWindow.GetControl (0x10000003)
    Text = Label.QueryText ()
    NoteWindow = GemRB.LoadWindow (5)
    #convert to multiline, destroy unwanted resources
    NoteLabel = NoteWindow.GetControl (1)
    #0 is the default Scrollbar ID
    NoteLabel = NoteLabel.ConvertEdit (0)
    NoteLabel.SetText (Text)
    NoteLabel.SetStatus (IE_GUI_CONTROL_FOCUSED)
    # Removed a leftover Python 2 debug print of NoteLabel.QueryText().

    # One radio button per flag color; clicking returns focus to the editor.
    for i in range(8):
        Label = NoteWindow.GetControl (4+i)
        Label.SetSprites ("FLAG1", i,0,1,2,0)
        Label.SetFlags (IE_GUI_BUTTON_RADIOBUTTON, OP_SET)
        Label.SetVarAssoc ("Color", i)
        Label.SetEvent (IE_GUI_BUTTON_ON_PRESS, SetFocusBack)

    #set
    Label = NoteWindow.GetControl (0)
    Label.SetEvent (IE_GUI_BUTTON_ON_PRESS, SetMapNote)
    Label.SetText (11973)
    Label.SetFlags (IE_GUI_BUTTON_DEFAULT, OP_OR)
    #cancel
    Label = NoteWindow.GetControl (2)
    Label.SetEvent (IE_GUI_BUTTON_ON_PRESS, CloseNoteWindow)
    Label.SetText (13727)
    Label.SetFlags (IE_GUI_BUTTON_CANCEL, OP_OR)
    #remove
    Label = NoteWindow.GetControl (3)
    Label.SetEvent (IE_GUI_BUTTON_ON_PRESS, RemoveMapNote)
    Label.SetText (13957)

    MapWindow.SetVisible (WINDOW_GRAYED)
    NoteWindow.SetVisible (WINDOW_VISIBLE)
    return
def OpenWorldMapWindowInside ():
    """Switch from the area map screen straight to the world map.

    Travel is disabled (-1): this is the view-only world map reachable from
    the map screen's world-map button.
    """
    global MapWindow

    OpenMapWindow () # closes the currently open map window
    MapWindow = -1
    # Removed a leftover Python 2 debug print of MapWindow here.
    WorldMapWindowCommon (-1)
    return
def OpenWorldMapWindow ():
    """Open the world map in travel mode (direction taken from 'Travel')."""
    WorldMapWindowCommon (GemRB.GetVar ("Travel"))
def MoveToNewArea ():
	# Travel to the destination area currently selected on the world map.
	global WorldMapWindow, WorldMapControl

	tmp = WorldMapControl.GetDestinationArea (1)
	if tmp["Distance"]==-1:
		# Destination unreachable: no route from the current area
		print "Invalid target", tmp
		return
	CloseWorldMapWindow ()
	GemRB.CreateMovement (tmp["Destination"], tmp["Entrance"], tmp["Direction"])
	return
def ChangeTooltip ():
	# Update the world map tooltip with "<Distance>: <n>" for the area
	# under the mouse, or clear it when no area is hovered.
	#
	# Fix: the original declared "global str" and assigned the tooltip text
	# to a module-level name shadowing the builtin str(), which would break
	# any later str(...) call in this module. Use a plain local instead.
	global WorldMapWindow, WorldMapControl

	tmp = WorldMapControl.GetDestinationArea ()
	if tmp:
		# 23084 is the "Distance" string reference
		tooltip = "%s: %d" % (GemRB.GetString (23084), tmp["Distance"])
	else:
		tooltip = ""
	WorldMapControl.SetTooltip (tooltip)
	return
def CloseWorldMapWindow ():
	# Tear down the world map and restore the portrait/options windows
	# that were swapped out when the world map was opened.
	global WorldMapWindow, WorldMapControl
	global OldPortraitWindow, OldOptionsWindow

	# NOTE(review): assert is stripped under "python -O", yet the call's
	# side effect is required here — confirm this is intentional.
	assert GUICommon.CloseOtherWindow (CloseWorldMapWindow)

	if WorldMapWindow:
		WorldMapWindow.Unload ()
	if PortraitWindow:
		PortraitWindow.Unload ()
	if OptionsWindow:
		OptionsWindow.Unload ()
	WorldMapWindow = None
	WorldMapControl = None
	# Restore the windows saved by WorldMapWindowCommon
	GUICommonWindows.PortraitWindow = OldPortraitWindow
	OldPortraitWindow = None
	GUICommonWindows.OptionsWindow = OldOptionsWindow
	OldOptionsWindow = None
	GemRB.SetVar ("OtherWindow", -1)
	GUICommon.GameWindow.SetVisible(WINDOW_VISIBLE)
	GemRB.UnhideGUI ()
	return
def WorldMapWindowCommon (Travel):
	# Build and show the world map window.
	# Travel >= 0: travelling is enabled in that direction;
	# Travel == -1: the map is opened read-only.
	global WorldMapWindow, WorldMapControl
	global OptionsWindow, PortraitWindow
	global OldPortraitWindow, OldOptionsWindow

	# Toggle: if the world map is already open, this closes it instead
	if GUICommon.CloseOtherWindow (CloseWorldMapWindow):
		return

	GemRB.HideGUI ()
	GUICommon.GameWindow.SetVisible(WINDOW_INVISIBLE)
	GemRB.LoadWindowPack ("GUIWMAP",800, 600)
	WorldMapWindow = Window = GemRB.LoadWindow (2)
	#(fuzzie just copied this from the map window code..)
	GemRB.SetVar ("OtherWindow", WorldMapWindow.ID)
	#saving the original portrait window
	OldPortraitWindow = GUICommonWindows.PortraitWindow
	PortraitWindow = GUICommonWindows.OpenPortraitWindow ()
	OldOptionsWindow = GUICommonWindows.OptionsWindow
	OptionsWindow = GemRB.LoadWindow (0)
	GUICommonWindows.SetupMenuWindowControls (OptionsWindow, 0, OpenMapWindow)
	OptionsWindow.SetFrame ()
	# Control 4 is the world map itself
	Window.CreateWorldMapControl (4, 0, 62, 640, 418, Travel, "infofont")
	WorldMapControl = Window.GetControl (4)
	WorldMapControl.SetAnimation ("WMDAG")
	WorldMapControl.SetEvent (IE_GUI_WORLDMAP_ON_PRESS, MoveToNewArea)
	WorldMapControl.SetEvent (IE_GUI_MOUSE_ENTER_WORLDMAP, ChangeTooltip)
	# Done
	Button = Window.GetControl (0)
	if Travel>=0:
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenWorldMapWindow)
	else:
		Button.SetEvent (IE_GUI_BUTTON_ON_PRESS, OpenMapWindow)
	Window.SetVisible (WINDOW_VISIBLE)
###################################################
# End of file GUIMA.py
| gpl-2.0 |
falkerson/fuel-plugin-service | config.py | 1 | 1445 | # Server Specific Configurations
# Bind address/port for the standalone Pecan HTTP server.
server = {
    'port': '8080',
    'host': '0.0.0.0'
}

# Pecan Application Configurations
app = {
    'root': 'fuel_plugin_service.controllers.root.RootController',
    'modules': ['fuel_plugin_service'],
    'template_path': '%(confdir)s/fuel_plugin_service/templates',
    'debug': True,  # NOTE(review): disable debug for production deployments
    'errors': {
        404: '/error/404',
        '__force_dict__': True
    }
}

# Logging configuration consumed by Pecan (dictConfig-style layout).
logging = {
    'root': {'level': 'INFO', 'handlers': ['console']},
    'loggers': {
        'fuel_plugin_service': {'level': 'DEBUG', 'handlers': ['console']},
        'pecan': {'level': 'DEBUG', 'handlers': ['console']},
        'py.warnings': {'handlers': ['console']},
        '__force_dict__': True
    },
    'handlers': {
        'console': {
            'level': 'DEBUG',
            'class': 'logging.StreamHandler',
            'formatter': 'color'
        }
    },
    'formatters': {
        'simple': {
            'format': ('%(asctime)s %(levelname)-5.5s [%(name)s]'
                       '[%(threadName)s] %(message)s')
        },
        'color': {
            '()': 'pecan.log.ColorFormatter',
            'format': ('%(asctime)s [%(padded_color_levelname)s] [%(name)s]'
                       '[%(threadName)s] %(message)s'),
            '__force_dict__': True
        }
    }
}
# Custom Configurations must be in Python dictionary format::
#
# foo = {'bar':'baz'}
#
# All configurations are accessible at::
# pecan.conf
| apache-2.0 |
frmichel/vo-support-tools | CE/monitor-ce/processors/running_ratio_day_night.py | 1 | 4813 | #!/usr/bin/python
#
# This tools exploits the data of csv files produced by script collect-ce-job-status.py, to
# compute the average ratio R/(R+W) during day time (12h, 16h, 20h) or night time (0h, 4h, 8h),
# as a function of time.
#
# Results are stored in file running_ratio_day_night.csv.
import os
import csv
import globvars
# -------------------------------------------------------------------------
# Compute the mean ratio R/(R+W) during day (12h, 16h, 20h) or night (0h, 4h, 8h)
# Input:
# dataFiles: list of tuples: (fileName, datetime, date, hour, rows, sum_VO_Waiting, sum_VO_Running)
# where
# - datetime is formated as "YYYY-MM-DD HH:MM:SS"
# - date is only the date part YYYY:MM:DD, and hour is only the hour HH (used for filtering data in excel file)
# - rows is a dictionnary wich keys are the hostnames and values are another dictionnary with the following keys:
# 'Site'
# 'ImplName', 'ImplVer'
# 'CE_Total', 'VO_Total'
# 'CE_Running', 'VO_Running'
# 'CE_Waiting', 'VO_Waiting'
# 'CE_Running', 'VO_Running'
# 'CE_FreeSlots', 'VO_FreeSlots'
# 'CE_MaxTotal', 'VO_MaxTotal'
# 'CE_MaxWaiting', 'VO_MaxWaiting'
# 'CE_MaxRunning', 'VO_MaxRunning'
# 'CE_WRT', 'VO_WRT'
# 'CE_MaxTotal', 'VO_MaxTotal'
# 'CE_ERT', 'VO_ERT'
# 'CE_Status'
# -------------------------------------------------------------------------
def process(dataFiles):
    """Compute, for each date present in dataFiles, the waiting/running job
    counts at 0h/4h/8h (night) and 12h/16h/20h (day) and the corresponding
    R/(R+W) ratios, then write one CSV row per date to
    running_ratio_day_night.csv."""
    # Global variables
    DECIMAL_MARK = globvars.DECIMAL_MARK
    DEBUG = globvars.DEBUG
    OUTPUT_DIR = globvars.OUTPUT_DIR

    print "Computing the mean ratio R/(R+W) grouped by day or night as a function of time..."
    outputFile = OUTPUT_DIR + os.sep + "running_ratio_day_night.csv"
    outputf = open(outputFile, 'wb')
    writer = csv.writer(outputf, delimiter=';')
    writer.writerow(["# Date", "Wait 0h", "Run 0h", "R/(R+W) 0h", "Wait 4h", "Run 4h", "R/(R+W) 4h", "Wait 8h", "Run 8h", "R/(R+W) 8h", "Wait 12h", "Run 12h", "R/(R+W) 12h", "Wait 16h", "Run 16h", "R/(R+W) 16h", "Wait 20h", "Run 20h", "R/(R+W) 20h", "Mean Wait night", "Mean Run night", "Mean R/(R+W) night", "Mean Wait day", "Mean Run day", "Mean R/(R+W) day"])

    # First, build the list of dates when we have data
    listDates = []
    for (fileName, datetime, date, hour, rows, sum_VO_Waiting, sum_VO_Running) in dataFiles:
        if date not in listDates: listDates.append(date)

    # Then for each of these dates, collect data at 0h, 4h, 8h, 12h, 16h and 20h
    for theDate in listDates:
        # W* = waiting jobs, R* = running jobs, ratio* = R/(R+W) at hour *
        W0 = W4 = W8 = W12 = W16 = W20 = 0.0
        R0 = R4 = R8 = R12 = R16 = R20 = 0.0
        ratio0 = ratio4 = ratio8 = ratio12 = ratio16 = ratio20 = 0.0

        # Loop on all files that we have at the given date
        for (fileName, datetime, date, hour, rows, sum_VO_Waiting, sum_VO_Running) in dataFiles:
            if date == theDate:
                R = float(sum_VO_Running)
                W = float(sum_VO_Waiting)
                if hour == '00':
                    W0 = W
                    R0 = R
                    if R+W > 0: ratio0 = R/(R+W)
                elif hour == '04':
                    W4 = W
                    R4 = R
                    if R+W > 0: ratio4 = R/(R+W)
                elif hour == '08':
                    W8 = W
                    R8 = R
                    if R+W > 0: ratio8 = R/(R+W)
                elif hour == '12':
                    W12 = W
                    R12 = R
                    if R+W > 0: ratio12 = R/(R+W)
                elif hour == '16':
                    W16 = W
                    R16 = R
                    if R+W > 0: ratio16 = R/(R+W)
                elif hour == '20':
                    W20 = W
                    R20 = R
                    if R+W > 0: ratio20 = R/(R+W)
        # end loop on all files looking for those at the given date

        # Aggregate ratios over the three night and three day samples
        ratioNight = ratioDay = 0.0
        if (W0+W4+W8+R0+R4+R8) > 0: ratioNight = (R0+R4+R8)/(W0+W4+W8+R0+R4+R8)
        if (W12+W16+W20+R12+R16+R20) > 0: ratioDay = (R12+R16+R20)/(W12+W16+W20+R12+R16+R20)

        # DECIMAL_MARK converts '.' for locale-aware spreadsheet import
        writer.writerow([theDate,
            int(W0), int(R0), str(round(ratio0, 4)).replace('.', DECIMAL_MARK),
            int(W4), int(R4), str(round(ratio4, 4)).replace('.', DECIMAL_MARK),
            int(W8), int(R8), str(round(ratio8, 4)).replace('.', DECIMAL_MARK),
            int(W12), int(R12), str(round(ratio12, 4)).replace('.', DECIMAL_MARK),
            int(W16), int(R16), str(round(ratio16, 4)).replace('.', DECIMAL_MARK),
            int(W20), int(R20), str(round(ratio20, 4)).replace('.', DECIMAL_MARK),
            int((W0+W4+W8)/3), int((R0+R4+R8)/3),str(round(ratioNight, 4)).replace('.', DECIMAL_MARK),
            int((W12+W16+W20)/3), int((R12+R16+R20)/3),str(round(ratioDay, 4)).replace('.', DECIMAL_MARK)
            ])
    # end loop on all single dates

    outputf.close()
| mit |
laurb9/rich-traceback | test/formatter_test.py | 1 | 1347 | #
# Copyright (C)2014 Laurentiu Badea
#
"""
Test RichTracebackFormatter class.
"""
import unittest
import logging
import rich_traceback.formatter
import StringIO
class LogTest(unittest.TestCase):  # exercises RichTracebackFormatter end to end
    def testLogException(self):  # CAUTION: expected output hard-codes this file's line numbers -- do not add or remove lines
        output = StringIO.StringIO()  # capture formatted log records in memory
        self.logger = logging.getLogger('root')
        self.logger.setLevel(logging.DEBUG)
        con_log = logging.StreamHandler(output)
        con_log.setLevel(logging.DEBUG)
        con_log.setFormatter(rich_traceback.formatter.RichTracebackFormatter())
        self.logger.addHandler(con_log)
        def f(a, b):  # helper that raises ZeroDivisionError when b == 0
            self.logger.info('executing a/b')
            return a/b
        self.logger.debug('calling f')
        try:
            f(5, 0)
        except:
            self.logger.exception("While trying to frobnicate")
        self.assertEqual(output.getvalue(), """\
DEBUG root formatter_test.testLogException:28 calling f
INFO root formatter_test.f:26 executing a/b
ERROR root formatter_test.testLogException:32 While trying to frobnicate
<type 'exceptions.ZeroDivisionError'>: ZeroDivisionError('integer division or modulo by zero',) ([2] frames following)
[1] formatter_test, f(a=5, b=0) at line 27: return a/b
[0] formatter_test, testLogException(self=<formatter_test.LogTest testMethod=testLogException>) at line 30: f(5, 0)
""")
| apache-2.0 |
johnsonlau/multivimdriver-vmware-vio | vio/vio/swagger/views/image/views.py | 1 | 5495 | # Copyright (c) 2017 VMware, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
import json
from rest_framework import status
from rest_framework.response import Response
from rest_framework.views import APIView
from vio.pub.msapi import extsys
from vio.pub.vim.vimapi.glance import OperateImage
from vio.swagger import image_utils
from vio.pub.exceptions import VimDriverVioException
class GetDeleteImageView(APIView):
    """Fetch or delete a single Glance image of a tenant on a VIO VIM."""

    def get(self, request, vimid, tenantid, imageid):
        """Return the image attributes merged with the VIM identification."""
        try:
            vim_info = extsys.get_vim_by_id(vimid)
            vim_info['tenant'] = tenantid
        except VimDriverVioException as e:
            return Response(data={'error': str(e)}, status=e.status_code)

        image_op = OperateImage.OperateImage(vim_info)
        try:
            found = image_op.get_vim_image(imageid)
            rsp = image_utils.image_formatter(found)
            rsp.update(image_utils.vim_formatter(vim_info, tenantid))
            return Response(data=rsp, status=status.HTTP_200_OK)
        except Exception as e:
            # Reuse the backend's HTTP status when the exception carries one.
            code = getattr(e, "http_status",
                           status.HTTP_500_INTERNAL_SERVER_ERROR)
            return Response(data={'error': str(e)}, status=code)

    def delete(self, request, vimid, tenantid, imageid):
        """Delete the image; respond 204 No Content on success."""
        try:
            vim_info = extsys.get_vim_by_id(vimid)
            vim_info['tenant'] = tenantid
        except VimDriverVioException as e:
            return Response(data={'error': str(e)}, status=e.status_code)

        image_op = OperateImage.OperateImage(vim_info)
        try:
            image_op.delete_vim_image(imageid)
        except Exception as e:
            code = getattr(e, "http_status",
                           status.HTTP_500_INTERNAL_SERVER_ERROR)
            return Response(data={'error': str(e)}, status=code)
        return Response(status=status.HTTP_204_NO_CONTENT)
class CreateListImagesView(APIView):
    """List the Glance images of a tenant, or create/register a new one."""

    def get(self, request, vimid, tenantid):
        """Return every image visible to the tenant, plus VIM info.

        Query parameters are forwarded unchanged to the Glance client.
        """
        try:
            vim_info = extsys.get_vim_by_id(vimid)
            vim_info['tenant'] = tenantid
        except VimDriverVioException as e:
            return Response(data={'error': str(e)}, status=e.status_code)

        query_data = dict(request.query_params)
        image_instance = OperateImage.OperateImage(vim_info)
        try:
            images = image_instance.get_vim_images(**query_data)
            rsp = {'images': [image_utils.image_formatter(image)
                              for image in images]}
            rsp.update(image_utils.vim_formatter(vim_info, tenantid))
            return Response(data=rsp, status=status.HTTP_200_OK)
        except Exception as e:
            # Reuse the backend's HTTP status when the exception carries one.
            if hasattr(e, "http_status"):
                return Response(data={'error': str(e)}, status=e.http_status)
            return Response(data={'error': str(e)},
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    def post(self, request, vimid, tenantid):
        """Register an image from a JSON body.

        If an image with the same name already exists, it is returned with
        returnCode '0'; otherwise the image is created and returned with
        returnCode '1'.
        """
        try:
            vim_info = extsys.get_vim_by_id(vimid)
            vim_info['tenant'] = tenantid
        except VimDriverVioException as e:
            return Response(data={'error': str(e)}, status=e.status_code)

        try:
            req_body = json.loads(request.body)
        except ValueError:
            # Fix: a malformed request body is a client error (400), not an
            # internal server error (500); json.loads only raises ValueError
            # (JSONDecodeError), so the broad Exception catch was narrowed.
            return Response(data={'error': 'Fail to decode request body.'},
                            status=status.HTTP_400_BAD_REQUEST)

        vim_rsp = image_utils.vim_formatter(vim_info, tenantid)
        image_instance = OperateImage.OperateImage(vim_info)
        try:
            images = image_instance.get_vim_images()
            for image in images:
                if image.name == req_body.get('name'):
                    # Idempotent create: return the existing image instead
                    # of failing or duplicating it.
                    image_info = image_instance.get_vim_image(image.id)
                    rsp = image_utils.image_formatter(image_info)
                    rsp['returnCode'] = '0'
                    rsp.update(vim_rsp)
                    return Response(data=rsp, status=status.HTTP_200_OK)
            param = image_utils.req_body_formatter(req_body)
            image = image_instance.create_vim_image(
                vimid, tenantid,
                imagePath=req_body.get('imagePath'), **param)
            rsp = image_utils.image_formatter(image)
            rsp.update(vim_rsp)
            rsp['returnCode'] = '1'
            return Response(data=rsp, status=status.HTTP_201_CREATED)
        except Exception as e:
            if hasattr(e, "http_status"):
                return Response(data={'error': str(e)}, status=e.http_status)
            return Response(data={'error': str(e)},
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
| apache-2.0 |
mancoast/CPythonPyc_test | fail/313_test_pep352.py | 4 | 7156 | import unittest
import builtins
import warnings
from test.support import run_unittest
import os
from platform import system as platform_system
class ExceptionClassTests(unittest.TestCase):

    """Tests for anything relating to exception objects themselves (e.g.,
    inheritance hierarchy)"""

    def test_builtins_new_style(self):
        # All built-in exceptions must be new-style classes (PEP 352)
        self.assertTrue(issubclass(Exception, object))

    def verify_instance_interface(self, ins):
        # Helper: every exception instance must expose these attributes
        for attr in ("args", "__str__", "__repr__"):
            self.assertTrue(hasattr(ins, attr),
                            "%s missing %s attribute" %
                            (ins.__class__.__name__, attr))

    def test_inheritance(self):
        # Make sure the inheritance hierarchy matches the documentation
        # (exception_hierarchy.txt lists the tree, one exception per line,
        # with '-' depth markers, '(platform)' and '[alias]' annotations).
        exc_set = set()
        for object_ in builtins.__dict__.values():
            try:
                if issubclass(object_, BaseException):
                    exc_set.add(object_.__name__)
            except TypeError:
                pass

        inheritance_tree = open(os.path.join(os.path.split(__file__)[0],
                                             'exception_hierarchy.txt'))
        try:
            superclass_name = inheritance_tree.readline().rstrip()
            try:
                last_exc = getattr(builtins, superclass_name)
            except AttributeError:
                self.fail("base class %s not a built-in" % superclass_name)
            self.assertTrue(superclass_name in exc_set,
                            '%s not found' % superclass_name)
            exc_set.discard(superclass_name)
            superclasses = []  # Loop will insert base exception
            last_depth = 0
            for exc_line in inheritance_tree:
                exc_line = exc_line.rstrip()
                depth = exc_line.rindex('-')
                exc_name = exc_line[depth+2:]  # Slice past space
                if '(' in exc_name:
                    # Platform-specific exception, e.g. "WindowsError (Windows)"
                    paren_index = exc_name.index('(')
                    platform_name = exc_name[paren_index+1:-1]
                    exc_name = exc_name[:paren_index-1]  # Slice off space
                    if platform_system() != platform_name:
                        exc_set.discard(exc_name)
                        continue
                if '[' in exc_name:
                    left_bracket = exc_name.index('[')
                    exc_name = exc_name[:left_bracket-1]  # cover space
                try:
                    exc = getattr(builtins, exc_name)
                except AttributeError:
                    self.fail("%s not a built-in exception" % exc_name)
                # Maintain the stack of (depth, class) ancestors
                if last_depth < depth:
                    superclasses.append((last_depth, last_exc))
                elif last_depth > depth:
                    while superclasses[-1][0] >= depth:
                        superclasses.pop()
                self.assertTrue(issubclass(exc, superclasses[-1][1]),
                                "%s is not a subclass of %s" % (exc.__name__,
                                superclasses[-1][1].__name__))
                try:  # Some exceptions require arguments; just skip them
                    self.verify_instance_interface(exc())
                except TypeError:
                    pass
                self.assertTrue(exc_name in exc_set)
                exc_set.discard(exc_name)
                last_exc = exc
                last_depth = depth
        finally:
            inheritance_tree.close()
        # Every built-in exception must have appeared in the documented tree
        self.assertEqual(len(exc_set), 0, "%s not accounted for" % exc_set)

    interface_tests = ("length", "args", "str", "repr")

    def interface_test_driver(self, results):
        # Each entry of results is a [given, expected] pair matching
        # interface_tests by position
        for test_name, (given, expected) in zip(self.interface_tests, results):
            self.assertEqual(given, expected, "%s: %s != %s" % (test_name,
                             given, expected))

    def test_interface_single_arg(self):
        # Make sure interface works properly when given a single argument
        arg = "spam"
        exc = Exception(arg)
        results = ([len(exc.args), 1], [exc.args[0], arg],
                   [str(exc), str(arg)],
                   [repr(exc), exc.__class__.__name__ + repr(exc.args)])
        self.interface_test_driver(results)

    def test_interface_multi_arg(self):
        # Make sure interface correct when multiple arguments given
        arg_count = 3
        args = tuple(range(arg_count))
        exc = Exception(*args)
        results = ([len(exc.args), arg_count], [exc.args, args],
                   [str(exc), str(args)],
                   [repr(exc), exc.__class__.__name__ + repr(exc.args)])
        self.interface_test_driver(results)

    def test_interface_no_arg(self):
        # Make sure that with no args that interface is correct
        exc = Exception()
        results = ([len(exc.args), 0], [exc.args, tuple()],
                   [str(exc), ''],
                   [repr(exc), exc.__class__.__name__ + '()'])
        self.interface_test_driver(results)
class UsageTests(unittest.TestCase):

    """Test usage of exceptions"""

    def raise_fails(self, object_):
        """Make sure that raising 'object_' triggers a TypeError."""
        try:
            raise object_
        except TypeError:
            return  # What is expected.
        self.fail("TypeError expected for raising %s" % type(object_))

    def catch_fails(self, object_):
        """Catching 'object_' should raise a TypeError."""
        # First form: bare object in the except clause
        try:
            try:
                raise Exception
            except object_:
                pass
        except TypeError:
            pass
        except Exception:
            self.fail("TypeError expected when catching %s" % type(object_))

        # Second form: object wrapped in a tuple
        try:
            try:
                raise Exception
            except (object_,):
                pass
        except TypeError:
            return
        except Exception:
            self.fail("TypeError expected when catching %s as specified in a "
                      "tuple" % type(object_))

    def test_raise_new_style_non_exception(self):
        # You cannot raise a new-style class that does not inherit from
        # BaseException; the ability was not possible until BaseException's
        # introduction so no need to support new-style objects that do not
        # inherit from it.
        class NewStyleClass(object):
            pass
        self.raise_fails(NewStyleClass)
        self.raise_fails(NewStyleClass())

    def test_raise_string(self):
        # Raising a string raises TypeError.
        self.raise_fails("spam")

    def test_catch_non_BaseException(self):
        # Trying to catch an object that does not inherit from BaseException
        # is not allowed.
        class NonBaseException(object):
            pass
        self.catch_fails(NonBaseException)
        self.catch_fails(NonBaseException())

    def test_catch_BaseException_instance(self):
        # Catching an instance of a BaseException subclass won't work.
        self.catch_fails(BaseException())

    def test_catch_string(self):
        # Catching a string is bad.
        self.catch_fails("spam")
def test_main():
    # Entry point used by the regrtest framework
    run_unittest(ExceptionClassTests, UsageTests)

if __name__ == '__main__':
    test_main()
| gpl-3.0 |
stewgeo/fmepedia-data-upload-validation-display | pythonLibraries/setuptools-0.6c11/build/lib/site.py | 108 | 2362 | def __boot():
import sys, imp, os, os.path
PYTHONPATH = os.environ.get('PYTHONPATH')
if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
PYTHONPATH = []
else:
PYTHONPATH = PYTHONPATH.split(os.pathsep)
pic = getattr(sys,'path_importer_cache',{})
stdpath = sys.path[len(PYTHONPATH):]
mydir = os.path.dirname(__file__)
#print "searching",stdpath,sys.path
for item in stdpath:
if item==mydir or not item:
continue # skip if current dir. on Windows, or my own directory
importer = pic.get(item)
if importer is not None:
loader = importer.find_module('site')
if loader is not None:
# This should actually reload the current module
loader.load_module('site')
break
else:
try:
stream, path, descr = imp.find_module('site',[item])
except ImportError:
continue
if stream is None:
continue
try:
# This should actually reload the current module
imp.load_module('site',stream,path,descr)
finally:
stream.close()
break
else:
raise ImportError("Couldn't find the real 'site' module")
#print "loaded", __file__
known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp
oldpos = getattr(sys,'__egginsert',0) # save old insertion position
sys.__egginsert = 0 # and reset the current one
for item in PYTHONPATH:
addsitedir(item)
sys.__egginsert += oldpos # restore effective old position
d,nd = makepath(stdpath[0])
insert_at = None
new_path = []
for item in sys.path:
p,np = makepath(item)
if np==nd and insert_at is None:
# We've hit the first 'system' path entry, so added entries go here
insert_at = len(new_path)
if np in known_paths or insert_at is None:
new_path.append(item)
else:
# new path after the insert point, back-insert it
new_path.insert(insert_at, item)
insert_at += 1
sys.path[:] = new_path
if __name__=='site':
__boot()
del __boot
| mit |
Big-B702/python-for-android | python-build/python-libs/xmpppy/doc/examples/logger.py | 87 | 2608 | #!/usr/bin/python
# -*- coding: koi8-r -*-
from xmpp import *
import time,os
#BOT=(botjid,password)
BOT=('test@penza-gsm.ru','test')
#CONF=(confjid,password)
CONF=('talks@conference.jabber.ru','')
LOGDIR='./'
PROXY={}
#PROXY={'host':'192.168.0.1','port':3128,'username':'luchs','password':'secret'}
#######################################
def LOG(stanza,nick,text):
    # Append one event (message or join/leave) to the per-conference
    # monthly HTML log file, creating folder and file on first use.
    ts=stanza.getTimestamp()
    if not ts:
        ts=stanza.setTimestamp()
        ts=stanza.getTimestamp()
    # Convert the XMPP timestamp to local time.
    # NOTE(review): +3h is a hard-coded MSK offset, +1h added for DST — confirm
    tp=time.mktime(time.strptime(ts,'%Y%m%dT%H:%M:%S %Z'))+3600*3
    if time.localtime()[-1]: tp+=3600
    tp=time.localtime(tp)
    # One folder per conference per month, one file per day
    fold=stanza.getFrom().getStripped().replace('@','%')+'_'+time.strftime("%Y.%m",tp)
    day=time.strftime("%d",tp)
    tm=time.strftime("%H:%M:%S",tp)
    try: os.mkdir(LOGDIR+fold)
    except: pass
    fName='%s%s/%s.%s.html'%(LOGDIR,fold,fold,day)
    # Write the HTML header only if the file does not exist yet
    try: open(fName)
    except:
        open(fName,'w').write("""<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html xml:lang="ru-RU" lang="ru-RU" xmlns="http://www.w3.org/1999/xhtml">
<head>
<meta content="text/html; charset=utf-8" http-equiv="content-type" />
<title>%s logs for %s.%s.</title>
</head>
<body>
<table border="1"><tr><th>time</th><th>who</th><th>text</th></tr>
"""%(CONF[0],fold,day))
    text='<pre>%s</pre>'%text
    open(fName,'a').write((u"<tr><td>%s</td><td>%s</td><td>%s</td></tr>\n"%(tm,nick,text)).encode('utf-8'))
    print (u"<tr><td>%s</td><td>%s</td><td>%s</td></tr>\n"%(tm,nick,text)).encode('koi8-r','replace')
#    print time.localtime(tp),nick,text
def messageCB(sess,mess):
    # Groupchat message handler: log the sender's nick and body text.
    nick=mess.getFrom().getResource()
    text=mess.getBody()
    LOG(mess,nick,text)

# Nicks currently present in the conference room
roster=[]
def presenceCB(sess,pres):
    # Presence handler: track joins/leaves in the roster list and log
    # them (the literals are Russian text in koi8-r encoding).
    nick=pres.getFrom().getResource()
    text=''
    if pres.getType()=='unavailable':
        if nick in roster:
            text=nick+unicode(' ÐÏËÉÎÕÌ ËÏÎÆÅÒÅÎÃÉÀ','koi8-r')
            roster.remove(nick)
    else:
        if nick not in roster:
            text=nick+unicode(' ÐÒÉÛ£Ì × ËÏÎÆÅÒÅÎÃÉÀ','koi8-r')
            roster.append(nick)
    if text: LOG(pres,nick,text)
if 1:
    # Connect the bot, register handlers, join the conference with
    # history suppressed, then poll the connection forever.
    cl=Client(JID(BOT[0]).getDomain(),debug=[])
    cl.connect(proxy=PROXY)
    cl.RegisterHandler('message',messageCB)
    cl.RegisterHandler('presence',presenceCB)
    cl.auth(JID(BOT[0]).getNode(),BOT[1])

    p=Presence(to='%s/logger'%CONF[0])
    p.setTag('x',namespace=NS_MUC).setTagData('password',CONF[1])
    # Request no room history on join
    p.getTag('x').addChild('history',{'maxchars':'0','maxstanzas':'0'})
    cl.send(p)

    while 1:
        cl.Process(1)
| apache-2.0 |
isandlaTech/cohorte-devtools | org.cohorte.eclipse.runner.basic/files/test/pelix/shell/remote.py | 3 | 15312 | #!/usr/bin/env python
# -- Content-Encoding: UTF-8 --
"""
iPOPO remote shell
Provides a remote interface for the Pelix shell that can be accessed using
telnet or netcat.
:author: Thomas Calmant
:copyright: Copyright 2016, Thomas Calmant
:license: Apache License 2.0
:version: 0.6.4
..
Copyright 2016 Thomas Calmant
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
# Standard library
from select import select
import logging
import threading
import socket
import sys
try:
# Python 3
# pylint: disable=F0401
import socketserver
except ImportError:
# Python 2
# pylint: disable=F0401
import SocketServer as socketserver
# iPOPO decorators
from pelix.ipopo.decorators import ComponentFactory, Requires, Property, \
Validate, Invalidate, Provides
# Shell constants
import pelix.shell
import pelix.shell.beans as beans
import pelix.ipv6utils
# ------------------------------------------------------------------------------
# Module version
__version_info__ = (0, 6, 4)
__version__ = ".".join(str(x) for x in __version_info__)
# Documentation strings format
__docformat__ = "restructuredtext en"
# ------------------------------------------------------------------------------
_logger = logging.getLogger(__name__)
# ------------------------------------------------------------------------------
class SharedBoolean(object):
    """
    A boolean flag that can be read and written safely from several
    threads or objects
    """
    def __init__(self, value=False):
        """
        Stores the initial value and creates the guarding lock
        """
        self._mutex = threading.Lock()
        self._state = value

    def get_value(self):
        """
        Returns the current boolean value
        """
        self._mutex.acquire()
        try:
            return self._state
        finally:
            self._mutex.release()

    def set_value(self, value):
        """
        Replaces the boolean value
        """
        self._mutex.acquire()
        try:
            self._state = value
        finally:
            self._mutex.release()
# ------------------------------------------------------------------------------
class RemoteConsole(socketserver.StreamRequestHandler):
    """
    Handles incoming connections and redirects the network stream to the
    Pelix shell
    """
    def __init__(self, shell_svc, active_flag, *args):
        """
        Sets up members

        :param shell_svc: The underlying Pelix shell service
        :param active_flag: Common flag for stopping the client communication
        """
        self._shell = shell_svc
        self._active = active_flag
        socketserver.StreamRequestHandler.__init__(self, *args)

    def send(self, data):
        """
        Tries to send data to the client.

        :param data: Data to be sent (str, encoded as UTF-8 on the wire)
        :return: True if the data was sent, False on error
        """
        if data is not None:
            data = data.encode("UTF-8")

        try:
            self.wfile.write(data)
            self.wfile.flush()
            return True
        except IOError:
            # An error occurred, mask it
            # -> This allows to handle the command even if the client has been
            # disconnected (i.e. "echo stop 0 | nc localhost 9000")
            return False

    def handle(self):
        """
        Handles a TCP client: reads command lines and feeds them to the
        shell service until the client leaves or the server shuts down
        """
        _logger.info("RemoteConsole client connected: [%s]:%d",
                     self.client_address[0], self.client_address[1])

        # Prepare the session
        session = beans.ShellSession(
            beans.IOHandler(self.rfile, self.wfile),
            {"remote_client_ip": self.client_address[0]})

        # Print the banner
        ps1 = self._shell.get_ps1()
        self.send(self._shell.get_banner())
        self.send(ps1)

        try:
            while self._active.get_value():
                # Wait for data (0.5s poll so the active flag is re-checked)
                rlist = select([self.connection], [], [], .5)[0]
                if not rlist:
                    # Nothing to do (poll timed out)
                    continue

                data = self.rfile.readline()
                if not data:
                    # End of stream (client gone)
                    break

                # Strip the line
                line = data.strip()
                if not line:
                    # Fix: the original tested "data" here, which is always
                    # truthy at this point, so blank lines were handed to the
                    # shell instead of being skipped
                    continue

                # Execute it
                try:
                    self._shell.handle_line(line, session)
                except KeyboardInterrupt:
                    # Stop there on interruption
                    self.send("\nInterruption received.")
                    return
                except IOError as ex:
                    # I/O errors are fatal
                    _logger.exception(
                        "Error communicating with a client: %s", ex)
                    break
                except Exception as ex:
                    # Other exceptions are not important
                    import traceback
                    self.send("\nError during last command: {0}\n".format(ex))
                    self.send(traceback.format_exc())

                # Print the prompt
                self.send(ps1)
        finally:
            _logger.info("RemoteConsole client gone: [%s]:%d",
                         self.client_address[0], self.client_address[1])

            # Be polite
            self.send("\nSession closed. Good bye.\n")
            self.finish()
# ------------------------------------------------------------------------------
class ThreadingTCPServerFamily(socketserver.ThreadingTCPServer):
    """
    Threaded TCP Server handling different address families
    """
    def __init__(self, server_address, request_handler_class):
        """
        Sets up the TCP server. Doesn't bind nor activate it.

        :param server_address: A (host, port) tuple
        :param request_handler_class: Handler instantiated per connection
        """
        # Determine the address family
        addr_info = socket.getaddrinfo(server_address[0], server_address[1],
                                       0, 0, socket.SOL_TCP)

        # Change the address family before the socket is created
        # Get the family of the first possibility
        self.address_family = addr_info[0][0]

        # Call the super constructor (bind_and_activate=False: the caller
        # binds/activates after setting flags)
        socketserver.ThreadingTCPServer.__init__(self, server_address,
                                                 request_handler_class,
                                                 False)
        if self.address_family == socket.AF_INET6:
            # Explicitly ask to be accessible both by IPv4 and IPv6
            try:
                pelix.ipv6utils.set_double_stack(self.socket)
            except AttributeError as ex:
                _logger.exception("System misses IPv6 constant: %s", ex)
            except socket.error as ex:
                _logger.exception("Error setting up IPv6 double stack: %s", ex)

    def process_request(self, request, client_address):
        """
        Starts a new thread to process the request, adding the client address
        in its name.
        """
        thread = threading.Thread(
            name="RemoteShell-{0}-Client-{1}".format(self.server_address[1],
                                                     client_address[:2]),
            target=self.process_request_thread,
            args=(request, client_address))
        thread.daemon = self.daemon_threads
        thread.start()
def _create_server(shell, server_address, port):
    """
    Creates the TCP console on the given address and port

    :param shell: The remote shell handler
    :param server_address: Server bound address
    :param port: Server port (0 lets the OS pick a free port)
    :return: A (server thread, TCP server, shared active flag) tuple
    """
    # Set up the request handler creator; the shared flag lets all client
    # loops be stopped at once on shutdown
    active_flag = SharedBoolean(True)

    def request_handler(*rh_args):
        """
        Constructs a RemoteConsole as TCP request handler
        """
        return RemoteConsole(shell, active_flag, *rh_args)

    # Set up the server
    server = ThreadingTCPServerFamily((server_address, port), request_handler)

    # Set flags (before bind/activate, while the server is not yet serving)
    server.daemon_threads = True
    server.allow_reuse_address = True

    # Activate the server
    server.server_bind()
    server.server_activate()

    # Serve clients
    server_thread = threading.Thread(target=server.serve_forever,
                                     name="RemoteShell-{0}".format(port))
    server_thread.daemon = True
    server_thread.start()

    return server_thread, server, active_flag
# ------------------------------------------------------------------------------
@ComponentFactory(pelix.shell.FACTORY_REMOTE_SHELL)
@Provides(pelix.shell.SERVICE_SHELL_REMOTE)
@Requires("_shell", pelix.shell.SERVICE_SHELL)
@Property("_address", "pelix.shell.address", "localhost")
@Property("_port", "pelix.shell.port", 9000)
class IPopoRemoteShell(object):
    """
    The iPOPO Remote Shell, based on the Pelix Shell
    """
    def __init__(self):
        """
        Sets up the component
        """
        # Component shell (injected by iPOPO)
        self._shell = None
        self._address = None
        self._port = 0

        # Internals (thread, TCP server and shared stop flag)
        self._thread = None
        self._server = None
        self._server_flag = None

    def get_access(self):
        """
        Implementation of the remote shell specification

        :return: A (host, port) tuple
        """
        return self._address, self._port

    def get_banner(self):
        """
        Retrieves the shell banner

        :return: The shell banner
        """
        line = '-' * 72
        shell_banner = self._shell.get_banner()
        return "{lines}\n{shell_banner}\niPOPO Remote Shell\n{lines}\n" \
            .format(lines=line, shell_banner=shell_banner)

    def get_ps1(self):
        """
        Returns the shell prompt

        :return: The shell prompt
        """
        return self._shell.get_ps1()

    def handle_line(self, line, session):
        """
        Handles the command line.

        **Does not catch exceptions !**

        :param line: The command line
        :param session: The current shell session
        :return: The execution result (True on success, else False)
        """
        return self._shell.execute(line, session)

    @Validate
    def validate(self, context):
        """
        Component validation: normalizes the configured address/port and
        starts the TCP server
        """
        if not self._address:
            # Local host by default
            self._address = "localhost"

        try:
            self._port = int(self._port)
            if self._port < 0 or self._port > 65535:
                # Invalid port value: let the OS pick one (port 0)
                self._port = 0
        except (ValueError, TypeError):
            # Invalid port string: use a random port
            self._port = 0

        # Start the TCP server
        self._thread, self._server, self._server_flag = \
            _create_server(self, self._address, self._port)

        # Property update (if port was 0, read back the real one)
        self._port = self._server.socket.getsockname()[1]

        _logger.info("RemoteShell validated on port: %d", self._port)

    @Invalidate
    def invalidate(self, context):
        """
        Component invalidation: stops client loops and shuts the server down
        """
        # Stop the clients loops
        if self._server is not None:
            self._server_flag.set_value(False)

            # Shutdown the server
            self._server.shutdown()
            self._thread.join(2)

            # Close the server socket (ignore errors)
            self._server.server_close()

            _logger.info("RemoteShell gone from port: %d", self._port)

        # Clean up
        self._thread = None
        self._server = None
        self._server_flag = None
# ------------------------------------------------------------------------------
def main(address="localhost", port=9000):
    """
    Starts a framework with a remote shell and starts an interactive console.

    Blocks until the interactive console exits, then stops the framework.

    :param address: Shell binding address
    :param port: Shell binding port
    """
    from pelix.ipopo.constants import use_ipopo
    import pelix.framework

    # Start a Pelix framework with iPOPO and the shell bundles
    framework = pelix.framework.create_framework(('pelix.ipopo.core',
                                                  'pelix.shell.core',
                                                  'pelix.shell.ipopo',
                                                  'pelix.shell.remote'))
    framework.start()
    context = framework.get_bundle_context()

    # Instantiate a Remote Shell component bound to the given address/port
    with use_ipopo(context) as ipopo:
        rshell = ipopo.instantiate(pelix.shell.FACTORY_REMOTE_SHELL,
                                   "remote-shell",
                                   {"pelix.shell.address": address,
                                    "pelix.shell.port": port})

    # Prepare interpreter variables (the locals of the interactive console)
    variables = {'__name__': '__console__',
                 '__doc__': None,
                 '__package__': None,
                 'framework': framework,
                 'context': context,
                 'use_ipopo': use_ipopo}

    # Prepare a banner; query the shell for the really bound (host, port),
    # as the effective port may differ from the requested one
    host, port = rshell.get_access()
    banner = "{lines}\nPython interpreter with Pelix Remote Shell\n" \
             "Remote shell bound to: {host}:{port}\n{lines}\n" \
             "Python version: {version}\n" \
        .format(lines='-' * 80, version=sys.version,
                host=host, port=port)

    try:
        # Run an interpreter (blocks until the user exits it)
        _run_interpreter(variables, banner)
    finally:
        # Stop the framework
        framework.stop()
def _run_interpreter(variables, banner):
    """
    Runs a Python interpreter console and blocks until the user exits it.

    :param variables: Interpreter variables (locals)
    :param banner: Start-up banner
    """
    # Script-only imports
    import code
    try:
        # Enable line edition and tab-completion when readline is available
        import readline
        import rlcompleter
        readline.set_completer(rlcompleter.Completer(variables).complete)
        readline.parse_and_bind("tab: complete")
    except ImportError:
        # readline is not available: ignore
        pass

    # Start the console
    shell = code.InteractiveConsole(variables)
    shell.interact(banner)
# ------------------------------------------------------------------------------
if __name__ == '__main__':
    # Prepare command-line arguments
    import argparse
    parser = argparse.ArgumentParser(description="Pelix Remote Shell")
    parser.add_argument("-v", "--verbose", action="store_true", dest="verbose",
                        help="Set loggers at debug level")
    parser.add_argument("-a", "--address", dest="address", default="localhost",
                        help="The remote shell binding address")
    parser.add_argument("-p", "--port", dest="port", type=int, default=9000,
                        help="The remote shell binding port")

    # Parse them
    args = parser.parse_args()

    # Prepare the logger verbosity
    if args.verbose:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.WARNING)

    # Run the entry point
    main(args.address, args.port)
| apache-2.0 |
hyperized/ansible | lib/ansible/modules/network/eos/eos_lacp_interfaces.py | 1 | 5527 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2019 Red Hat
# GNU General Public License v3.0+
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#############################################
# WARNING #
#############################################
#
# This file is auto generated by the resource
# module builder playbook.
#
# Do not edit this file manually.
#
# Changes to this file will be over written
# by the resource module builder.
#
# Changes should be made in the model used to
# generate this file or in the resource module
# builder template.
#
#############################################
"""
The module file for eos_lacp_interfaces
"""
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'
}
DOCUMENTATION = """
---
module: eos_lacp_interfaces
version_added: 2.9
short_description: Manage Link Aggregation Control Protocol (LACP) attributes of interfaces on Arista EOS devices.
description:
- This module manages Link Aggregation Control Protocol (LACP) attributes of interfaces on Arista EOS devices.
author: Nathaniel Case (@Qalthos)
options:
config:
description: A dictionary of LACP interfaces options.
type: list
elements: dict
suboptions:
name:
description:
- Full name of the interface (i.e. Ethernet1).
type: str
port_priority:
description:
- LACP port priority for the interface. Range 1-65535.
type: int
rate:
description:
- Rate at which PDUs are sent by LACP.
At fast rate LACP is transmitted once every 1 second.
At normal rate LACP is transmitted every 30 seconds after the link is bundled.
type: str
choices: ['fast', 'normal']
state:
description:
- The state of the configuration after module completion.
type: str
choices:
- merged
- replaced
- overridden
- deleted
default: merged
"""
EXAMPLES = """
# Using merged
#
#
# ------------
# Before state
# ------------
#
#
# veos#show run | section ^interface
# interface Ethernet1
# lacp port-priority 30
# interface Ethernet2
# lacp rate fast
- name: Merge provided configuration with device configuration
eos_lacp_interfaces:
config:
- name: Ethernet1
rate: fast
- name: Ethernet2
rate: normal
state: merged
#
# -----------
# After state
# -----------
#
# veos#show run | section ^interface
# interface Ethernet1
# lacp port-priority 30
# lacp rate fast
# interface Ethernet2
# Using replaced
#
#
# ------------
# Before state
# ------------
#
#
# veos#show run | section ^interface
# interface Ethernet1
# lacp port-priority 30
# interface Ethernet2
# lacp rate fast
- name: Replace existing LACP configuration of specified interfaces with provided configuration
eos_lacp_interfaces:
config:
- name: Ethernet1
rate: fast
state: replaced
#
# -----------
# After state
# -----------
#
# veos#show run | section ^interface
# interface Ethernet1
# lacp rate fast
# interface Ethernet2
# lacp rate fast
# Using overridden
#
#
# ------------
# Before state
# ------------
#
#
# veos#show run | section ^interface
# interface Ethernet1
# lacp port-priority 30
# interface Ethernet2
# lacp rate fast
- name: Override the LACP configuration of all the interfaces with provided configuration
eos_lacp_interfaces:
config:
- name: Ethernet1
rate: fast
state: overridden
#
# -----------
# After state
#
#
# veos#show run | section ^interface
# interface Ethernet1
# lacp rate fast
# interface Ethernet2
# Using deleted
#
#
# ------------
# Before state
# ------------
#
#
# veos#show run | section ^interface
# interface Ethernet1
# lacp port-priority 30
# interface Ethernet2
# lacp rate fast
- name: Delete LACP attributes of given interfaces (or all interfaces if none specified).
eos_lacp_interfaces:
state: deleted
#
# -----------
# After state
# -----------
#
# veos#show run | section ^interface
# interface Ethernet1
# interface Ethernet2
"""
RETURN = """
before:
description: The configuration as structured data prior to module invocation.
returned: always
type: list
sample: >
The configuration returned will always be in the same format
of the parameters above.
after:
description: The configuration as structured data after module completion.
returned: when changed
type: list
sample: >
The configuration returned will always be in the same format
of the parameters above.
commands:
description: The set of commands pushed to the remote device.
returned: always
type: list
sample: ['interface Ethernet1', 'lacp rate fast']
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.eos.argspec.lacp_interfaces.lacp_interfaces import Lacp_interfacesArgs
from ansible.module_utils.network.eos.config.lacp_interfaces.lacp_interfaces import Lacp_interfaces
def main():
    """
    Main entry point for module execution.

    Builds the AnsibleModule from the generated argument spec and delegates
    all work to the Lacp_interfaces resource-module class.

    :returns: the result from module invocation
    """
    module = AnsibleModule(argument_spec=Lacp_interfacesArgs.argument_spec,
                           supports_check_mode=True)

    # execute_module() computes before/after states and the pushed commands
    result = Lacp_interfaces(module).execute_module()
    module.exit_json(**result)


if __name__ == '__main__':
    main()
| gpl-3.0 |
AlexBoYang/Unblock-Youku | test/run-all-tests.py | 12 | 3293 | #!/usr/bin/env python
"""
Allow you smoothly surf on many websites blocking non-mainland visitors.
Copyright (C) 2012 - 2014 Bo Zhu http://zhuzhu.org
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as
published by the Free Software Foundation, either version 3 of the
License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import subprocess
import time
import sys
import os

# Run relative to this script's directory so the test-*.js files are found
# http://goo.gl/2wtRL
# os.chdir(os.path.dirname(sys.argv[0]))
if os.path.dirname(sys.argv[0]) != '':
    os.chdir(os.path.dirname(sys.argv[0]))

# Abort early if PhantomJS is not installed (required to run the tests)
print 'PhantomJS',
try:
    version = subprocess.check_output(['phantomjs', '--version'])
    print version
    sys.stdout.flush()
except Exception as exp:
    print 'is not installed.'
    print 'Please install it and try again.'
    sys.stdout.flush()
    sys.exit(-1)

# Handle to the node server process (set by start_server)
server_process = None
def start_server():
    """Start the node server under test and give it time to come up."""
    global server_process
    print 'To start the server, and wait for 21 seconds to set up...'
    sys.stdout.flush()
    server_process = subprocess.Popen(
        ['node', '../server/server.js', '--production', '--port=8888'])
    # No readiness probe: just wait a fixed delay for the server to be up
    time.sleep(21)
def stop_server():
    """Terminate the node server started by start_server()."""
    time.sleep(1)
    print 'To stop the server...',
    sys.stdout.flush()
    server_process.terminate()
    # Reap the process so it does not linger as a zombie
    server_process.wait()
    print 'done.'
    sys.stdout.flush()
# Reverse-video red output via ANSI escape codes
# http://goo.gl/xaBer
def red_alert(text):
    """Print *text* highlighted in red on the terminal."""
    print "\033[7;31m" + text + "\033[0m"
    sys.stdout.flush()
def run_all_tests():
    """
    Run every test-*.js file in this directory through PhantomJS.

    Files ending in -proxy.js are run through the local proxy on port 8888
    (the server started by start_server); others run directly.

    :return: the number of failed tests (0 means all passed)
    """
    print
    print 'To run all test-*.js files...'
    sys.stdout.flush()

    num_failed = 0
    num_passed = 0
    for file_name in os.listdir('.'):
        if file_name.startswith('test-') and file_name.endswith('.js'):
            if file_name.endswith('-proxy.js'):
                # Proxy tests exercise the local server under test
                command = ['phantomjs', '--proxy=127.0.0.1:8888', file_name]
            else:
                command = ['phantomjs', file_name]
            print
            print ' '.join(command)
            sys.stdout.flush()
            return_value = subprocess.call(command)
            time.sleep(2)  # sleep 2 seconds between tests
            if return_value != 0:
                num_failed += 1
                red_alert(file_name + ' FAILED!')
            else:
                num_passed += 1
                print file_name + ' passed.'
            sys.stdout.flush()

    print
    sys.stdout.flush()
    if num_failed > 0:
        red_alert('Final results: ' + str(num_failed) + ' TESTS FAILED'
                  + ' (out of ' + str(num_failed + num_passed) + ')')
    else:
        print 'All %d tests passed.' % (num_passed + num_failed)
    print
    sys.stdout.flush()
    return num_failed
if __name__ == '__main__':
    exit_code = -1
    try:
        start_server()
        exit_code = run_all_tests()
    finally:
        # Always stop the server, even when the tests crash
        stop_server()
    sys.exit(exit_code)
| agpl-3.0 |
ralphjzhang/profitpy | profit/session/collection.py | 18 | 4963 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2007 Troy Melhase, Yichun Wei
# Distributed under the terms of the GNU General Public License v2
# Author: Troy Melhase <troy@gci.net>
# Yichun Wei <yichun.wei@gmail.com>
import os
from cPickle import PicklingError, UnpicklingError, dump, load
from PyQt4.QtCore import QObject, QThread
from profit.lib import logging
from profit.lib import Signals
class DataCollection(QObject):
    """
    Base class for session data containers.

    Wraps a plain dict and registers itself with the session so that
    ``on_session_*`` handlers receive broker messages.  Signals listed in
    ``sessionResendSignals`` are re-emitted through the session object.
    """
    # Signals forwarded from this collection to the session
    sessionResendSignals = []

    def __init__(self, session):
        QObject.__init__(self)
        self.session = session
        self.data = {}
        # Register for message callbacks (on_session_<MessageType> methods)
        session.registerMeta(self)
        for signal in self.sessionResendSignals:
            self.connect(self, signal, session, signal)

    # Minimal mapping protocol, delegated to the underlying dict

    def __contains__(self, item):
        return item in self.data

    def __getitem__(self, name):
        return self.data[name]

    def __setitem__(self, name, value):
        self.data[name] = value

    def keys(self):
        return self.data.keys()

    def items(self):
        return self.data.items()

    def setdefault(self, key, default):
        return self.data.setdefault(key, default)
class AccountCollection(DataCollection):
    """
    Stores account value series keyed by (key, currency, accountName).
    """
    sessionResendSignals = [Signals.createdAccountData, ]

    def __init__(self, session):
        DataCollection.__init__(self, session)
        # Most recent value seen for each account key
        self.last = {}

    def on_session_UpdateAccountValue(self, message):
        """Append an incoming account value to its series."""
        key = (message.key, message.currency, message.accountName)
        try:
            # Values arrive as strings; prefer floats when parseable
            iv = float(message.value)
        except (ValueError, ):
            iv = message.value
        try:
            acctdata = self[key]
        except (KeyError, ):
            # First value for this key: create the series and announce it
            acctdata = self[key] = \
                self.session.strategy.makeAccountSeries(key)
            self.emit(Signals.createdAccountData, key, acctdata, iv)
        acctdata.append(iv)
        self.last[key] = iv
class ContractDataCollection(DataCollection):
    """
    Maps ticker ids to contract objects, announcing additions via signal.
    """
    sessionResendSignals = [Signals.contract.added, ]

    def __setitem__(self, tickerId, contract):
        ## maybe enforce types?
        DataCollection.__setitem__(self, tickerId, contract)
        self.emit(Signals.contract.added, tickerId, contract)

    def on_session_TickPrice_TickSize(self, message):
        """Create a placeholder contract for previously unseen ticker ids."""
        tickerId = message.tickerId
        if tickerId not in self:
            # Symbol is unknown at this point, so an empty one is used
            contract = self[tickerId] = self.session.strategy.makeContract(symbol='')
            self.emit(Signals.contract.added, tickerId, contract)
class TickerCollection(DataCollection):
    """
    Stores ticker objects and their per-field value series.
    """
    sessionResendSignals = [Signals.createdSeries, Signals.createdTicker, ]

    def __init__(self, session):
        DataCollection.__init__(self, session)
        ## have to make the strategy symbols lazy somehow
        for tid in session.strategy.symbols().values():
            self[tid] = session.strategy.makeTicker(tid)

    def on_session_TickPrice_TickSize(self, message):
        """Route a tick price/size message into the matching field series."""
        tickerId = message.tickerId
        try:
            tickerdata = self[tickerId]
        except (KeyError, ):
            # Unknown ticker: create it on the fly and announce it
            tickerdata = self[tickerId] = \
                self.session.strategy.makeTicker(tickerId)
            self.emit(Signals.createdTicker, tickerId, tickerdata)
        try:
            # TickPrice messages carry 'price'; TickSize messages carry 'size'
            value = message.price
        except (AttributeError, ):
            value = message.size
        field = message.field
        try:
            seq = tickerdata.series[field]
        except (KeyError, ):
            # First value for this field: create its series and announce it
            seq = tickerdata.series[field] = \
                self.session.strategy.makeTickerSeries(tickerId, field)
            self.emit(Signals.createdSeries, tickerId, field)
        seq.append(value)
class HistoricalDataCollection(DataCollection):
    """
    Tracks historical data requests and collects their result messages.
    """
    sessionResendSignals = [Signals.histdata.start,
                            Signals.histdata.finish]

    def __init__(self, session):
        DataCollection.__init__(self, session)

    def on_session_HistoricalData(self, message):
        """On the terminating message, gather all rows for the request."""
        # The broker marks the end of a request with a 'finished...' date
        if message.date.startswith('finished'):
            reqId = message.reqId
            reqData = self.setdefault(reqId, {})
            histMsgs = self.session.messagesTyped['HistoricalData']
            reqData['messages'] = self.historyMessages(reqId, histMsgs)
            self.emit(Signals.histdata.finish, reqId)

    def begin(self, params):
        """Issue a historical data request to the connection.

        :param params: keyword dict for reqHistoricalData; must include
                       'tickerId', which is used as the request id
        """
        reqId = params['tickerId']
        reqData = self.setdefault(reqId, {})
        reqData.update(params)
        self.emit(Signals.histdata.start, reqId, reqData)
        self.session.connection.reqHistoricalData(**reqData)

    @staticmethod
    def historyMessages(reqId, msgs):
        """Yield a request's data messages, excluding the terminator."""
        return (m for m in msgs
                if m[1].reqId==reqId
                and not m[1].date.startswith('finished'))
class OrderDataCollection(DataCollection):
    """
    Tracks the next valid order id reported by the broker.
    """
    # Updated from nextValidId messages
    nextId = 0

    def on_session_nextValidId(self, message):
        self.nextId = int(message.orderId)
class ErrorDataCollection(DataCollection):
    """
    Logs error messages received from the session at debug level.
    """
    def on_session_Error(self, message):
        logging.debug(str(message))
| gpl-2.0 |
shakamunyi/neutron-dvr | neutron/plugins/cisco/n1kv/n1kv_neutron_plugin.py | 1 | 68138 | # Copyright 2013 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
# @author: Aruna Kushwaha, Cisco Systems, Inc.
# @author: Rudrajit Tapadar, Cisco Systems, Inc.
# @author: Abhishek Raut, Cisco Systems, Inc.
# @author: Sergey Sudakovich, Cisco Systems, Inc.
import eventlet
from oslo.config import cfg as q_conf
from neutron.api.rpc.agentnotifiers import dhcp_rpc_agent_api
from neutron.api.rpc.agentnotifiers import l3_rpc_agent_api
from neutron.api.v2 import attributes
from neutron.common import constants
from neutron.common import exceptions as n_exc
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils
from neutron.db import agents_db
from neutron.db import agentschedulers_db
from neutron.db import db_base_plugin_v2
from neutron.db import dhcp_rpc_base
from neutron.db import external_net_db
from neutron.db import extraroute_db
from neutron.db import l3_agentschedulers_db
from neutron.db import l3_rpc_base
from neutron.db import portbindings_db
from neutron.extensions import portbindings
from neutron.extensions import providernet
from neutron.openstack.common import excutils
from neutron.openstack.common import importutils
from neutron.openstack.common import log as logging
from neutron.openstack.common import uuidutils as uuidutils
from neutron.plugins.cisco.common import cisco_constants as c_const
from neutron.plugins.cisco.common import cisco_credentials_v2 as c_cred
from neutron.plugins.cisco.common import cisco_exceptions
from neutron.plugins.cisco.common import config as c_conf
from neutron.plugins.cisco.db import n1kv_db_v2
from neutron.plugins.cisco.db import network_db_v2
from neutron.plugins.cisco.extensions import n1kv
from neutron.plugins.cisco.n1kv import n1kv_client
from neutron.plugins.common import constants as svc_constants
LOG = logging.getLogger(__name__)
class N1kvRpcCallbacks(n_rpc.RpcCallback,
                       dhcp_rpc_base.DhcpRpcCallbackMixin,
                       l3_rpc_base.L3RpcCallbackMixin):
    """Class to handle agent RPC calls.

    All behavior comes from the DHCP and L3 RPC mixins; this class only
    pins the RPC API version.
    """

    # Set RPC API version to 1.1 by default.
    RPC_API_VERSION = '1.1'
class N1kvNeutronPluginV2(db_base_plugin_v2.NeutronDbPluginV2,
                          external_net_db.External_net_db_mixin,
                          extraroute_db.ExtraRoute_db_mixin,
                          portbindings_db.PortBindingMixin,
                          n1kv_db_v2.NetworkProfile_db_mixin,
                          n1kv_db_v2.PolicyProfile_db_mixin,
                          network_db_v2.Credential_db_mixin,
                          l3_agentschedulers_db.L3AgentSchedulerDbMixin,
                          agentschedulers_db.DhcpAgentSchedulerDbMixin):

    """
    Implement the Neutron abstractions using Cisco Nexus1000V.

    Refer README file for the architecture, new features, and
    workflow
    """

    # This attribute specifies whether the plugin supports or not
    # bulk operations.
    __native_bulk_support = False
    # API extensions exposed by this plugin
    supported_extension_aliases = ["provider", "agent",
                                   "n1kv", "network_profile",
                                   "policy_profile", "external-net", "router",
                                   "binding", "credential",
                                   "l3_agent_scheduler",
                                   "dhcp_agent_scheduler"]
    def __init__(self, configfile=None):
        """
        Initialize Nexus1000V Neutron plugin.

        1. Initialize VIF type to OVS
        2. clear N1kv credential
        3. Initialize Nexus1000v and Credential DB
        4. Establish communication with Cisco Nexus1000V

        :param configfile: unused; kept for interface compatibility
        """
        super(N1kvNeutronPluginV2, self).__init__()
        self.base_binding_dict = {
            portbindings.VIF_TYPE: portbindings.VIF_TYPE_OVS,
            portbindings.VIF_DETAILS: {
                # TODO(rkukura): Replace with new VIF security details
                portbindings.CAP_PORT_FILTER:
                'security-group' in self.supported_extension_aliases}}
        # Start from a clean credential store
        network_db_v2.delete_all_n1kv_credentials()
        c_cred.Store.initialize()
        self._setup_vsm()
        self._setup_rpc()
        # Schedulers are configurable via neutron.conf
        self.network_scheduler = importutils.import_object(
            q_conf.CONF.network_scheduler_driver
        )
        self.router_scheduler = importutils.import_object(
            q_conf.CONF.router_scheduler_driver
        )
    def _setup_rpc(self):
        """Create RPC consumers and agent notifiers for core and L3 topics."""
        # RPC support
        self.service_topics = {svc_constants.CORE: topics.PLUGIN,
                               svc_constants.L3_ROUTER_NAT: topics.L3PLUGIN}
        self.conn = n_rpc.create_connection(new=True)
        self.endpoints = [N1kvRpcCallbacks(), agents_db.AgentExtRpcCallback()]
        for svc_topic in self.service_topics.values():
            self.conn.create_consumer(svc_topic, self.endpoints, fanout=False)
        self.dhcp_agent_notifier = dhcp_rpc_agent_api.DhcpAgentNotifyAPI()
        self.l3_agent_notifier = l3_rpc_agent_api.L3AgentNotifyAPI()
        # Consume from all consumers in threads
        self.conn.consume_in_threads()
    def _setup_vsm(self):
        """
        Setup Cisco Nexus 1000V related parameters and pull policy profiles.

        Retrieve all the policy profiles from the VSM when the plugin is
        instantiated for the first time and then continue to poll for
        policy profile updates.
        """
        LOG.debug(_('_setup_vsm'))
        self.agent_vsm = True
        # Poll VSM for create/delete of policy profile.
        eventlet.spawn(self._poll_policy_profiles)
    def _poll_policy_profiles(self):
        """Green-thread body: pull policy profiles from VSM forever.

        Re-syncs every CISCO_N1K.poll_duration seconds.
        """
        while True:
            self._populate_policy_profiles()
            eventlet.sleep(c_conf.CISCO_N1K.poll_duration)
    def _populate_policy_profiles(self):
        """
        Populate all the policy profiles from VSM.

        The tenant id is not available when the policy profiles are polled
        from the VSM. Hence we associate the policy profiles with fake
        tenant-ids.
        """
        LOG.debug(_('_populate_policy_profiles'))
        try:
            n1kvclient = n1kv_client.Client()
            policy_profiles = n1kvclient.list_port_profiles()
            vsm_profiles = {}
            plugin_profiles_set = set()
            # Fetch policy profiles from VSM (id -> name)
            for profile_name in policy_profiles:
                profile_id = (policy_profiles
                              [profile_name][c_const.PROPERTIES][c_const.ID])
                vsm_profiles[profile_id] = profile_name
            # Fetch policy profiles previously populated
            for profile in n1kv_db_v2.get_policy_profiles():
                plugin_profiles_set.add(profile.id)
            vsm_profiles_set = set(vsm_profiles)
            # Update database if the profile sets differ.
            if vsm_profiles_set ^ plugin_profiles_set:
                # Add profiles in database if new profiles were created in VSM
                for pid in vsm_profiles_set - plugin_profiles_set:
                    self._add_policy_profile(vsm_profiles[pid], pid)
                # Delete profiles from database if profiles were deleted in VSM
                for pid in plugin_profiles_set - vsm_profiles_set:
                    self._delete_policy_profile(pid)
            self._remove_all_fake_policy_profiles()
        except (cisco_exceptions.VSMError,
                cisco_exceptions.VSMConnectionFailed):
            # Best effort: keep running and retry on the next poll cycle
            LOG.warning(_('No policy profile populated from VSM'))
    def _extend_network_dict_provider(self, context, network):
        """Add extended (provider) network parameters to the network dict.

        The fields filled in depend on the bound network type
        (overlay / vlan / trunk / multi-segment).
        """
        binding = n1kv_db_v2.get_network_binding(context.session,
                                                 network['id'])
        network[providernet.NETWORK_TYPE] = binding.network_type
        if binding.network_type == c_const.NETWORK_TYPE_OVERLAY:
            network[providernet.PHYSICAL_NETWORK] = None
            network[providernet.SEGMENTATION_ID] = binding.segmentation_id
            network[n1kv.MULTICAST_IP] = binding.multicast_ip
        elif binding.network_type == c_const.NETWORK_TYPE_VLAN:
            network[providernet.PHYSICAL_NETWORK] = binding.physical_network
            network[providernet.SEGMENTATION_ID] = binding.segmentation_id
        elif binding.network_type == c_const.NETWORK_TYPE_TRUNK:
            network[providernet.PHYSICAL_NETWORK] = binding.physical_network
            network[providernet.SEGMENTATION_ID] = None
            network[n1kv.MULTICAST_IP] = None
        elif binding.network_type == c_const.NETWORK_TYPE_MULTI_SEGMENT:
            network[providernet.PHYSICAL_NETWORK] = None
            network[providernet.SEGMENTATION_ID] = None
            network[n1kv.MULTICAST_IP] = None
def _process_provider_create(self, context, attrs):
network_type = attrs.get(providernet.NETWORK_TYPE)
physical_network = attrs.get(providernet.PHYSICAL_NETWORK)
segmentation_id = attrs.get(providernet.SEGMENTATION_ID)
network_type_set = attributes.is_attr_set(network_type)
physical_network_set = attributes.is_attr_set(physical_network)
segmentation_id_set = attributes.is_attr_set(segmentation_id)
if not (network_type_set or physical_network_set or
segmentation_id_set):
return (None, None, None)
if not network_type_set:
msg = _("provider:network_type required")
raise n_exc.InvalidInput(error_message=msg)
elif network_type == c_const.NETWORK_TYPE_VLAN:
if not segmentation_id_set:
msg = _("provider:segmentation_id required")
raise n_exc.InvalidInput(error_message=msg)
if segmentation_id < 1 or segmentation_id > 4094:
msg = _("provider:segmentation_id out of range "
"(1 through 4094)")
raise n_exc.InvalidInput(error_message=msg)
elif network_type == c_const.NETWORK_TYPE_OVERLAY:
if physical_network_set:
msg = _("provider:physical_network specified for Overlay "
"network")
raise n_exc.InvalidInput(error_message=msg)
else:
physical_network = None
if not segmentation_id_set:
msg = _("provider:segmentation_id required")
raise n_exc.InvalidInput(error_message=msg)
if segmentation_id < 5000:
msg = _("provider:segmentation_id out of range "
"(5000+)")
raise n_exc.InvalidInput(error_message=msg)
else:
msg = _("provider:network_type %s not supported"), network_type
raise n_exc.InvalidInput(error_message=msg)
if network_type == c_const.NETWORK_TYPE_VLAN:
if physical_network_set:
network_profiles = n1kv_db_v2.get_network_profiles()
for network_profile in network_profiles:
if physical_network == network_profile[
'physical_network']:
break
else:
msg = (_("Unknown provider:physical_network %s"),
physical_network)
raise n_exc.InvalidInput(error_message=msg)
else:
msg = _("provider:physical_network required")
raise n_exc.InvalidInput(error_message=msg)
return (network_type, physical_network, segmentation_id)
    def _check_provider_update(self, context, attrs):
        """Handle Provider network updates.

        Provider attributes are currently immutable: any attempt to set
        them on update is rejected.
        """
        network_type = attrs.get(providernet.NETWORK_TYPE)
        physical_network = attrs.get(providernet.PHYSICAL_NETWORK)
        segmentation_id = attrs.get(providernet.SEGMENTATION_ID)

        network_type_set = attributes.is_attr_set(network_type)
        physical_network_set = attributes.is_attr_set(physical_network)
        segmentation_id_set = attributes.is_attr_set(segmentation_id)

        if not (network_type_set or physical_network_set or
                segmentation_id_set):
            # Nothing provider-related in the update: accept silently
            return
        # TBD : Need to handle provider network updates
        msg = _("Plugin does not support updating provider attributes")
        raise n_exc.InvalidInput(error_message=msg)
    def _get_cluster(self, segment1, segment2, clusters):
        """
        Returns a cluster to apply the segment mapping

        Clusters are tried from least to most loaded ('size'); the chosen
        cluster's size is bumped by two for the new segment pair.

        :param segment1: UUID of segment to be mapped
        :param segment2: UUID of segment to be mapped
        :param clusters: List of clusters
        :returns: the chosen cluster's encapsulation profile name, or
                  None when no suitable cluster exists
        """
        for cluster in sorted(clusters, key=lambda k: k['size']):
            for mapping in cluster[c_const.MAPPINGS]:
                for segment in mapping[c_const.SEGMENTS]:
                    if segment1 in segment or segment2 in segment:
                        # A segment is already mapped here
                        break
                else:
                    # Neither segment appears in this mapping: use cluster
                    cluster['size'] += 2
                    return cluster['encapProfileName']
                # NOTE(review): reconstructed from indentation-mangled
                # source; this break skips to the next cluster once a
                # segment match is found — confirm against upstream.
                break
        return
def _extend_mapping_dict(self, context, mapping_dict, segment):
"""
Extend a mapping dictionary with dot1q tag and bridge-domain name.
:param context: neutron api request context
:param mapping_dict: dictionary to populate values
:param segment: id of the segment being populated
"""
net = self.get_network(context, segment)
if net[providernet.NETWORK_TYPE] == c_const.NETWORK_TYPE_VLAN:
mapping_dict['dot1q'] = str(net[providernet.SEGMENTATION_ID])
else:
mapping_dict['bridgeDomain'] = (net['name'] +
c_const.BRIDGE_DOMAIN_SUFFIX)
    def _send_add_multi_segment_request(self, context, net_id, segment_pairs):
        """
        Send Add multi-segment network request to VSM.

        :param context: neutron api request context
        :param net_id: UUID of the multi-segment network
        :param segment_pairs: List of segments in UUID pairs
                              that need to be bridged
        :raises cisco_exceptions.NoClusterFound: when no online cluster can
                                                 host a pair
        """
        if not segment_pairs:
            return
        session = context.session
        n1kvclient = n1kv_client.Client()
        clusters = n1kvclient.get_clusters()
        online_clusters = []
        encap_dict = {}
        # Keep only clusters currently online, computing each one's 'size'
        # (total number of segments it already maps) for load balancing
        for cluster in clusters['body'][c_const.SET]:
            cluster = cluster[c_const.PROPERTIES]
            if cluster[c_const.STATE] == c_const.ONLINE:
                cluster['size'] = 0
                for mapping in cluster[c_const.MAPPINGS]:
                    cluster['size'] += (
                        len(mapping[c_const.SEGMENTS]))
                online_clusters.append(cluster)
        # Assign each segment pair to a cluster and group the new mappings
        # by encapsulation profile
        for (segment1, segment2) in segment_pairs:
            encap_profile = self._get_cluster(segment1, segment2,
                                              online_clusters)
            if encap_profile is not None:
                if encap_profile in encap_dict:
                    profile_dict = encap_dict[encap_profile]
                else:
                    profile_dict = {'name': encap_profile,
                                    'addMappings': [],
                                    'delMappings': []}
                    encap_dict[encap_profile] = profile_dict
                mapping_dict = {}
                self._extend_mapping_dict(context,
                                          mapping_dict, segment1)
                self._extend_mapping_dict(context,
                                          mapping_dict, segment2)
                profile_dict['addMappings'].append(mapping_dict)
                # Persist which profile bridges this pair (used on delete)
                n1kv_db_v2.add_multi_segment_encap_profile_name(session,
                                                                net_id,
                                                                (segment1,
                                                                 segment2),
                                                                encap_profile)
            else:
                raise cisco_exceptions.NoClusterFound

        # Push one update per touched encapsulation profile
        for profile in encap_dict:
            n1kvclient.update_encapsulation_profile(context, profile,
                                                    encap_dict[profile])
    def _send_del_multi_segment_request(self, context, net_id, segment_pairs):
        """
        Send Delete multi-segment network request to VSM.

        :param context: neutron api request context
        :param net_id: UUID of the multi-segment network
        :param segment_pairs: List of segments in UUID pairs
                              whose bridging needs to be removed
        """
        if not segment_pairs:
            return
        session = context.session
        encap_dict = {}
        n1kvclient = n1kv_client.Client()
        # Group the mappings to delete by their encapsulation profile
        # (looked up from the binding stored at add time)
        for (segment1, segment2) in segment_pairs:
            binding = (
                n1kv_db_v2.get_multi_segment_network_binding(session, net_id,
                                                             (segment1,
                                                              segment2)))
            encap_profile = binding['encap_profile_name']
            if encap_profile in encap_dict:
                profile_dict = encap_dict[encap_profile]
            else:
                profile_dict = {'name': encap_profile,
                                'addMappings': [],
                                'delMappings': []}
                encap_dict[encap_profile] = profile_dict
            mapping_dict = {}
            self._extend_mapping_dict(context,
                                      mapping_dict, segment1)
            self._extend_mapping_dict(context,
                                      mapping_dict, segment2)
            profile_dict['delMappings'].append(mapping_dict)

        # Push one update per touched encapsulation profile
        for profile in encap_dict:
            n1kvclient.update_encapsulation_profile(context, profile,
                                                    encap_dict[profile])
def _get_encap_segments(self, context, segment_pairs):
"""
Get the list of segments in encapsulation profile format.
:param context: neutron api request context
:param segment_pairs: List of segments that need to be bridged
"""
member_list = []
for pair in segment_pairs:
(segment, dot1qtag) = pair
member_dict = {}
net = self.get_network(context, segment)
member_dict['bridgeDomain'] = (net['name'] +
c_const.BRIDGE_DOMAIN_SUFFIX)
member_dict['dot1q'] = dot1qtag
member_list.append(member_dict)
return member_list
def _populate_member_segments(self, context, network, segment_pairs, oper):
"""
Populate trunk network dict with member segments.
:param context: neutron api request context
:param network: Dictionary containing the trunk network information
:param segment_pairs: List of segments in UUID pairs
that needs to be trunked
:param oper: Operation to be performed
"""
LOG.debug(_('_populate_member_segments %s'), segment_pairs)
trunk_list = []
for (segment, dot1qtag) in segment_pairs:
net = self.get_network(context, segment)
member_dict = {'segment': net['name'],
'dot1qtag': dot1qtag}
trunk_list.append(member_dict)
if oper == n1kv.SEGMENT_ADD:
network['add_segment_list'] = trunk_list
elif oper == n1kv.SEGMENT_DEL:
network['del_segment_list'] = trunk_list
    def _parse_multi_segments(self, context, attrs, param):
        """
        Parse the multi-segment network attributes.

        :param context: neutron api request context
        :param attrs: Attributes of the network
        :param param: Additional parameter indicating an add
                      or del operation
        :returns: List of segment UUIDs in set pairs
        :raises n_exc.InvalidInput: on malformed UUIDs or invalid pairing
        """
        pair_list = []
        valid_seg_types = [c_const.NETWORK_TYPE_VLAN,
                           c_const.NETWORK_TYPE_OVERLAY]
        segments = attrs.get(param)
        if not attributes.is_attr_set(segments):
            return pair_list
        # Attribute format: "uuid1:uuid2,uuid3:uuid4,..."
        for pair in segments.split(','):
            segment1, sep, segment2 = pair.partition(':')
            if (uuidutils.is_uuid_like(segment1) and
                    uuidutils.is_uuid_like(segment2)):
                binding1 = n1kv_db_v2.get_network_binding(context.session,
                                                          segment1)
                binding2 = n1kv_db_v2.get_network_binding(context.session,
                                                          segment2)
                # Only vlan<->overlay bridging is allowed: both segments must
                # be of valid types and of different types
                if (binding1.network_type not in valid_seg_types or
                        binding2.network_type not in valid_seg_types or
                        binding1.network_type == binding2.network_type):
                    msg = _("Invalid pairing supplied")
                    raise n_exc.InvalidInput(error_message=msg)
                else:
                    pair_list.append((segment1, segment2))
            else:
                LOG.debug(_('Invalid UUID supplied in %s'), pair)
                msg = _("Invalid UUID supplied")
                raise n_exc.InvalidInput(error_message=msg)
        return pair_list
    def _parse_trunk_segments(self, context, attrs, param, physical_network,
                              sub_type):
        """
        Parse the trunk network attributes.

        :param context: neutron api request context
        :param attrs: Attributes of the network
        :param param: Additional parameter indicating an add
                      or del operation
        :param physical_network: Physical network of the trunk segment
        :param sub_type: Sub-type of the trunk segment
        :returns: List of segment UUIDs and dot1qtag (for vxlan) in set pairs
        :raises n_exc.InvalidInput: on malformed UUIDs, incompatible segment
                                    types, mismatched physical networks or
                                    out-of-range dot1q tags
        """
        pair_list = []
        segments = attrs.get(param)
        if not attributes.is_attr_set(segments):
            return pair_list
        # Attribute value is a comma-separated list of "uuid:dot1qtag"
        # entries; the tag part is only meaningful for vxlan trunks.
        for pair in segments.split(','):
            segment, sep, dot1qtag = pair.partition(':')
            if sub_type == c_const.NETWORK_TYPE_VLAN:
                # VLAN trunks carry no per-member tag.
                dot1qtag = ''
            if uuidutils.is_uuid_like(segment):
                binding = n1kv_db_v2.get_network_binding(context.session,
                                                         segment)
                if binding.network_type == c_const.NETWORK_TYPE_TRUNK:
                    # Trunk-in-trunk nesting is not supported.
                    msg = _("Cannot add a trunk segment '%s' as a member of "
                            "another trunk segment") % segment
                    raise n_exc.InvalidInput(error_message=msg)
                elif binding.network_type == c_const.NETWORK_TYPE_VLAN:
                    if sub_type == c_const.NETWORK_TYPE_OVERLAY:
                        msg = _("Cannot add vlan segment '%s' as a member of "
                                "a vxlan trunk segment") % segment
                        raise n_exc.InvalidInput(error_message=msg)
                    # All VLAN members must share one physical network; the
                    # first member pins it for the rest.
                    if not physical_network:
                        physical_network = binding.physical_network
                    elif physical_network != binding.physical_network:
                        msg = _("Network UUID '%s' belongs to a different "
                                "physical network") % segment
                        raise n_exc.InvalidInput(error_message=msg)
                elif binding.network_type == c_const.NETWORK_TYPE_OVERLAY:
                    if sub_type == c_const.NETWORK_TYPE_VLAN:
                        msg = _("Cannot add vxlan segment '%s' as a member of "
                                "a vlan trunk segment") % segment
                        raise n_exc.InvalidInput(error_message=msg)
                    # Overlay members require a valid dot1q tag.
                    try:
                        if not utils.is_valid_vlan_tag(int(dot1qtag)):
                            msg = _("Vlan tag '%s' is out of range") % dot1qtag
                            raise n_exc.InvalidInput(error_message=msg)
                    except ValueError:
                        msg = _("Vlan tag '%s' is not an integer "
                                "value") % dot1qtag
                        raise n_exc.InvalidInput(error_message=msg)
                pair_list.append((segment, dot1qtag))
            else:
                LOG.debug(_('%s is not a valid uuid'), segment)
                msg = _("'%s' is not a valid UUID") % segment
                raise n_exc.InvalidInput(error_message=msg)
        return pair_list
def _extend_network_dict_member_segments(self, context, network):
"""Add the extended parameter member segments to the network."""
members = []
binding = n1kv_db_v2.get_network_binding(context.session,
network['id'])
if binding.network_type == c_const.NETWORK_TYPE_TRUNK:
members = n1kv_db_v2.get_trunk_members(context.session,
network['id'])
elif binding.network_type == c_const.NETWORK_TYPE_MULTI_SEGMENT:
members = n1kv_db_v2.get_multi_segment_members(context.session,
network['id'])
network[n1kv.MEMBER_SEGMENTS] = members
def _extend_network_dict_profile(self, context, network):
"""Add the extended parameter network profile to the network."""
binding = n1kv_db_v2.get_network_binding(context.session,
network['id'])
network[n1kv.PROFILE_ID] = binding.profile_id
def _extend_port_dict_profile(self, context, port):
"""Add the extended parameter port profile to the port."""
binding = n1kv_db_v2.get_port_binding(context.session,
port['id'])
port[n1kv.PROFILE_ID] = binding.profile_id
    def _process_network_profile(self, context, network):
        """
        Validate network profile exists.

        Falls back to the configured default network profile when the
        request does not carry one, and writes the resolved profile id back
        into the network dict.

        :param context: neutron api request context
        :param network: network dictionary from the request body
        :returns: UUID of the resolved network profile
        """
        profile_id = network.get(n1kv.PROFILE_ID)
        profile_id_set = attributes.is_attr_set(profile_id)
        if not profile_id_set:
            # No profile supplied: use the default from cisco_plugins.ini.
            profile_name = c_conf.CISCO_N1K.default_network_profile
            net_p = self._get_network_profile_by_name(context.session,
                                                      profile_name)
            profile_id = net_p['id']
            network['n1kv:profile_id'] = profile_id
        return profile_id
def _process_policy_profile(self, context, attrs):
"""Validates whether policy profile exists."""
profile_id = attrs.get(n1kv.PROFILE_ID)
profile_id_set = attributes.is_attr_set(profile_id)
if not profile_id_set:
msg = _("n1kv:profile_id does not exist")
raise n_exc.InvalidInput(error_message=msg)
if not self._policy_profile_exists(profile_id):
msg = _("n1kv:profile_id does not exist")
raise n_exc.InvalidInput(error_message=msg)
return profile_id
def _send_create_logical_network_request(self, network_profile, tenant_id):
"""
Send create logical network request to VSM.
:param network_profile: network profile dictionary
:param tenant_id: UUID representing the tenant
"""
LOG.debug(_('_send_create_logical_network'))
n1kvclient = n1kv_client.Client()
n1kvclient.create_logical_network(network_profile, tenant_id)
def _send_delete_logical_network_request(self, network_profile):
"""
Send delete logical network request to VSM.
:param network_profile: network profile dictionary
"""
LOG.debug('_send_delete_logical_network')
n1kvclient = n1kv_client.Client()
logical_network_name = (network_profile['id'] +
c_const.LOGICAL_NETWORK_SUFFIX)
n1kvclient.delete_logical_network(logical_network_name)
def _send_create_network_profile_request(self, context, profile):
"""
Send create network profile request to VSM.
:param context: neutron api request context
:param profile: network profile dictionary
"""
LOG.debug(_('_send_create_network_profile_request: %s'), profile['id'])
n1kvclient = n1kv_client.Client()
n1kvclient.create_network_segment_pool(profile, context.tenant_id)
def _send_update_network_profile_request(self, profile):
"""
Send update network profile request to VSM.
:param profile: network profile dictionary
"""
LOG.debug(_('_send_update_network_profile_request: %s'), profile['id'])
n1kvclient = n1kv_client.Client()
n1kvclient.update_network_segment_pool(profile)
def _send_delete_network_profile_request(self, profile):
"""
Send delete network profile request to VSM.
:param profile: network profile dictionary
"""
LOG.debug(_('_send_delete_network_profile_request: %s'),
profile['name'])
n1kvclient = n1kv_client.Client()
n1kvclient.delete_network_segment_pool(profile['id'])
def _send_create_network_request(self, context, network, segment_pairs):
"""
Send create network request to VSM.
Create a bridge domain for network of type Overlay.
:param context: neutron api request context
:param network: network dictionary
:param segment_pairs: List of segments in UUID pairs
that need to be bridged
"""
LOG.debug(_('_send_create_network_request: %s'), network['id'])
profile = self.get_network_profile(context,
network[n1kv.PROFILE_ID])
n1kvclient = n1kv_client.Client()
if network[providernet.NETWORK_TYPE] == c_const.NETWORK_TYPE_OVERLAY:
n1kvclient.create_bridge_domain(network, profile['sub_type'])
if network[providernet.NETWORK_TYPE] == c_const.NETWORK_TYPE_TRUNK:
self._populate_member_segments(context, network, segment_pairs,
n1kv.SEGMENT_ADD)
network['del_segment_list'] = []
if profile['sub_type'] == c_const.NETWORK_TYPE_OVERLAY:
encap_dict = {'name': (network['name'] +
c_const.ENCAPSULATION_PROFILE_SUFFIX),
'add_segment_list': (
self._get_encap_segments(context,
segment_pairs)),
'del_segment_list': []}
n1kvclient.create_encapsulation_profile(encap_dict)
n1kvclient.create_network_segment(network, profile)
    def _send_update_network_request(self, context, network, add_segments,
                                     del_segments):
        """
        Send update network request to VSM.

        :param context: neutron api request context
        :param network: network dictionary
        :param add_segments: List of segments bindings
                             that need to be added
        :param del_segments: List of segments bindings
                             that need to be deleted
        """
        LOG.debug(_('_send_update_network_request: %s'), network['id'])
        db_session = context.session
        profile = n1kv_db_v2.get_network_profile(
            db_session, network[n1kv.PROFILE_ID])
        n1kvclient = n1kv_client.Client()
        body = {'description': network['name'],
                'id': network['id'],
                'networkSegmentPool': profile['id'],
                'vlan': network[providernet.SEGMENTATION_ID],
                'mode': 'access',
                'segmentType': profile['segment_type'],
                'addSegments': [],
                'delSegments': []}
        if network[providernet.NETWORK_TYPE] == c_const.NETWORK_TYPE_TRUNK:
            # Trunk networks overwrite the default access-mode body with the
            # trunk membership deltas computed from add/del segment lists.
            self._populate_member_segments(context, network, add_segments,
                                           n1kv.SEGMENT_ADD)
            self._populate_member_segments(context, network, del_segments,
                                           n1kv.SEGMENT_DEL)
            body['mode'] = c_const.NETWORK_TYPE_TRUNK
            body['segmentType'] = profile['sub_type']
            body['addSegments'] = network['add_segment_list']
            body['delSegments'] = network['del_segment_list']
            LOG.debug(_('add_segments=%s'), body['addSegments'])
            LOG.debug(_('del_segments=%s'), body['delSegments'])
            if profile['sub_type'] == c_const.NETWORK_TYPE_OVERLAY:
                # Overlay trunks additionally keep an encapsulation profile
                # (named after the network UUID) whose mappings must track
                # the same membership changes.
                encap_profile = (network['id'] +
                                 c_const.ENCAPSULATION_PROFILE_SUFFIX)
                encap_dict = {'name': encap_profile,
                              'addMappings': (
                                  self._get_encap_segments(context,
                                                           add_segments)),
                              'delMappings': (
                                  self._get_encap_segments(context,
                                                           del_segments))}
                n1kvclient.update_encapsulation_profile(context, encap_profile,
                                                        encap_dict)
        n1kvclient.update_network_segment(network['id'], body)
    def _send_delete_network_request(self, context, network):
        """
        Send delete network request to VSM.

        Delete bridge domain if network is of type Overlay.
        Delete encapsulation profile if network is of type OVERLAY Trunk.
        For multi-segment networks, detach every bridged segment pair from
        its encapsulation profile before removing the network segment.

        :param context: neutron api request context
        :param network: network dictionary
        """
        LOG.debug(_('_send_delete_network_request: %s'), network['id'])
        n1kvclient = n1kv_client.Client()
        session = context.session
        if network[providernet.NETWORK_TYPE] == c_const.NETWORK_TYPE_OVERLAY:
            name = network['id'] + c_const.BRIDGE_DOMAIN_SUFFIX
            n1kvclient.delete_bridge_domain(name)
        elif network[providernet.NETWORK_TYPE] == c_const.NETWORK_TYPE_TRUNK:
            profile = self.get_network_profile(
                context, network[n1kv.PROFILE_ID])
            if profile['sub_type'] == c_const.NETWORK_TYPE_OVERLAY:
                profile_name = (network['id'] +
                                c_const.ENCAPSULATION_PROFILE_SUFFIX)
                n1kvclient.delete_encapsulation_profile(profile_name)
        elif (network[providernet.NETWORK_TYPE] ==
                c_const.NETWORK_TYPE_MULTI_SEGMENT):
            encap_dict = n1kv_db_v2.get_multi_segment_encap_dict(session,
                                                                 network['id'])
            for profile in encap_dict:
                profile_dict = {'name': profile,
                                'addSegments': [],
                                'delSegments': []}
                # Remove each segment pair of this network from the
                # encapsulation profile's mappings.
                for segment_pair in encap_dict[profile]:
                    mapping_dict = {}
                    (segment1, segment2) = segment_pair
                    self._extend_mapping_dict(context,
                                              mapping_dict, segment1)
                    self._extend_mapping_dict(context,
                                              mapping_dict, segment2)
                    profile_dict['delSegments'].append(mapping_dict)
                n1kvclient.update_encapsulation_profile(context, profile,
                                                        profile_dict)
        n1kvclient.delete_network_segment(network['id'])
def _send_create_subnet_request(self, context, subnet):
"""
Send create subnet request to VSM.
:param context: neutron api request context
:param subnet: subnet dictionary
"""
LOG.debug(_('_send_create_subnet_request: %s'), subnet['id'])
n1kvclient = n1kv_client.Client()
n1kvclient.create_ip_pool(subnet)
def _send_update_subnet_request(self, subnet):
"""
Send update subnet request to VSM.
:param subnet: subnet dictionary
"""
LOG.debug(_('_send_update_subnet_request: %s'), subnet['name'])
n1kvclient = n1kv_client.Client()
n1kvclient.update_ip_pool(subnet)
    def _send_delete_subnet_request(self, context, subnet):
        """
        Send delete subnet request to VSM.

        :param context: neutron api request context
        :param subnet: subnet dictionary
        """
        LOG.debug(_('_send_delete_subnet_request: %s'), subnet['name'])
        body = {'ipPool': subnet['id'], 'deleteSubnet': True}
        n1kvclient = n1kv_client.Client()
        # Detach the IP pool from its network segment first, then delete
        # the pool itself.
        n1kvclient.update_network_segment(subnet['network_id'], body=body)
        n1kvclient.delete_ip_pool(subnet['id'])
def _send_create_port_request(self,
context,
port,
port_count,
policy_profile,
vm_network_name):
"""
Send create port request to VSM.
Create a VM network for a network and policy profile combination.
If the VM network already exists, bind this port to the existing
VM network on the VSM.
:param context: neutron api request context
:param port: port dictionary
:param port_count: integer representing the number of ports in one
VM Network
:param policy_profile: object of type policy profile
:param vm_network_name: string representing the name of the VM
network
"""
LOG.debug(_('_send_create_port_request: %s'), port)
n1kvclient = n1kv_client.Client()
if port_count == 1:
n1kvclient.create_vm_network(port,
vm_network_name,
policy_profile)
else:
n1kvclient.create_n1kv_port(port, vm_network_name)
def _send_update_port_request(self, port_id, mac_address, vm_network_name):
"""
Send update port request to VSM.
:param port_id: UUID representing port to update
:param mac_address: string representing the mac address
:param vm_network_name: VM network name to which the port is bound
"""
LOG.debug(_('_send_update_port_request: %s'), port_id)
body = {'portId': port_id,
'macAddress': mac_address}
n1kvclient = n1kv_client.Client()
n1kvclient.update_n1kv_port(vm_network_name, port_id, body)
def _send_delete_port_request(self, context, port, vm_network):
"""
Send delete port request to VSM.
Delete the port on the VSM. If it is the last port on the VM Network,
delete the VM Network.
:param context: neutron api request context
:param port: port object which is to be deleted
:param vm_network: VM network object with which the port is associated
"""
LOG.debug(_('_send_delete_port_request: %s'), port['id'])
n1kvclient = n1kv_client.Client()
n1kvclient.delete_n1kv_port(vm_network['name'], port['id'])
if vm_network['port_count'] == 0:
n1kvclient.delete_vm_network(vm_network['name'])
def _get_segmentation_id(self, context, id):
"""
Retrieve segmentation ID for a given network.
:param context: neutron api request context
:param id: UUID of the network
:returns: segmentation ID for the network
"""
session = context.session
binding = n1kv_db_v2.get_network_binding(session, id)
return binding.segmentation_id
    def create_network(self, context, network):
        """
        Create network based on network profile.

        Allocates segment/physical-network details for tenant networks (or
        validates them for provider networks) inside one DB transaction,
        then mirrors the network to the VSM, rolling the DB back if the VSM
        call fails.

        :param context: neutron api request context
        :param network: network dictionary
        :returns: network object
        """
        (network_type, physical_network,
         segmentation_id) = self._process_provider_create(context,
                                                          network['network'])
        profile_id = self._process_network_profile(context, network['network'])
        segment_pairs = None
        LOG.debug(_('Create network: profile_id=%s'), profile_id)
        session = context.session
        with session.begin(subtransactions=True):
            if not network_type:
                # tenant network: allocate segment details from the profile.
                (physical_network, network_type, segmentation_id,
                    multicast_ip) = n1kv_db_v2.alloc_network(session,
                                                             profile_id)
                LOG.debug(_('Physical_network %(phy_net)s, '
                            'seg_type %(net_type)s, '
                            'seg_id %(seg_id)s, '
                            'multicast_ip %(multicast_ip)s'),
                          {'phy_net': physical_network,
                           'net_type': network_type,
                           'seg_id': segmentation_id,
                           'multicast_ip': multicast_ip})
                if network_type == c_const.NETWORK_TYPE_MULTI_SEGMENT:
                    segment_pairs = (
                        self._parse_multi_segments(context, network['network'],
                                                   n1kv.SEGMENT_ADD))
                    LOG.debug(_('Seg list %s '), segment_pairs)
                elif network_type == c_const.NETWORK_TYPE_TRUNK:
                    network_profile = self.get_network_profile(context,
                                                               profile_id)
                    segment_pairs = (
                        self._parse_trunk_segments(context, network['network'],
                                                   n1kv.SEGMENT_ADD,
                                                   physical_network,
                                                   network_profile['sub_type']
                                                   ))
                    LOG.debug(_('Seg list %s '), segment_pairs)
                else:
                    if not segmentation_id:
                        raise n_exc.TenantNetworksDisabled()
            else:
                # provider network: caller supplied the segment details;
                # validate the VLAN id against the profile's range and
                # reserve it.
                if network_type == c_const.NETWORK_TYPE_VLAN:
                    network_profile = self.get_network_profile(context,
                                                               profile_id)
                    seg_min, seg_max = self._get_segment_range(
                        network_profile['segment_range'])
                    if not seg_min <= segmentation_id <= seg_max:
                        raise cisco_exceptions.VlanIDOutsidePool
                    n1kv_db_v2.reserve_specific_vlan(session,
                                                     physical_network,
                                                     segmentation_id)
                    multicast_ip = "0.0.0.0"
            net = super(N1kvNeutronPluginV2, self).create_network(context,
                                                                  network)
            n1kv_db_v2.add_network_binding(session,
                                           net['id'],
                                           network_type,
                                           physical_network,
                                           segmentation_id,
                                           multicast_ip,
                                           profile_id,
                                           segment_pairs)
            self._process_l3_create(context, net, network['network'])
            self._extend_network_dict_provider(context, net)
            self._extend_network_dict_profile(context, net)
        # DB transaction committed; now notify the VSM and compensate by
        # deleting the DB record if the VSM rejects the network.
        try:
            if network_type == c_const.NETWORK_TYPE_MULTI_SEGMENT:
                self._send_add_multi_segment_request(context, net['id'],
                                                     segment_pairs)
            else:
                self._send_create_network_request(context, net, segment_pairs)
        except(cisco_exceptions.VSMError,
               cisco_exceptions.VSMConnectionFailed):
            with excutils.save_and_reraise_exception():
                self._delete_network_db(context, net['id'])
        else:
            LOG.debug(_("Created network: %s"), net['id'])
            return net
    def update_network(self, context, id, network):
        """
        Update network parameters.

        :param context: neutron api request context
        :param id: UUID representing the network to update
        :param network: network dictionary with the updated attributes
        :returns: updated network object
        """
        self._check_provider_update(context, network['network'])
        add_segments = []
        del_segments = []
        session = context.session
        with session.begin(subtransactions=True):
            net = super(N1kvNeutronPluginV2, self).update_network(context, id,
                                                                  network)
            self._process_l3_update(context, net, network['network'])
            binding = n1kv_db_v2.get_network_binding(session, id)
            if binding.network_type == c_const.NETWORK_TYPE_MULTI_SEGMENT:
                # Multi-segment membership changes are sent to the VSM here;
                # the access/trunk case is handled after the transaction.
                add_segments = (
                    self._parse_multi_segments(context, network['network'],
                                               n1kv.SEGMENT_ADD))
                n1kv_db_v2.add_multi_segment_binding(session,
                                                     net['id'], add_segments)
                del_segments = (
                    self._parse_multi_segments(context, network['network'],
                                               n1kv.SEGMENT_DEL))
                self._send_add_multi_segment_request(context, net['id'],
                                                     add_segments)
                self._send_del_multi_segment_request(context, net['id'],
                                                     del_segments)
                n1kv_db_v2.del_multi_segment_binding(session,
                                                     net['id'], del_segments)
            elif binding.network_type == c_const.NETWORK_TYPE_TRUNK:
                network_profile = self.get_network_profile(context,
                                                           binding.profile_id)
                add_segments = (
                    self._parse_trunk_segments(context, network['network'],
                                               n1kv.SEGMENT_ADD,
                                               binding.physical_network,
                                               network_profile['sub_type']))
                n1kv_db_v2.add_trunk_segment_binding(session,
                                                     net['id'], add_segments)
                del_segments = (
                    self._parse_trunk_segments(context, network['network'],
                                               n1kv.SEGMENT_DEL,
                                               binding.physical_network,
                                               network_profile['sub_type']))
                n1kv_db_v2.del_trunk_segment_binding(session,
                                                     net['id'], del_segments)
            self._extend_network_dict_provider(context, net)
            self._extend_network_dict_profile(context, net)
        if binding.network_type != c_const.NETWORK_TYPE_MULTI_SEGMENT:
            self._send_update_network_request(context, net, add_segments,
                                              del_segments)
        LOG.debug(_("Updated network: %s"), net['id'])
        return net
    def delete_network(self, context, id):
        """
        Delete a network.

        :param context: neutron api request context
        :param id: UUID representing the network to delete
        :raises n_exc.InvalidInput: if the network is still a member of a
                                    trunk or multi-segment network
        """
        session = context.session
        with session.begin(subtransactions=True):
            network = self.get_network(context, id)
            if n1kv_db_v2.is_trunk_member(session, id):
                msg = _("Cannot delete network '%s' "
                        "that is member of a trunk segment") % network['name']
                raise n_exc.InvalidInput(error_message=msg)
            if n1kv_db_v2.is_multi_segment_member(session, id):
                msg = _("Cannot delete network '%s' that is a member of a "
                        "multi-segment network") % network['name']
                raise n_exc.InvalidInput(error_message=msg)
            self._delete_network_db(context, id)
        # the network_binding record is deleted via cascade from
        # the network record, so explicit removal is not necessary
        self._send_delete_network_request(context, network)
        LOG.debug("Deleted network: %s", id)
    def _delete_network_db(self, context, id):
        """
        Delete a network from the DB and release its segment allocation.

        :param context: neutron api request context
        :param id: UUID representing the network to delete
        """
        session = context.session
        with session.begin(subtransactions=True):
            binding = n1kv_db_v2.get_network_binding(session, id)
            # Return the segment id to the pool it was allocated from.
            if binding.network_type == c_const.NETWORK_TYPE_OVERLAY:
                n1kv_db_v2.release_vxlan(session, binding.segmentation_id)
            elif binding.network_type == c_const.NETWORK_TYPE_VLAN:
                n1kv_db_v2.release_vlan(session, binding.physical_network,
                                        binding.segmentation_id)
            super(N1kvNeutronPluginV2, self).delete_network(context, id)
    def get_network(self, context, id, fields=None):
        """
        Retrieve a Network.

        :param context: neutron api request context
        :param id: UUID representing the network to fetch
        :param fields: a list of strings that are valid keys in a network
                       dictionary. Only these fields will be returned.
        :returns: requested network dictionary
        """
        LOG.debug(_("Get network: %s"), id)
        # Fetch the full dict (fields=None) so the extension helpers can
        # annotate it; field filtering happens last.
        net = super(N1kvNeutronPluginV2, self).get_network(context, id, None)
        self._extend_network_dict_provider(context, net)
        self._extend_network_dict_profile(context, net)
        self._extend_network_dict_member_segments(context, net)
        return self._fields(net, fields)
    def get_networks(self, context, filters=None, fields=None):
        """
        Retrieve a list of networks.

        :param context: neutron api request context
        :param filters: a dictionary with keys that are valid keys for a
                        network object. Values in this dictiontary are an
                        iterable containing values that will be used for an
                        exact match comparison for that value. Each result
                        returned by this function will have matched one of the
                        values for each key in filters
        :params fields: a list of strings that are valid keys in a network
                        dictionary. Only these fields will be returned.
        :returns: list of network dictionaries.
        """
        LOG.debug(_("Get networks"))
        # Fetch full dicts (fields=None) so extension helpers can annotate
        # them; field filtering happens per-network at the end.
        nets = super(N1kvNeutronPluginV2, self).get_networks(context, filters,
                                                             None)
        for net in nets:
            self._extend_network_dict_provider(context, net)
            self._extend_network_dict_profile(context, net)
        return [self._fields(net, fields) for net in nets]
    def create_port(self, context, port):
        """
        Create neutron port.

        Create a port. Use a default policy profile for ports created for dhcp
        and router interface. Default policy profile name is configured in the
        /etc/neutron/cisco_plugins.ini file.

        :param context: neutron api request context
        :param port: port dictionary
        :returns: port object
        """
        p_profile = None
        port_count = None
        vm_network = None
        vm_network_name = None
        profile_id_set = False
        # Set the network policy profile id for auto generated L3/DHCP ports
        if ('device_id' in port['port'] and port['port']['device_owner'] in
            [constants.DEVICE_OWNER_DHCP, constants.DEVICE_OWNER_ROUTER_INTF,
             constants.DEVICE_OWNER_ROUTER_GW,
             constants.DEVICE_OWNER_FLOATINGIP]):
            p_profile_name = c_conf.CISCO_N1K.network_node_policy_profile
            p_profile = self._get_policy_profile_by_name(p_profile_name)
            if p_profile:
                port['port']['n1kv:profile_id'] = p_profile['id']
        if n1kv.PROFILE_ID in port['port']:
            profile_id = port['port'].get(n1kv.PROFILE_ID)
            profile_id_set = attributes.is_attr_set(profile_id)
        # Set the default policy profile id for ports if no id is set
        if not profile_id_set:
            p_profile_name = c_conf.CISCO_N1K.default_policy_profile
            p_profile = self._get_policy_profile_by_name(p_profile_name)
            if p_profile:
                port['port']['n1kv:profile_id'] = p_profile['id']
                profile_id_set = True
        profile_id = self._process_policy_profile(context,
                                                  port['port'])
        LOG.debug(_('Create port: profile_id=%s'), profile_id)
        session = context.session
        with session.begin(subtransactions=True):
            pt = super(N1kvNeutronPluginV2, self).create_port(context,
                                                              port)
            n1kv_db_v2.add_port_binding(session, pt['id'], profile_id)
            self._extend_port_dict_profile(context, pt)
            # Ports on the same (policy profile, network) pair share one VM
            # network; look it up, creating it on first use.
            try:
                vm_network = n1kv_db_v2.get_vm_network(
                    context.session,
                    profile_id,
                    pt['network_id'])
            except cisco_exceptions.VMNetworkNotFound:
                # Create a VM Network if no VM network exists.
                vm_network_name = "%s%s_%s" % (c_const.VM_NETWORK_NAME_PREFIX,
                                               profile_id,
                                               pt['network_id'])
                port_count = 1
                vm_network = n1kv_db_v2.add_vm_network(context.session,
                                                       vm_network_name,
                                                       profile_id,
                                                       pt['network_id'],
                                                       port_count)
            else:
                # Update port count of the VM network.
                vm_network_name = vm_network['name']
                port_count = vm_network['port_count'] + 1
                n1kv_db_v2.update_vm_network_port_count(context.session,
                                                        vm_network_name,
                                                        port_count)
            self._process_portbindings_create_and_update(context,
                                                         port['port'],
                                                         pt)
            # Extract policy profile for VM network create in VSM.
            if not p_profile:
                p_profile = n1kv_db_v2.get_policy_profile(session, profile_id)
        # DB transaction committed; notify the VSM and compensate by
        # deleting the port from the DB if the VSM call fails.
        try:
            self._send_create_port_request(context,
                                           pt,
                                           port_count,
                                           p_profile,
                                           vm_network_name)
        except(cisco_exceptions.VSMError,
               cisco_exceptions.VSMConnectionFailed):
            with excutils.save_and_reraise_exception():
                self._delete_port_db(context, pt, vm_network)
        else:
            LOG.debug(_("Created port: %s"), pt)
            return pt
    def update_port(self, context, id, port):
        """
        Update port parameters.

        :param context: neutron api request context
        :param id: UUID representing the port to update
        :param port: port dictionary with the updated attributes
        :returns: updated port object
        """
        LOG.debug(_("Update port: %s"), id)
        with context.session.begin(subtransactions=True):
            updated_port = super(N1kvNeutronPluginV2,
                                 self).update_port(context, id, port)
            self._process_portbindings_create_and_update(context,
                                                         port['port'],
                                                         updated_port)
            self._extend_port_dict_profile(context, updated_port)
        return updated_port
    def delete_port(self, context, id, l3_port_check=True):
        """
        Delete a port.

        :param context: neutron api request context
        :param id: UUID representing the port to delete
        :param l3_port_check: if True, prevent deletion of ports owned by an
                              L3 router
        """
        # if needed, check to see if this is a port owned by
        # and l3-router. If so, we should prevent deletion.
        if l3_port_check:
            self.prevent_l3_port_deletion(context, id)
        with context.session.begin(subtransactions=True):
            port = self.get_port(context, id)
            vm_network = n1kv_db_v2.get_vm_network(context.session,
                                                   port[n1kv.PROFILE_ID],
                                                   port['network_id'])
            router_ids = self.disassociate_floatingips(
                context, id, do_notify=False)
            self._delete_port_db(context, port, vm_network)
        # now that we've left db transaction, we are safe to notify
        self.notify_routers_updated(context, router_ids)
        self._send_delete_port_request(context, port, vm_network)
    def _delete_port_db(self, context, port, vm_network):
        """
        Delete a port from the DB and update its VM network's port count.

        Removes the VM network record entirely when this was its last port.

        :param context: neutron api request context
        :param port: port dictionary to delete
        :param vm_network: VM network dictionary the port belongs to;
                           mutated in place (port_count is decremented)
        """
        with context.session.begin(subtransactions=True):
            vm_network['port_count'] -= 1
            n1kv_db_v2.update_vm_network_port_count(context.session,
                                                    vm_network['name'],
                                                    vm_network['port_count'])
            if vm_network['port_count'] == 0:
                n1kv_db_v2.delete_vm_network(context.session,
                                             port[n1kv.PROFILE_ID],
                                             port['network_id'])
            super(N1kvNeutronPluginV2, self).delete_port(context, port['id'])
    def get_port(self, context, id, fields=None):
        """
        Retrieve a port.

        :param context: neutron api request context
        :param id: UUID representing the port to retrieve
        :param fields: a list of strings that are valid keys in a port
                       dictionary. Only these fields will be returned.
        :returns: port dictionary
        """
        LOG.debug(_("Get port: %s"), id)
        # Fetch the full dict so the profile extension can annotate it;
        # field filtering happens last.
        port = super(N1kvNeutronPluginV2, self).get_port(context, id, None)
        self._extend_port_dict_profile(context, port)
        return self._fields(port, fields)
    def get_ports(self, context, filters=None, fields=None):
        """
        Retrieve a list of ports.

        :param context: neutron api request context
        :param filters: a dictionary with keys that are valid keys for a
                        port object. Values in this dictiontary are an
                        iterable containing values that will be used for an
                        exact match comparison for that value. Each result
                        returned by this function will have matched one of the
                        values for each key in filters
        :params fields: a list of strings that are valid keys in a port
                        dictionary. Only these fields will be returned.
        :returns: list of port dictionaries
        """
        LOG.debug(_("Get ports"))
        # Fetch full dicts so the profile extension can annotate them;
        # field filtering happens per-port at the end.
        ports = super(N1kvNeutronPluginV2, self).get_ports(context, filters,
                                                           None)
        for port in ports:
            self._extend_port_dict_profile(context, port)
        return [self._fields(port, fields) for port in ports]
    def create_subnet(self, context, subnet):
        """
        Create subnet for a given network.

        :param context: neutron api request context
        :param subnet: subnet dictionary
        :returns: subnet object
        """
        LOG.debug(_('Create subnet'))
        sub = super(N1kvNeutronPluginV2, self).create_subnet(context, subnet)
        # Mirror the subnet to the VSM; compensate by removing the DB row
        # if the VSM rejects it.
        try:
            self._send_create_subnet_request(context, sub)
        except(cisco_exceptions.VSMError,
               cisco_exceptions.VSMConnectionFailed):
            with excutils.save_and_reraise_exception():
                super(N1kvNeutronPluginV2,
                      self).delete_subnet(context, sub['id'])
        else:
            LOG.debug(_("Created subnet: %s"), sub['id'])
            return sub
    def update_subnet(self, context, id, subnet):
        """
        Update a subnet.

        :param context: neutron api request context
        :param id: UUID representing subnet to update
        :param subnet: subnet dictionary with the updated attributes
        :returns: updated subnet object
        """
        LOG.debug(_('Update subnet'))
        sub = super(N1kvNeutronPluginV2, self).update_subnet(context,
                                                             id,
                                                             subnet)
        self._send_update_subnet_request(sub)
        return sub
    def delete_subnet(self, context, id):
        """
        Delete a subnet.

        :param context: neutron api request context
        :param id: UUID representing subnet to delete
        :returns: deleted subnet object
        """
        LOG.debug(_('Delete subnet: %s'), id)
        subnet = self.get_subnet(context, id)
        # Remove the subnet from the VSM first; only delete the DB row if
        # the VSM accepted the request.
        self._send_delete_subnet_request(context, subnet)
        return super(N1kvNeutronPluginV2, self).delete_subnet(context, id)
    def get_subnet(self, context, id, fields=None):
        """
        Retrieve a subnet.

        :param context: neutron api request context
        :param id: UUID representing subnet to retrieve
        :params fields: a list of strings that are valid keys in a subnet
                        dictionary. Only these fields will be returned.
        :returns: subnet object
        """
        LOG.debug(_("Get subnet: %s"), id)
        subnet = super(N1kvNeutronPluginV2, self).get_subnet(context, id,
                                                             None)
        return self._fields(subnet, fields)
    def get_subnets(self, context, filters=None, fields=None):
        """
        Retrieve a list of subnets.

        :param context: neutron api request context
        :param filters: a dictionary with keys that are valid keys for a
                        subnet object. Values in this dictiontary are an
                        iterable containing values that will be used for an
                        exact match comparison for that value. Each result
                        returned by this function will have matched one of the
                        values for each key in filters
        :params fields: a list of strings that are valid keys in a subnet
                        dictionary. Only these fields will be returned.
        :returns: list of dictionaries of subnets
        """
        LOG.debug(_("Get subnets"))
        subnets = super(N1kvNeutronPluginV2, self).get_subnets(context,
                                                               filters,
                                                               None)
        return [self._fields(subnet, fields) for subnet in subnets]
    def create_network_profile(self, context, network_profile):
        """
        Create a network profile.

        Create a network profile, which represents a pool of networks
        belonging to one type (VLAN or Overlay). On creation of network
        profile, we retrieve the admin tenant-id which we use to replace
        the previously stored fake tenant-id in tenant-profile bindings.

        :param context: neutron api request context
        :param network_profile: network profile dictionary
        :returns: network profile object
        """
        self._replace_fake_tenant_id_with_real(context)
        with context.session.begin(subtransactions=True):
            net_p = super(N1kvNeutronPluginV2,
                          self).create_network_profile(context,
                                                       network_profile)
        # Two-step VSM provisioning with compensation: if either step
        # fails, undo the DB record (and, for the second step, the logical
        # network created in the first) before re-raising.
        try:
            self._send_create_logical_network_request(net_p,
                                                      context.tenant_id)
        except(cisco_exceptions.VSMError,
               cisco_exceptions.VSMConnectionFailed):
            with excutils.save_and_reraise_exception():
                super(N1kvNeutronPluginV2,
                      self).delete_network_profile(context, net_p['id'])
        try:
            self._send_create_network_profile_request(context, net_p)
        except(cisco_exceptions.VSMError,
               cisco_exceptions.VSMConnectionFailed):
            with excutils.save_and_reraise_exception():
                super(N1kvNeutronPluginV2,
                      self).delete_network_profile(context, net_p['id'])
                self._send_delete_logical_network_request(net_p)
        return net_p
def delete_network_profile(self, context, id):
"""
Delete a network profile.
:param context: neutron api request context
:param id: UUID of the network profile to delete
:returns: deleted network profile object
"""
with context.session.begin(subtransactions=True):
net_p = super(N1kvNeutronPluginV2,
self).delete_network_profile(context, id)
self._send_delete_network_profile_request(net_p)
self._send_delete_logical_network_request(net_p)
    def update_network_profile(self, context, net_profile_id, network_profile):
        """
        Update a network profile.

        :param context: neutron api request context
        :param net_profile_id: UUID of the network profile to update
        :param network_profile: dictionary containing network profile object
        :returns: updated network profile object
        """
        session = context.session
        with session.begin(subtransactions=True):
            net_p = (super(N1kvNeutronPluginV2, self).
                     update_network_profile(context,
                                            net_profile_id,
                                            network_profile))
            self._send_update_network_profile_request(net_p)
        return net_p
    def create_router(self, context, router):
        """
        Handle creation of router.

        Schedule router to L3 agent as part of the create handling.

        :param context: neutron api request context
        :param router: router dictionary
        :returns: router object
        """
        session = context.session
        with session.begin(subtransactions=True):
            rtr = (super(N1kvNeutronPluginV2, self).
                   create_router(context, router))
            LOG.debug(_("Scheduling router %s"), rtr['id'])
            self.schedule_router(context, rtr['id'])
        return rtr
| apache-2.0 |
reddraggone9/youtube-dl | youtube_dl/extractor/sina.py | 107 | 2755 | # coding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..compat import (
compat_urllib_request,
compat_urllib_parse,
)
class SinaIE(InfoExtractor):
    """Extractor for video.sina.com.cn (and Weibo-embedded SWF URLs)."""

    # Three URL shapes are accepted:
    #   * pages whose numeric id is the html file name  -> group 'pseudo_id'
    #   * pages carrying the id after '#', 'vid=' or 'b/' -> group 'id'
    #   * outplay.php SWF embeds used by external sites  -> group 'token'
    _VALID_URL = r'''(?x)https?://(.*?\.)?video\.sina\.com\.cn/
                        (
                            (.+?/(((?P<pseudo_id>\d+).html)|(.*?(\#|(vid=)|b/)(?P<id>\d+?)($|&|\-))))
                            |
                            # This is used by external sites like Weibo
                            (api/sinawebApi/outplay.php/(?P<token>.+?)\.swf)
                        )
                  '''

    _TESTS = [
        {
            'url': 'http://video.sina.com.cn/news/vlist/zt/chczlj2013/?opsubject_id=top12#110028898',
            'md5': 'd65dd22ddcf44e38ce2bf58a10c3e71f',
            'info_dict': {
                'id': '110028898',
                'ext': 'flv',
                'title': '《中国新闻》 朝鲜要求巴拿马立即释放被扣船员',
            }
        },
        {
            'url': 'http://video.sina.com.cn/v/b/101314253-1290078633.html',
            'info_dict': {
                'id': '101314253',
                'ext': 'flv',
                'title': '军方提高对朝情报监视级别',
            },
        },
    ]

    def _extract_video(self, video_id):
        """Resolve *video_id* via v.iask.com and build the info dict."""
        data = compat_urllib_parse.urlencode({'vid': video_id})
        # v_play.php answers with an XML document containing the stream
        # URL (./durl/url) and the title (./vname).
        url_doc = self._download_xml('http://v.iask.com/v_play.php?%s' % data,
                                     video_id, 'Downloading video url')
        # getVideoImage.php replies in the form 'image=<thumbnail url>',
        # hence the split('=') below.
        image_page = self._download_webpage(
            'http://interface.video.sina.com.cn/interface/common/getVideoImage.php?%s' % data,
            video_id, 'Downloading thumbnail info')

        return {'id': video_id,
                'url': url_doc.find('./durl/url').text,
                'ext': 'flv',
                'title': url_doc.find('./vname').text,
                'thumbnail': image_page.split('=')[1],
                }

    def _real_extract(self, url):
        mobj = re.match(self._VALID_URL, url)
        video_id = mobj.group('id')
        if mobj.group('token') is not None:
            # The video id is in the redirected url
            self.to_screen('Getting video id')
            request = compat_urllib_request.Request(url)
            request.get_method = lambda: 'HEAD'
            (_, urlh) = self._download_webpage_handle(request, 'NA', False)
            return self._real_extract(urlh.geturl())
        elif video_id is None:
            # Page named by a pseudo id: scrape the real video id out of
            # the page markup.
            pseudo_id = mobj.group('pseudo_id')
            webpage = self._download_webpage(url, pseudo_id)
            video_id = self._search_regex(r'vid:\'(\d+?)\'', webpage, 'video id')

        return self._extract_video(video_id)
| unlicense |
NetApp/cinder | cinder/volume/drivers/synology/synology_common.py | 6 | 48891 | # Copyright (c) 2016 Synology Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import functools
import hashlib
import json
import math
from random import randint
import string
from Crypto.Cipher import AES
from Crypto.Cipher import PKCS1_v1_5
from Crypto.PublicKey import RSA
from Crypto import Random
import eventlet
from oslo_config import cfg
from oslo_log import log as logging
from oslo_utils import excutils
from oslo_utils import units
import requests
from six.moves import urllib
from six import string_types
from cinder import exception
from cinder import utils
from cinder.i18n import _, _LE, _LW
from cinder.objects import snapshot
from cinder.objects import volume
from cinder.volume import utils as volutils
# Backend-specific configuration options; registered on the global CONF
# object below so they can be set per backend section in cinder.conf.
cinder_opts = [
    cfg.StrOpt('synology_pool_name',
               default='',
               help='Volume on Synology storage to be used for creating lun.'),
    cfg.PortOpt('synology_admin_port',
                default=5000,
                help='Management port for Synology storage.'),
    cfg.StrOpt('synology_username',
               default='admin',
               help='Administrator of Synology storage.'),
    cfg.StrOpt('synology_password',
               default='',
               help='Password of administrator for logging in '
                    'Synology storage.',
               secret=True),
    cfg.BoolOpt('synology_ssl_verify',
                default=True,
                help='Do certificate validation or not if '
                     '$driver_use_ssl is True'),
    cfg.StrOpt('synology_one_time_pass',
               default=None,
               help='One time password of administrator for logging in '
                    'Synology storage if OTP is enabled.',
               secret=True),
    cfg.StrOpt('synology_device_id',
               default=None,
               help='Device id for skip one time password check for '
                    'logging in Synology storage if OTP is enabled.'),
]

LOG = logging.getLogger(__name__)
CONF = cfg.CONF
CONF.register_opts(cinder_opts)
class AESCipher(object):
    """Encrypt with OpenSSL-compatible way"""

    # OpenSSL prepends this magic marker (followed by the 8-byte salt) to
    # salted ciphertext; DSM expects the same layout.
    SALT_MAGIC = 'Salted__'

    def __init__(self, password, key_length=32):
        # NOTE(review): this class manipulates str byte-strings directly
        # (hashlib.md5 on str, '' accumulators), i.e. Python 2 semantics.
        self._bs = AES.block_size
        # 16-byte AES block minus the 8-byte marker leaves an 8-byte salt.
        self._salt = Random.new().read(self._bs - len(self.SALT_MAGIC))
        self._key, self._iv = self._derive_key_and_iv(password,
                                                      self._salt,
                                                      key_length,
                                                      self._bs)

    def _pad(self, s):
        # PKCS#7-style padding: append N bytes, each of value N.
        bs = self._bs
        return s + (bs - len(s) % bs) * chr(bs - len(s) % bs)

    def _derive_key_and_iv(self, password, salt, key_length, iv_length):
        # MD5-based EVP_BytesToKey derivation, compatible with the
        # defaults of "openssl enc": iterate MD5 over the previous digest,
        # the password and the salt until enough bytes for key + IV exist.
        d = d_i = ''
        while len(d) < key_length + iv_length:
            md5_str = d_i + password + salt
            d_i = hashlib.md5(md5_str).digest()
            d += d_i
        return d[:key_length], d[key_length:key_length + iv_length]

    def encrypt(self, text):
        """Return 'Salted__' + salt + AES-CBC ciphertext of *text*."""
        cipher = AES.new(self._key, AES.MODE_CBC, self._iv)
        ciphertext = cipher.encrypt(self._pad(text))
        return "%s%s%s" % (self.SALT_MAGIC, self._salt, ciphertext)
class Session(object):
    """Authenticated login session against the DSM Web API.

    Logs in during construction (SYNO.API.Auth) and performs a
    best-effort logout in __del__.  When plain HTTP is used, the login
    parameters are encrypted client-side via the DSM encryption API.
    """

    def __init__(self,
                 host,
                 port,
                 username,
                 password,
                 https=False,
                 ssl_verify=True,
                 one_time_pass=None,
                 device_id=None):
        # one_time_pass: OTP code, needed when 2-step verification is on
        #     and no trusted device_id is supplied.
        # device_id: trusted-device token that skips the OTP check.
        self._proto = 'https' if https else 'http'
        self._host = host
        self._port = port
        self._sess = 'dsm'
        self._https = https
        self._url_prefix = self._proto + '://' + host + ':' + str(port)
        self._url = self._url_prefix + '/webapi/auth.cgi'
        self._ssl_verify = ssl_verify
        self._sid = None
        self._did = device_id

        data = {'api': 'SYNO.API.Auth',
                'method': 'login',
                'version': 6}
        params = {'account': username,
                  'passwd': password,
                  'session': self._sess,
                  'format': 'sid'}

        if one_time_pass:
            if device_id:
                params.update(device_id=device_id)
            else:
                params.update(otp_code=one_time_pass,
                              enable_device_token='yes')

        if not https:
            # No TLS: protect the credentials by encrypting them with the
            # server-provided RSA key / random AES passphrase instead.
            params = self._encrypt_params(params)

        data.update(params)
        resp = requests.post(self._url,
                             data=data,
                             verify=self._ssl_verify)
        result = resp.json()

        if result and result['success']:
            self._sid = result['data']['sid']
            if one_time_pass and not device_id:
                # Remember the freshly issued trusted-device token so it
                # can be reused on the next login.
                self._did = result['data']['did']
        else:
            raise exception.SynoAuthError(reason=_('Login failed.'))

    def _random_AES_passpharse(self, length):
        # Build a random printable passphrase of *length* characters,
        # used as AES key material in _encrypt_params().
        available = ('0123456789'
                     'abcdefghijklmnopqrstuvwxyz'
                     'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
                     '~!@#$%^&*()_+-/')
        key = ''
        while length > 0:
            key += available[randint(0, len(available) - 1)]
            length -= 1
        return key

    def _get_enc_info(self):
        # Fetch the server's RSA public key, parameter names and time
        # token from the DSM encryption API.
        url = self.url_prefix() + '/webapi/encryption.cgi'
        data = {"api": "SYNO.API.Encryption",
                "method": "getinfo",
                "version": 1,
                "format": "module"}
        resp = requests.post(url, data=data, verify=self._ssl_verify)
        result = resp.json()

        return result["data"]

    def _encrypt_RSA(self, modulus, passphrase, text):
        # PKCS#1 v1.5 RSA encryption.  Despite its name, 'passphrase' is
        # the public exponent here (see the atol("10001", 16) call site).
        key = RSA.construct((modulus, passphrase))
        cipher = PKCS1_v1_5.new(key)
        ciphertext = cipher.encrypt(text)
        return ciphertext

    def _encrypt_AES(self, passphrase, text):
        cipher = AESCipher(passphrase)

        return cipher.encrypt(text)

    def _encrypt_params(self, params):
        """Encrypt login params the way DSM expects over plain HTTP.

        A random AES passphrase encrypts the urlencoded params; the
        passphrase itself is RSA-encrypted with the server's public key.
        """
        enc_info = self._get_enc_info()
        public_key = enc_info["public_key"]
        cipher_key = enc_info["cipherkey"]
        cipher_token = enc_info["ciphertoken"]
        server_time = enc_info["server_time"]
        random_passphrase = self._random_AES_passpharse(501)

        params[cipher_token] = server_time

        # NOTE(review): string.atol only exists on Python 2.
        encrypted_passphrase = self._encrypt_RSA(string.atol(public_key, 16),
                                                 string.atol("10001", 16),
                                                 random_passphrase)

        encrypted_params = self._encrypt_AES(random_passphrase,
                                             urllib.parse.urlencode(params))

        enc_params = {"rsa": base64.b64encode(encrypted_passphrase),
                      "aes": base64.b64encode(encrypted_params)}

        return {cipher_key: json.dumps(enc_params)}

    def sid(self):
        # Session id issued by the login call; appended to every request.
        return self._sid

    def did(self):
        # Trusted device id (may be None when OTP is not in use).
        return self._did

    def url_prefix(self):
        return self._url_prefix

    def query(self, api):
        """Return path/version info for *api*, or None if unavailable."""
        url = self._url_prefix + '/webapi/query.cgi'
        data = {'api': 'SYNO.API.Info',
                'version': 1,
                'method': 'query',
                'query': api}
        resp = requests.post(url,
                             data=data,
                             verify=self._ssl_verify)
        result = resp.json()

        if 'success' in result and result['success']:
            return result['data'][api]
        else:
            return None

    def __del__(self):
        # Best-effort logout; skip when __init__ failed before _sid
        # was assigned.
        if not hasattr(self, '_sid'):
            return

        data = {'api': 'SYNO.API.Auth',
                'version': 1,
                'method': 'logout',
                'session': self._sess,
                '_sid': self._sid}
        requests.post(self._url, data=data, verify=self._ssl_verify)
def _connection_checker(func):
    """Decorator to check session has expired or not."""
    @functools.wraps(func)
    def inner_connection_checker(self, *args, **kwargs):
        LOG.debug('in _connection_checker')
        retried = False
        while True:
            try:
                return func(self, *args, **kwargs)
            except exception.SynoAuthError as e:
                if retried:
                    # Second failure in a row: give up and propagate.
                    LOG.error(_LE('Try to renew session: [%s]'), e)
                    raise
                LOG.debug('Session might have expired.'
                          ' Trying to relogin')
                retried = True
                self.new_session()
    return inner_connection_checker
class APIRequest(object):
    """Owns a DSM Session and dispatches Web API requests over it."""

    def __init__(self,
                 host,
                 port,
                 username,
                 password,
                 https=False,
                 ssl_verify=True,
                 one_time_pass=None,
                 device_id=None):
        self._host = host
        self._port = port
        self._username = username
        self._password = password
        self._https = https
        self._ssl_verify = ssl_verify
        self._one_time_pass = one_time_pass
        self._device_id = device_id

        self.new_session()

    def new_session(self):
        """(Re)login; remember the trusted device id if one is issued."""
        self.__session = Session(self._host,
                                 self._port,
                                 self._username,
                                 self._password,
                                 self._https,
                                 self._ssl_verify,
                                 self._one_time_pass,
                                 self._device_id)
        if not self._device_id:
            self._device_id = self.__session.did()

    def _start(self, api, version):
        """Resolve the CGI path serving *api* at *version*.

        :raises APIException: when the API is unknown or *version* is
            outside its supported range
        """
        apiInfo = self.__session.query(api)
        # BUG FIX: query() returns None for unknown APIs; guard before
        # subscripting.  The original dereferenced apiInfo first and
        # raised TypeError instead of the intended APIException.
        if not apiInfo:
            raise exception.APIException(service=api)

        self._jsonFormat = apiInfo['requestFormat'] == 'JSON'
        if apiInfo['minVersion'] <= version <= apiInfo['maxVersion']:
            return apiInfo['path']
        raise exception.APIException(service=api)

    def _encode_param(self, params):
        # Json encode
        if self._jsonFormat:
            for key, value in params.items():
                params[key] = json.dumps(value)
        # url encode
        return urllib.parse.urlencode(params)

    @utils.synchronized('Synology')
    @_connection_checker
    def request(self, api, method, version, **params):
        """POST one Web API call and return the decoded JSON reply.

        :raises SynoAuthError: when DSM reports an expired session (105);
            the _connection_checker decorator then retries after relogin
        :raises MalformedResponse: when the reply has no 'success' field
        """
        cgi_path = self._start(api, version)
        s = self.__session
        url = s.url_prefix() + '/webapi/' + cgi_path
        data = {'api': api,
                'version': version,
                'method': method,
                '_sid': s.sid()
                }
        data.update(params)

        LOG.debug('[%s]', url)
        LOG.debug('%s', json.dumps(data, indent=4))

        # Send HTTP Post Request
        resp = requests.post(url,
                             data=self._encode_param(data),
                             verify=self._ssl_verify)
        http_status = resp.status_code
        result = resp.json()

        LOG.debug('%s', json.dumps(result, indent=4))

        # Check for status code
        if (200 != http_status):
            # HTTP-level failures are reported to callers via the
            # 'http_status' key (see SynoCommon.exec_webapi).
            result = {'http_status': http_status}
        elif 'success' not in result:
            reason = _("'success' not found")
            raise exception.MalformedResponse(cmd=json.dumps(data, indent=4),
                                              reason=reason)

        # Error 105: insufficient privilege / session expired.
        if ('error' in result and 'code' in result["error"]
                and result['error']['code'] == 105):
            raise exception.SynoAuthError(reason=_('Session might have '
                                                   'expired.'))

        return result
class SynoCommon(object):
"""Manage Cinder volumes on Synology storage"""
TARGET_NAME_PREFIX = 'Cinder-Target-'
CINDER_LUN = 'CINDER'
METADATA_DS_SNAPSHOT_UUID = 'ds_snapshot_UUID'
def __init__(self, config, driver_type):
    """Validate configuration and open an API session to the DSM box.

    :param config: backend configuration (oslo.config opts group)
    :param driver_type: protocol tag used in the backend_info string
        reported by update_volume_stats()
    :raises InvalidConfigurationValue: if iscsi_ip_address or
        synology_pool_name is missing
    """
    if not config.safe_get('iscsi_ip_address'):
        raise exception.InvalidConfigurationValue(
            option='iscsi_ip_address',
            value='')
    if not config.safe_get('synology_pool_name'):
        raise exception.InvalidConfigurationValue(
            option='synology_pool_name',
            value='')

    self.config = config
    self.vendor_name = 'Synology'
    self.driver_type = driver_type
    self.volume_backend_name = self._get_backend_name()
    self.iscsi_port = self.config.safe_get('iscsi_port')

    # APIRequest logs in immediately; its bound request method becomes
    # the single entry point used by exec_webapi().
    api = APIRequest(self.config.iscsi_ip_address,
                     self.config.synology_admin_port,
                     self.config.synology_username,
                     self.config.synology_password,
                     self.config.safe_get('driver_use_ssl'),
                     self.config.safe_get('synology_ssl_verify'),
                     self.config.safe_get('synology_one_time_pass'),
                     self.config.safe_get('synology_device_id'),)
    self.synoexec = api.request
    self.host_uuid = self._get_node_uuid()
def _get_node_uuid(self):
    """Return the UUID of the first iSCSI node reported by DSM."""
    try:
        out = self.exec_webapi('SYNO.Core.ISCSI.Node',
                               'list',
                               1)
        self.check_response(out)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _get_node_uuid.'))

    nodes_ok = (self.check_value_valid(out, ['data', 'nodes'], list)
                and len(out['data']['nodes']) > 0
                and self.check_value_valid(out['data']['nodes'][0],
                                           ['uuid'],
                                           string_types))
    if not nodes_ok:
        raise exception.VolumeDriverException(
            message=_('Failed to _get_node_uuid.'))

    return out['data']['nodes'][0]['uuid']
def _get_pool_info(self):
    """Fetch the description of the backing DSM volume (the pool)."""
    pool_name = self.config.synology_pool_name
    if not pool_name:
        raise exception.InvalidConfigurationValue(option='pool_name',
                                                  value='')

    try:
        out = self.exec_webapi('SYNO.Core.Storage.Volume',
                               'get',
                               1,
                               volume_path='/' + pool_name)
        self.check_response(out)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _get_pool_status.'))

    if self.check_value_valid(out, ['data', 'volume'], object):
        return out['data']['volume']
    raise exception.MalformedResponse(cmd='_get_pool_info',
                                      reason=_('no data found'))
def _get_pool_size(self):
    """Return (free_gb, total_gb, other_user_data_gb) for the pool.

    other_user_data_gb is the space used by data outside the LUN extent
    pool (total - free - eppool_used), rounded up to whole GiB.
    """
    info = self._get_pool_info()

    # Validate all three byte counters up front.  The original code only
    # checked the first two and could die with a raw KeyError on
    # 'eppool_used_byte' in the arithmetic below.
    required = ('size_free_byte', 'size_total_byte', 'eppool_used_byte')
    if any(key not in info for key in required):
        raise exception.MalformedResponse(cmd='_get_pool_size',
                                          reason=_('size not found'))

    free_capacity_gb = int(int(info['size_free_byte']) / units.Gi)
    total_capacity_gb = int(int(info['size_total_byte']) / units.Gi)
    other_user_data_gb = int(math.ceil((float(info['size_total_byte']) -
                                        float(info['size_free_byte']) -
                                        float(info['eppool_used_byte'])) /
                                       units.Gi))

    return free_capacity_gb, total_capacity_gb, other_user_data_gb
def _get_pool_lun_provisioned_size(self):
    """Sum the provisioned sizes (GiB, rounded up) of all pool LUNs."""
    pool_name = self.config.synology_pool_name
    if not pool_name:
        raise exception.InvalidConfigurationValue(option='pool_name',
                                                  value=pool_name)
    try:
        out = self.exec_webapi('SYNO.Core.ISCSI.LUN',
                               'list',
                               1,
                               location='/' + pool_name)
        self.check_response(out)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _get_pool_lun_provisioned_size.'))

    if not self.check_value_valid(out, ['data', 'luns'], list):
        raise exception.MalformedResponse(
            cmd='_get_pool_lun_provisioned_size',
            reason=_('no data found'))

    total_bytes = sum(lun['size'] for lun in out['data']['luns'])
    return int(math.ceil(float(total_bytes) / units.Gi))
def _get_lun_info(self, lun_name, additional=None):
    """Query DSM for a LUN's description.

    :param lun_name: LUN name/uuid to look up
    :param additional: optional list of extra fields to request
    """
    if not lun_name:
        raise exception.InvalidParameterValue(
            err=_('Param [lun_name] is invalid.'))

    params = {'uuid': lun_name}
    if additional is not None:
        params['additional'] = additional

    try:
        out = self.exec_webapi('SYNO.Core.ISCSI.LUN',
                               'get',
                               1,
                               **params)
        self.check_response(out, uuid=lun_name)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _get_lun_info. [%s]'), lun_name)

    if not self.check_value_valid(out, ['data', 'lun'], object):
        raise exception.MalformedResponse(cmd='_get_lun_info',
                                          reason=_('lun info not found'))

    return out['data']['lun']
def _get_lun_uuid(self, lun_name):
    """Resolve a LUN name to its DSM uuid."""
    if not lun_name:
        raise exception.InvalidParameterValue(
            err=_('Param [lun_name] is invalid.'))

    try:
        lun_info = self._get_lun_info(lun_name)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _get_lun_uuid. [%s]'), lun_name)

    if not self.check_value_valid(lun_info, ['uuid'], string_types):
        raise exception.MalformedResponse(cmd='_get_lun_uuid',
                                          reason=_('uuid not found'))

    return lun_info['uuid']
def _get_lun_status(self, lun_name):
    """Return (status, is_action_locked) for the given LUN."""
    if not lun_name:
        raise exception.InvalidParameterValue(
            err=_('Param [lun_name] is invalid.'))

    try:
        lun_info = self._get_lun_info(lun_name,
                                      ['status', 'is_action_locked'])
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _get_lun_status. [%s]'),
                          lun_name)

    checks = ((['status'], string_types, _('status not found')),
              (['is_action_locked'], bool, _('action_locked '
                                             'not found')))
    for keys, vtype, reason in checks:
        if not self.check_value_valid(lun_info, keys, vtype):
            raise exception.MalformedResponse(cmd='_get_lun_status',
                                              reason=reason)

    return lun_info['status'], lun_info['is_action_locked']
def _get_snapshot_info(self, snapshot_uuid, additional=None):
    """Query DSM for a snapshot's description.

    :param snapshot_uuid: DSM-side snapshot uuid
    :param additional: optional list of extra fields to request
    """
    if not snapshot_uuid:
        raise exception.InvalidParameterValue(
            err=_('Param [snapshot_uuid] is invalid.'))

    params = {'snapshot_uuid': snapshot_uuid}
    if additional is not None:
        params['additional'] = additional

    try:
        out = self.exec_webapi('SYNO.Core.ISCSI.LUN',
                               'get_snapshot',
                               1,
                               **params)
        self.check_response(out, snapshot_id=snapshot_uuid)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _get_snapshot_info. [%s]'),
                          snapshot_uuid)

    if not self.check_value_valid(out, ['data', 'snapshot'], object):
        raise exception.MalformedResponse(cmd='_get_snapshot_info',
                                          reason=_('snapshot info not '
                                                   'found'))

    return out['data']['snapshot']
def _get_snapshot_status(self, snapshot_uuid):
    """Return (status, is_action_locked) for the given snapshot."""
    if not snapshot_uuid:
        err = _('Param [snapshot_uuid] is invalid.')
        raise exception.InvalidParameterValue(err=err)

    try:
        snapshot_info = self._get_snapshot_info(snapshot_uuid,
                                                ['status',
                                                 'is_action_locked'])
    except Exception:
        with excutils.save_and_reraise_exception():
            # Fixed copy-pasted log text: this method is
            # _get_snapshot_status, not _get_snapshot_info.
            LOG.exception(_LE('Failed to _get_snapshot_status. [%s]'),
                          snapshot_uuid)

    if not self.check_value_valid(snapshot_info, ['status'], string_types):
        raise exception.MalformedResponse(cmd='_get_snapshot_status',
                                          reason=_('status not found'))
    if not self.check_value_valid(snapshot_info,
                                  ['is_action_locked'],
                                  bool):
        raise exception.MalformedResponse(cmd='_get_snapshot_status',
                                          reason=_('action_locked '
                                                   'not found'))

    return snapshot_info['status'], snapshot_info['is_action_locked']
def _get_metadata_value(self, obj, key):
    """Fetch *key* from a volume/snapshot object's metadata dict.

    Raises a type-appropriate "metadata not found" exception when the
    key is absent.
    """
    metadata = obj['metadata']
    if key in metadata:
        return metadata[key]

    if isinstance(obj, volume.Volume):
        raise exception.VolumeMetadataNotFound(volume_id=obj['id'],
                                               metadata_key=key)
    if isinstance(obj, snapshot.Snapshot):
        raise exception.SnapshotMetadataNotFound(snapshot_id=obj['id'],
                                                 metadata_key=key)
    raise exception.MetadataAbsent()
def _get_backend_name(self):
return self.config.safe_get('volume_backend_name') or 'Synology'
def _target_create(self, identifier):
    """Create an iSCSI target named '<prefix><Cinder-Target-><identifier>'.

    :param identifier: unique target suffix
    :returns: (iqn, target_id, provider_auth) where provider_auth is
        'CHAP <user> <pass>' or '' when CHAP is disabled
    :raises VolumeDriverException: if the reply lacks a target_id
    """
    if not identifier:
        err = _('Param [identifier] is invalid.')
        raise exception.InvalidParameterValue(err=err)

    # 0 for no auth, 1 for single chap, 2 for mutual chap
    auth_type = 0
    chap_username = ''
    chap_password = ''
    provider_auth = ''
    if self.config.safe_get('use_chap_auth') and self.config.use_chap_auth:
        auth_type = 1
        # Generate credentials when none are configured.
        chap_username = (self.config.safe_get('chap_username') or
                         volutils.generate_username(12))
        chap_password = (self.config.safe_get('chap_password') or
                         volutils.generate_password())
        provider_auth = ' '.join(('CHAP', chap_username, chap_password))

    trg_prefix = self.config.safe_get('iscsi_target_prefix')
    trg_name = (self.TARGET_NAME_PREFIX + '%s') % identifier
    iqn = trg_prefix + trg_name

    try:
        out = self.exec_webapi('SYNO.Core.ISCSI.Target',
                               'create',
                               1,
                               name=trg_name,
                               iqn=iqn,
                               auth_type=auth_type,
                               user=chap_username,
                               password=chap_password,
                               max_sessions=0)
        self.check_response(out)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _target_create. [%s]'),
                          identifier)

    if not self.check_value_valid(out, ['data', 'target_id']):
        msg = _('Failed to get target_id of target [%s]') % trg_name
        raise exception.VolumeDriverException(message=msg)

    trg_id = out['data']['target_id']

    return iqn, trg_id, provider_auth
def _target_delete(self, trg_id):
    """Remove the iSCSI target with numeric id *trg_id*."""
    if trg_id < 0:
        raise exception.InvalidParameterValue(
            err=_('trg_id is invalid: %d.') % trg_id)

    try:
        resp = self.exec_webapi('SYNO.Core.ISCSI.Target',
                                'delete',
                                1,
                                target_id=('%d' % trg_id))
        self.check_response(resp)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _target_delete. [%d]'), trg_id)
# is_map True for map, False for unmap
def _lun_map_unmap_target(self, volume_name, is_map, trg_id):
    """Map (is_map=True) or unmap (is_map=False) a LUN to/from a target."""
    if trg_id < 0:
        raise exception.InvalidParameterValue(
            err=_('trg_id is invalid: %d.') % trg_id)

    method = 'map_target' if is_map else 'unmap_target'
    try:
        lun_uuid = self._get_lun_uuid(volume_name)
        resp = self.exec_webapi('SYNO.Core.ISCSI.LUN',
                                method,
                                1,
                                uuid=lun_uuid,
                                target_ids=['%d' % trg_id])
        self.check_response(resp)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _lun_map_unmap_target.'
                              '[%(action)s][%(vol)s].'),
                          {'action': method,
                           'vol': volume_name})
def _lun_map_target(self, volume_name, trg_id):
    """Attach the LUN backing *volume_name* to target *trg_id*."""
    self._lun_map_unmap_target(volume_name, True, trg_id)
def _lun_unmap_target(self, volume_name, trg_id):
    """Detach the LUN backing *volume_name* from target *trg_id*."""
    self._lun_map_unmap_target(volume_name, False, trg_id)
def _modify_lun_name(self, name, new_name):
    """Rename LUN *name* to *new_name* on the DSM side."""
    try:
        resp = self.exec_webapi('SYNO.Core.ISCSI.LUN',
                                'set',
                                1,
                                uuid=name,
                                new_name=new_name)
        self.check_response(resp)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _modify_lun_name [%s].'), name)
def _check_lun_status_normal(self, volume_name):
    """Poll until the LUN is unlocked; True iff its status is 'normal'.

    NOTE: polls every 2 seconds with no upper bound on iterations.
    """
    status = ''
    try:
        status, locked = self._get_lun_status(volume_name)
        while locked:
            eventlet.sleep(2)
            status, locked = self._get_lun_status(volume_name)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to get lun status. [%s]'),
                          volume_name)

    LOG.debug('Lun [%(vol)s], status [%(status)s].',
              {'vol': volume_name,
               'status': status})
    return status == 'normal'
def _check_snapshot_status_healthy(self, snapshot_uuid):
    """Poll until the snapshot is unlocked; True iff status is 'Healthy'.

    NOTE: polls every 2 seconds with no upper bound on iterations.
    """
    status = ''
    try:
        status, locked = self._get_snapshot_status(snapshot_uuid)
        while locked:
            eventlet.sleep(2)
            status, locked = self._get_snapshot_status(snapshot_uuid)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to get snapshot status. [%s]'),
                          snapshot_uuid)

    LOG.debug('Lun [%(snapshot)s], status [%(status)s].',
              {'snapshot': snapshot_uuid,
               'status': status})
    return status == 'Healthy'
def _check_storage_response(self, out, **kwargs):
    """Translate a failed SYNO.Core.Storage.* reply to (message, exc)."""
    exc = exception.VolumeBackendAPIException(data='internal error')
    return ('Internal error', exc)
def _check_iscsi_response(self, out, **kwargs):
    """Translate a failed SYNO.Core.ISCSI.* reply to (message, exc)."""
    LUN_BAD_LUN_UUID = 18990505
    LUN_NO_SUCH_SNAPSHOT = 18990532

    if not self.check_value_valid(out, ['error', 'code'], int):
        raise exception.MalformedResponse(cmd='_check_iscsi_response',
                                          reason=_('no error code found'))

    code = out['error']['code']
    known = {LUN_BAD_LUN_UUID: (exception.SynoLUNNotExist,
                                'Bad LUN UUID'),
             LUN_NO_SUCH_SNAPSHOT: (exception.SnapshotNotFound,
                                    'No such snapshot')}

    if code in known:
        exc_cls, message = known[code]
        exc = exc_cls(**kwargs)
    else:
        exc = exception.VolumeBackendAPIException(data='internal error')
        message = 'Internal error'

    return ('%s [%d]' % (message, code), exc)
def _check_ds_pool_status(self):
    """Raise if the configured pool is reported read-only."""
    pool_info = self._get_pool_info()
    if not self.check_value_valid(pool_info, ['readonly'], bool):
        raise exception.MalformedResponse(cmd='_check_ds_pool_status',
                                          reason=_('no readonly found'))

    if pool_info['readonly']:
        raise exception.VolumeDriverException(
            message=_('pool [%s] is not writable') %
            self.config.synology_pool_name)
def _check_ds_version(self):
    """Ensure the DSM firmware version is at least 6.0.2."""
    try:
        out = self.exec_webapi('SYNO.Core.System',
                               'info',
                               1,
                               type='firmware')
        self.check_response(out)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _check_ds_version'))

    if not self.check_value_valid(out,
                                  ['data', 'firmware_ver'],
                                  string_types):
        raise exception.MalformedResponse(cmd='_check_ds_version',
                                          reason=_('data not found'))
    firmware_version = out['data']['firmware_ver']

    # e.g. 'DSM 6.1-7610', 'DSM 6.0.1-7370', 'DSM 6.0-7321 update 3'
    version = firmware_version.split()[1].split('-')[0]
    versions = version.split('.')
    # BUG FIX: the original compared these ints with 'is' (and
    # 'len(versions) is 3'), which only worked by accident of CPython's
    # small-integer caching; use real value comparisons instead.
    major = int(versions[0])
    minor = int(versions[1])
    hotfix = int(versions[2]) if len(versions) > 2 else 0

    if (major < 6) or (major == 6 and minor == 0 and hotfix < 2):
        m = (_('DS version %s is not supported') %
             firmware_version)
        raise exception.VolumeDriverException(message=m)
def _check_ds_ability(self):
    """Verify the DS exposes every feature this driver depends on."""
    try:
        out = self.exec_webapi('SYNO.Core.System',
                               'info',
                               1,
                               type='define')
        self.check_response(out)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _check_ds_ability'))

    if not self.check_value_valid(out, ['data'], dict):
        raise exception.MalformedResponse(cmd='_check_ds_ability',
                                          reason=_('data not found'))
    define = out['data']

    # USB Station models are explicitly rejected.
    if define.get('usbstation') == 'yes':
        raise exception.VolumeDriverException(
            message=_('usbstation is not supported'))

    needed = (('support_storage_mgr',
               _('Storage Manager is not supported in DS')),
              ('support_iscsi_target',
               _('iSCSI target feature is not supported in DS')),
              ('support_vaai',
               _('VAAI feature is not supported in DS')),
              ('supportsnapshot',
               _('Snapshot feature is not supported in DS')))
    for key, message in needed:
        if define.get(key) != 'yes':
            raise exception.VolumeDriverException(message=message)
def check_response(self, out, **kwargs):
    """Raise a mapped exception when a webapi reply reports failure."""
    if out['success']:
        return

    api = out['api_info']['api']
    if api.startswith('SYNO.Core.ISCSI.'):
        message, exc = self._check_iscsi_response(out, **kwargs)
    elif api.startswith('SYNO.Core.Storage.'):
        message, exc = self._check_storage_response(out, **kwargs)
    else:
        message = 'Internal error'
        exc = exception.VolumeBackendAPIException(data='internal error')

    LOG.exception(_LE('%(message)s'), {'message': message})

    raise exc
def exec_webapi(self, api, method, version, **kwargs):
result = self.synoexec(api, method, version, **kwargs)
if 'http_status' in result and 200 != result['http_status']:
raise exception.SynoAPIHTTPError(code=result['http_status'])
result['api_info'] = {'api': api,
'method': method,
'version': version}
return result
def check_value_valid(self, obj, key_array, value_type=None):
    """Walk *key_array* into *obj*; verify presence and optional type."""
    node = obj
    for key in key_array:
        if key not in node:
            LOG.error(_LE('key [%(key)s] is not in %(obj)s'),
                      {'key': key,
                       'obj': node})
            return False
        node = node[key]

    if value_type and not isinstance(node, value_type):
        LOG.error(_LE('[%(obj)s] is %(type)s, not %(value_type)s'),
                  {'obj': node,
                   'type': type(node),
                   'value_type': value_type})
        return False

    return True
def get_ip(self):
return self.config.iscsi_ip_address
def get_provider_location(self, iqn, trg_id):
portals = ['%(ip)s:%(port)d' % {'ip': self.get_ip(),
'port': self.iscsi_port}]
sec_ips = self.config.safe_get('iscsi_secondary_ip_addresses')
for ip in sec_ips:
portals.append('%(ip)s:%(port)d' %
{'ip': ip,
'port': self.iscsi_port})
return '%s,%d %s 0' % (
';'.join(portals),
trg_id,
iqn)
def is_lun_mapped(self, lun_name):
    """Whether the LUN is currently mapped to any target."""
    if not lun_name:
        raise exception.InvalidParameterValue(
            err=_('Param [lun_name] is invalid.'))

    try:
        lun_info = self._get_lun_info(lun_name, ['is_mapped'])
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to _is_lun_mapped. [%s]'), lun_name)

    if not self.check_value_valid(lun_info, ['is_mapped'], bool):
        raise exception.MalformedResponse(cmd='_is_lun_mapped',
                                          reason=_('is_mapped not found'))

    return lun_info['is_mapped']
def check_for_setup_error(self):
    """Fail fast when the pool, firmware or DS features are unsuitable."""
    # Order matters only for readability: pool writability first, then
    # firmware version, then feature flags.
    self._check_ds_pool_status()
    self._check_ds_version()
    self._check_ds_ability()
def update_volume_stats(self):
    """Update volume statistics.

    Three kinds of data are stored on the Synology backend pool:
    1. Thin volumes (LUNs on the pool),
    2. Thick volumes (LUNs on the pool),
    3. Other user data.

    other_user_data_gb is the size of the 3rd one.
    lun_provisioned_gb is the summation of all thin/thick volume
    provisioned size.

    Only thin type is available for Cinder volumes.
    """
    free_gb, total_gb, other_user_data_gb = self._get_pool_size()
    lun_provisioned_gb = self._get_pool_lun_provisioned_size()

    return {
        'volume_backend_name': self.volume_backend_name,
        'vendor_name': self.vendor_name,
        'storage_protocol': self.config.iscsi_protocol,
        'consistencygroup_support': False,
        'QoS_support': False,
        'thin_provisioning_support': True,
        'thick_provisioning_support': False,
        'reserved_percentage': self.config.reserved_percentage,
        'free_capacity_gb': free_gb,
        'total_capacity_gb': total_gb,
        'provisioned_capacity_gb': (lun_provisioned_gb +
                                    other_user_data_gb),
        'max_over_subscription_ratio':
            self.config.max_over_subscription_ratio,
        'iscsi_ip_address': self.config.iscsi_ip_address,
        'pool_name': self.config.synology_pool_name,
        'backend_info': ('%s:%s:%s' % (self.vendor_name,
                                       self.driver_type,
                                       self.host_uuid)),
    }
def create_volume(self, volume):
    """Create a Cinder-type LUN on the configured pool for *volume*."""
    try:
        resp = self.exec_webapi('SYNO.Core.ISCSI.LUN',
                                'create',
                                1,
                                name=volume['name'],
                                type=self.CINDER_LUN,
                                location=('/' +
                                          self.config.synology_pool_name),
                                size=volume['size'] * units.Gi)
        self.check_response(resp)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to create_volume. [%s]'),
                          volume['name'])

    # Wait for the new LUN to settle before reporting success.
    if self._check_lun_status_normal(volume['name']):
        return
    raise exception.VolumeDriverException(
        message=_('Lun [%s] status is not normal') % volume['name'])
def delete_volume(self, volume):
    """Remove the LUN backing *volume*.

    A LUN that is already gone is only logged, keeping deletion
    idempotent.
    """
    try:
        lun_uuid = self._get_lun_uuid(volume['name'])
        resp = self.exec_webapi('SYNO.Core.ISCSI.LUN',
                                'delete',
                                1,
                                uuid=lun_uuid)
        self.check_response(resp)
    except exception.SynoLUNNotExist:
        LOG.warning(_LW('LUN does not exist'))
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to delete_volume. [%s]'),
                          volume['name'])
def create_cloned_volume(self, volume, src_vref):
    """Clone the LUN of *src_vref* into a new LUN named for *volume*."""
    try:
        src_lun_uuid = self._get_lun_uuid(src_vref['name'])
        resp = self.exec_webapi('SYNO.Core.ISCSI.LUN',
                                'clone',
                                1,
                                src_lun_uuid=src_lun_uuid,
                                dst_lun_name=volume['name'],
                                is_same_pool=True,
                                clone_type='CINDER')
        self.check_response(resp)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to create_cloned_volume. [%s]'),
                          volume['name'])

    if not self._check_lun_status_normal(volume['name']):
        raise exception.VolumeDriverException(
            message=_('Lun [%s] status is not normal.') % volume['name'])

    # Grow the clone when the new volume is larger than its source.
    if src_vref['size'] < volume['size']:
        self.extend_volume(volume, volume['size'])
def extend_volume(self, volume, new_size):
    """Resize the LUN backing *volume* to *new_size* GiB.

    :raises ExtendVolumeError: wrapping whatever failure occurred
    """
    try:
        lun_uuid = self._get_lun_uuid(volume['name'])
        out = self.exec_webapi('SYNO.Core.ISCSI.LUN',
                               'set',
                               1,
                               uuid=lun_uuid,
                               new_size=new_size * units.Gi)
        self.check_response(out)
    except Exception as e:
        LOG.exception(_LE('Failed to extend_volume. [%s]'),
                      volume['name'])
        # BUG FIX: not every exception carries a cinder-style 'msg'
        # attribute; the original 'e.msg' raised AttributeError for e.g.
        # KeyError, masking the real failure.
        raise exception.ExtendVolumeError(
            reason=getattr(e, 'msg', str(e)))
def update_migrated_volume(self, volume, new_volume):
    """Rename the migrated LUN back to the original volume's name."""
    try:
        self._modify_lun_name(new_volume['name'], volume['name'])
    except Exception:
        raise exception.VolumeMigrationFailed(
            reason=_('Failed to _modify_lun_name [%s].') %
            new_volume['name'])

    # Rename succeeded, so the volume keeps its own id as the name.
    return {'_name_id': None}
def create_snapshot(self, snapshot):
    """Take a DSM-side snapshot of the volume backing *snapshot*.

    :param snapshot: cinder snapshot object; snapshot['volume']['name']
        identifies the source LUN
    :returns: model update that stores the DSM snapshot uuid in metadata
    :raises MalformedResponse: if the reply carries no snapshot uuid
    :raises VolumeDriverException: if the snapshot never becomes healthy
    """
    desc = '(Cinder) ' + (snapshot['id'] or '')

    try:
        resp = self.exec_webapi('SYNO.Core.ISCSI.LUN',
                                'take_snapshot',
                                1,
                                src_lun_uuid=snapshot['volume']['name'],
                                is_app_consistent=False,
                                is_locked=False,
                                taken_by='Cinder',
                                description=desc)
        self.check_response(resp)
    except Exception:
        with excutils.save_and_reraise_exception():
            LOG.exception(_LE('Failed to create_snapshot. [%s]'),
                          snapshot['volume']['name'])

    if not self.check_value_valid(resp,
                                  ['data', 'snapshot_uuid'],
                                  string_types):
        raise exception.MalformedResponse(cmd='create_snapshot',
                                          reason=_('uuid not found'))
    snapshot_uuid = resp['data']['snapshot_uuid']

    # Wait for the snapshot to settle before reporting success.
    if not self._check_snapshot_status_healthy(snapshot_uuid):
        message = (_('Volume [%(vol)s] snapshot [%(snapshot)s] status '
                     'is not healthy.') %
                   {'vol': snapshot['volume']['name'],
                    'snapshot': snapshot_uuid})
        raise exception.VolumeDriverException(message=message)

    return {
        'metadata': {
            self.METADATA_DS_SNAPSHOT_UUID: snapshot_uuid
        }}
def delete_snapshot(self, snapshot):
try:
ds_snapshot_uuid = (self._get_metadata_value
(snapshot, self.METADATA_DS_SNAPSHOT_UUID))
out = self.exec_webapi('SYNO.Core.ISCSI.LUN',
'delete_snapshot',
1,
snapshot_uuid=ds_snapshot_uuid,
deleted_by='Cinder')
self.check_response(out, snapshot_id=snapshot['id'])
except (exception.SnapshotNotFound,
exception.SnapshotMetadataNotFound):
return
except Exception:
with excutils.save_and_reraise_exception():
LOG.exception(_LE('Failed to delete_snapshot. [%s]'),
snapshot['id'])
    def create_volume_from_snapshot(self, volume, snapshot):
        """Clone a new LUN from an existing DSM snapshot.

        The DSM snapshot UUID is read from the snapshot's metadata (stored
        there by create_snapshot).  The clone is extended when the
        requested volume is larger than the snapshotted one.

        :raises VolumeDriverException: clone did not reach normal status
        """
        try:
            ds_snapshot_uuid = (self._get_metadata_value
                                (snapshot, self.METADATA_DS_SNAPSHOT_UUID))

            out = self.exec_webapi('SYNO.Core.ISCSI.LUN',
                                   'clone_snapshot',
                                   1,
                                   src_lun_uuid=snapshot['volume']['name'],
                                   snapshot_uuid=ds_snapshot_uuid,
                                   cloned_lun_name=volume['name'],
                                   clone_type='CINDER')

            self.check_response(out)
        except exception.SnapshotMetadataNotFound:
            with excutils.save_and_reraise_exception():
                LOG.exception(_LE('Failed to get snapshot UUID. [%s]'),
                              snapshot['id'])
        except Exception:
            with excutils.save_and_reraise_exception():
                LOG.exception(_LE('Failed to create_volume_from_snapshot. '
                                  '[%s]'),
                              snapshot['id'])

        # ds_snapshot_uuid is always bound here: failures above re-raised.
        if not self._check_lun_status_normal(volume['name']):
            message = (_('Volume [%(vol)s] snapshot [%(snapshot)s] status '
                         'is not healthy.') %
                       {'vol': snapshot['volume']['name'],
                        'snapshot': ds_snapshot_uuid})
            raise exception.VolumeDriverException(message=message)

        if snapshot['volume_size'] < volume['size']:
            self.extend_volume(volume, volume['size'])
def get_iqn_and_trgid(self, location):
if not location:
err = _('Param [location] is invalid.')
raise exception.InvalidParameterValue(err=err)
result = location.split(' ')
if len(result) < 2:
raise exception.InvalidInput(reason=location)
data = result[0].split(',')
if len(data) < 2:
raise exception.InvalidInput(reason=location)
iqn = result[1]
trg_id = data[1]
return iqn, int(trg_id, 10)
    def get_iscsi_properties(self, volume):
        """Build the iSCSI connection-properties dict for *volume*.

        Requires volume['provider_location'] (as produced by the export
        path); optionally folds in secondary portals and CHAP credentials
        from volume['provider_auth'].
        """
        if not volume['provider_location']:
            err = _("Param volume['provider_location'] is invalid.")
            raise exception.InvalidParameterValue(err=err)

        # trg_id is parsed but not needed for the properties dict.
        iqn, trg_id = self.get_iqn_and_trgid(volume['provider_location'])
        iscsi_properties = {
            'target_discovered': False,
            'target_iqn': iqn,
            'target_portal': '%(ip)s:%(port)d' % {'ip': self.get_ip(),
                                                  'port': self.iscsi_port},
            'volume_id': volume['id'],
            'access_mode': 'rw',
            'discard': False
        }

        ips = self.config.safe_get('iscsi_secondary_ip_addresses')
        if ips:
            target_portals = [iscsi_properties['target_portal']]
            for ip in ips:
                target_portals.append('%(ip)s:%(port)d' %
                                      {'ip': ip,
                                       'port': self.iscsi_port})
            # Multipath: replicate the iqn and lun once per portal.
            iscsi_properties.update(target_portals=target_portals)
            count = len(target_portals)
            iscsi_properties.update(target_iqns=
                                    [iscsi_properties['target_iqn']] * count)
            iscsi_properties.update(target_lun=0)
            iscsi_properties.update(target_luns=
                                    [iscsi_properties['target_lun']] * count)

        if 'provider_auth' in volume:
            auth = volume['provider_auth']
            if auth:
                try:
                    # provider_auth format: '<method> <username> <password>'
                    (auth_method, auth_username, auth_password) = auth.split()
                    iscsi_properties['auth_method'] = auth_method
                    iscsi_properties['auth_username'] = auth_username
                    iscsi_properties['auth_password'] = auth_password
                except Exception:
                    # Malformed auth is logged but not fatal.
                    LOG.error(_LE('Invalid provider_auth: %s'), auth)

        return iscsi_properties
    def create_iscsi_export(self, volume_name, identifier):
        """Create an iSCSI target for *identifier* and map the LUN to it.

        :returns: (iqn, target_id, provider_auth) for the new export
        """
        iqn, trg_id, provider_auth = self._target_create(identifier)
        self._lun_map_target(volume_name, trg_id)

        return iqn, trg_id, provider_auth
    def remove_iscsi_export(self, volume_name, trg_id):
        """Unmap the LUN from target *trg_id*, then delete the target."""
        self._lun_unmap_target(volume_name, trg_id)
        self._target_delete(trg_id)
| apache-2.0 |
t0mk/ansible | lib/ansible/modules/cloud/misc/xenserver_facts.py | 16 | 5392 | #!/usr/bin/python -tt
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'version': '1.0'}
DOCUMENTATION = '''
---
module: xenserver_facts
version_added: "2.0"
short_description: get facts reported on xenserver
description:
- Reads data out of XenAPI, can be used instead of multiple xe commands.
author:
- Andy Hill (@andyhky)
- Tim Rupp
options: {}
'''
# Usage examples rendered by ansible-doc.  The example task must invoke the
# module by its actual name (xenserver_facts), not 'xenserver'.
EXAMPLES = '''
- name: Gather facts from xenserver
  xenserver_facts:

- name: Print running VMs
  debug:
    msg: "{{ item }}"
  with_items: "{{ xs_vms.keys() }}"
  when: xs_vms[item]['power_state'] == "Running"

# Which will print:
#
# TASK: [Print running VMs] ***********************************************************
# skipping: [10.13.0.22] => (item=CentOS 4.7 (32-bit))
# ok: [10.13.0.22] => (item=Control domain on host: 10.0.13.22) => {
#     "item": "Control domain on host: 10.0.13.22",
#     "msg": "Control domain on host: 10.0.13.22"
# }
'''
import platform
HAVE_XENAPI = False
try:
import XenAPI
HAVE_XENAPI = True
except ImportError:
pass
class XenServerFacts:
    """Map the local host's XenServer version to its release codename."""

    def __init__(self):
        # Known XenServer release codenames, keyed by version string.
        self.codes = {
            '5.5.0': 'george',
            '5.6.100': 'oxford',
            '6.0.0': 'boston',
            '6.1.0': 'tampa',
            '6.2.0': 'clearwater'
        }

    @property
    def version(self):
        """Return the host distribution version, or None if undetectable."""
        # platform.dist() was deprecated in Python 2.6 and removed in 3.8;
        # degrade gracefully instead of raising AttributeError there.
        dist = getattr(platform, 'dist', None)
        if dist is None:
            return None
        return dist()[1]

    @property
    def codename(self):
        """Return the codename matching self.version, or None if unknown."""
        if self.version in self.codes:
            result = self.codes[self.version]
        else:
            result = None
        return result
def get_xenapi_session():
    """Open a XenAPI session on the local host.

    Empty credentials are passed; presumably the local-socket connection
    is pre-authorized (root on the host) -- TODO confirm.
    """
    session = XenAPI.xapi_local()
    session.xenapi.login_with_password('', '')
    return session
def get_networks(session):
    """Return all network records, keyed by their name_label."""
    records = session.xenapi.network.get_all_records()
    by_uuid = change_keys(records, key='uuid')
    xs_networks = {}
    for net in by_uuid.values():
        xs_networks[net['name_label']] = net
    return xs_networks
def get_pifs(session):
    """Return PIF records keyed by their ethN/bondN device name (0..6)."""
    records = session.xenapi.PIF.get_all_records()
    by_uuid = change_keys(records, key='uuid')
    xs_pifs = {}
    for pif in by_uuid.values():
        for eth_num in range(0, 7):
            eth_name = "eth%s" % eth_num
            bond_name = eth_name.replace('eth', 'bond')
            if pif['device'] == eth_name:
                xs_pifs[eth_name] = pif
            elif pif['device'] == bond_name:
                xs_pifs[bond_name] = pif
    return xs_pifs
def get_vlans(session):
    """Return all VLAN records, keyed by their VLAN tag."""
    recs = session.xenapi.VLAN.get_all_records()
    return change_keys(recs, key='tag')
def change_keys(recs, key='uuid', filter_func=None):
    """
    Take a xapi dict, and make the keys the value of recs[ref][key].

    Preserves the ref in rec['ref'].  Input records are copied rather
    than mutated.

    :param recs: mapping of opaque ref -> record dict
    :param key: record field whose value becomes the new dict key
    :param filter_func: optional predicate; records failing it are skipped
    """
    new_recs = {}

    for ref, rec in recs.items():
        if filter_func is not None and not filter_func(rec):
            continue

        # Copy the record so the caller's dicts are not silently mutated
        # by the injected 'ref' entry.
        new_rec = dict(rec)
        new_rec['ref'] = ref
        new_recs[new_rec[key]] = new_rec

    return new_recs
def get_host(session):
    """Get the host"""
    host_refs = session.xenapi.host.get_all()
    # Standalone host: only one entry exists, so return its record.
    return session.xenapi.host.get_record(host_refs[0])
def get_vms(session):
    """Return VM records keyed by name_label, or None when there are none."""
    # VM.get_all() only returns a list of opaque refs, but change_keys()
    # iterates recs.items() and needs the ref -> record mapping, so the
    # full records must be fetched (matching get_networks/get_pifs).
    recs = session.xenapi.VM.get_all_records()
    if not recs:
        return None

    xs_vms = {}
    vms = change_keys(recs, key='uuid')
    for vm in vms.values():
        xs_vms[vm['name_label']] = vm
    return xs_vms
def get_srs(session):
    """Return SR records keyed by name_label, or None when there are none."""
    # SR.get_all() only returns a list of opaque refs, but change_keys()
    # iterates recs.items() and needs the ref -> record mapping, so the
    # full records must be fetched (matching get_networks/get_pifs).
    recs = session.xenapi.SR.get_all_records()
    if not recs:
        return None

    xs_srs = {}
    srs = change_keys(recs, key='uuid')
    for sr in srs.values():
        xs_srs[sr['name_label']] = sr
    return xs_srs
def main():
    """Module entry point: gather XenServer facts and exit via AnsibleModule."""
    # AnsibleModule comes from the wildcard import at the bottom of the file.
    module = AnsibleModule({})

    if not HAVE_XENAPI:
        module.fail_json(changed=False, msg="python xen api required for this module")

    obj = XenServerFacts()
    try:
        session = get_xenapi_session()
    except XenAPI.Failure as e:
        module.fail_json(msg='%s' % e)

    data = {
        'xenserver_version': obj.version,
        'xenserver_codename': obj.codename
    }

    xs_networks = get_networks(session)
    xs_pifs = get_pifs(session)
    xs_vlans = get_vlans(session)
    xs_vms = get_vms(session)
    xs_srs = get_srs(session)

    # Only report the sections that actually returned data.
    if xs_vlans:
        data['xs_vlans'] = xs_vlans
    if xs_pifs:
        data['xs_pifs'] = xs_pifs
    if xs_networks:
        data['xs_networks'] = xs_networks
    if xs_vms:
        data['xs_vms'] = xs_vms
    if xs_srs:
        data['xs_srs'] = xs_srs

    module.exit_json(ansible=data)
from ansible.module_utils.basic import *
if __name__ == '__main__':
main()
| gpl-3.0 |
srznew/heat | heat/db/sqlalchemy/models.py | 5 | 17313 | #
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
SQLAlchemy models for heat data.
"""
import uuid
from oslo_db.sqlalchemy import models
from oslo_utils import timeutils
import six
import sqlalchemy
from sqlalchemy.ext import declarative
from sqlalchemy.orm import backref
from sqlalchemy.orm import relationship
from sqlalchemy.orm import session as orm_session
from heat.db.sqlalchemy import types
BASE = declarative.declarative_base()
def get_session():
    """Return a database session from the db api.

    Imported lazily; presumably to avoid a circular import with
    heat.db.sqlalchemy.api -- confirm before moving to module level.
    """
    from heat.db.sqlalchemy import api as db_api
    return db_api.get_session()
class HeatBase(models.ModelBase, models.TimestampMixin):
    """Base class for Heat Models."""
    __table_args__ = {'mysql_engine': 'InnoDB'}

    def _resolve_session(self, session=None):
        """Return *session*, else this object's bound session, else a new one.

        Extracted because expire/refresh/delete/update_and_save all
        duplicated this lookup chain.
        """
        if not session:
            session = orm_session.Session.object_session(self)
            if not session:
                session = get_session()
        return session

    def expire(self, session=None, attrs=None):
        """Expire this object ()."""
        session = self._resolve_session(session)
        session.expire(self, attrs)

    def refresh(self, session=None, attrs=None):
        """Refresh this object."""
        session = self._resolve_session(session)
        session.refresh(self, attrs)

    def delete(self, session=None):
        """Delete this object."""
        session = self._resolve_session(session)
        session.begin()
        session.delete(self)
        session.commit()

    def update_and_save(self, values, session=None):
        """Set the given attribute values and commit in one transaction."""
        session = self._resolve_session(session)
        session.begin()
        for k, v in six.iteritems(values):
            setattr(self, k, v)
        session.commit()
class SoftDelete(object):
    """Mixin adding soft-delete support via a deleted_at timestamp."""
    deleted_at = sqlalchemy.Column(sqlalchemy.DateTime)

    def soft_delete(self, session=None):
        """Mark this object as deleted."""
        self.update_and_save({'deleted_at': timeutils.utcnow()},
                             session=session)
class StateAware(object):
    """Mixin adding action/status/status_reason state columns."""
    action = sqlalchemy.Column('action', sqlalchemy.String(255))
    status = sqlalchemy.Column('status', sqlalchemy.String(255))
    status_reason = sqlalchemy.Column('status_reason', sqlalchemy.Text)
class RawTemplate(BASE, HeatBase):
    """Represents an unparsed template which should be in JSON format."""

    __tablename__ = 'raw_template'
    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    template = sqlalchemy.Column(types.Json)
    files = sqlalchemy.Column(types.Json)
    environment = sqlalchemy.Column('environment', types.Json)
class StackTag(BASE, HeatBase):
    """Key/value store of arbitrary stack tags."""

    __tablename__ = 'stack_tag'
    id = sqlalchemy.Column('id',
                           sqlalchemy.Integer,
                           primary_key=True,
                           nullable=False)
    tag = sqlalchemy.Column('tag', sqlalchemy.Unicode(80))
    stack_id = sqlalchemy.Column('stack_id',
                                 sqlalchemy.String(36),
                                 sqlalchemy.ForeignKey('stack.id'),
                                 nullable=False)
class SyncPoint(BASE, HeatBase):
    """Represents a sync point for a stack that is being worked on."""

    __tablename__ = 'sync_point'
    __table_args__ = (
        sqlalchemy.PrimaryKeyConstraint('entity_id',
                                        'traversal_id',
                                        'is_update'),
        sqlalchemy.ForeignKeyConstraint(['stack_id'], ['stack.id'])
    )
    entity_id = sqlalchemy.Column(sqlalchemy.String(36))
    traversal_id = sqlalchemy.Column(sqlalchemy.String(36))
    is_update = sqlalchemy.Column(sqlalchemy.Boolean)
    # integer field for atomic update operations
    atomic_key = sqlalchemy.Column(sqlalchemy.Integer, nullable=False)
    stack_id = sqlalchemy.Column(sqlalchemy.String(36),
                                 nullable=False)
    input_data = sqlalchemy.Column(types.Json)
class Stack(BASE, HeatBase, SoftDelete, StateAware):
    """Represents a stack created by the heat engine."""

    __tablename__ = 'stack'
    __table_args__ = (
        sqlalchemy.Index('ix_stack_name', 'name', mysql_length=255),
        sqlalchemy.Index('ix_stack_tenant', 'tenant', mysql_length=255),
    )
    id = sqlalchemy.Column(sqlalchemy.String(36), primary_key=True,
                           default=lambda: str(uuid.uuid4()))
    name = sqlalchemy.Column(sqlalchemy.String(255))
    raw_template_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey('raw_template.id'),
        nullable=False)
    raw_template = relationship(RawTemplate, backref=backref('stack'),
                                foreign_keys=[raw_template_id])
    # Template in effect before the current update (convergence path).
    prev_raw_template_id = sqlalchemy.Column(
        'prev_raw_template_id',
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey('raw_template.id'))
    prev_raw_template = relationship(RawTemplate,
                                     foreign_keys=[prev_raw_template_id])
    username = sqlalchemy.Column(sqlalchemy.String(256))
    tenant = sqlalchemy.Column(sqlalchemy.String(256))
    user_creds_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey('user_creds.id'))
    owner_id = sqlalchemy.Column(sqlalchemy.String(36))
    parent_resource_name = sqlalchemy.Column(sqlalchemy.String(255))
    timeout = sqlalchemy.Column(sqlalchemy.Integer)
    disable_rollback = sqlalchemy.Column(sqlalchemy.Boolean, nullable=False)
    stack_user_project_id = sqlalchemy.Column(sqlalchemy.String(64))
    backup = sqlalchemy.Column('backup', sqlalchemy.Boolean)
    nested_depth = sqlalchemy.Column('nested_depth', sqlalchemy.Integer)
    convergence = sqlalchemy.Column('convergence', sqlalchemy.Boolean)
    tags = relationship(StackTag, cascade="all,delete",
                        backref=backref('stack'))
    current_traversal = sqlalchemy.Column('current_traversal',
                                          sqlalchemy.String(36))
    current_deps = sqlalchemy.Column('current_deps', types.Json)

    # Override timestamp column to store the correct value: it should be the
    # time the create/update call was issued, not the time the DB entry is
    # created/modified. (bug #1193269)
    updated_at = sqlalchemy.Column(sqlalchemy.DateTime)
class StackLock(BASE, HeatBase):
    """Store stack locks for deployments with multiple-engines."""

    __tablename__ = 'stack_lock'
    stack_id = sqlalchemy.Column(sqlalchemy.String(36),
                                 sqlalchemy.ForeignKey('stack.id'),
                                 primary_key=True)
    # Engine currently holding the lock on this stack.
    engine_id = sqlalchemy.Column(sqlalchemy.String(36))
class UserCreds(BASE, HeatBase):
    """Represents user credentials, mirroring the 'context' handed in by wsgi."""

    __tablename__ = 'user_creds'
    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    username = sqlalchemy.Column(sqlalchemy.String(255))
    # NOTE(review): the decrypt_method column suggests the password is stored
    # encrypted -- confirm against heat.db.sqlalchemy.api.
    password = sqlalchemy.Column(sqlalchemy.String(255))
    region_name = sqlalchemy.Column(sqlalchemy.String(255))
    decrypt_method = sqlalchemy.Column(sqlalchemy.String(64))
    tenant = sqlalchemy.Column(sqlalchemy.String(1024))
    auth_url = sqlalchemy.Column(sqlalchemy.Text)
    tenant_id = sqlalchemy.Column(sqlalchemy.String(256))
    trust_id = sqlalchemy.Column(sqlalchemy.String(255))
    trustor_user_id = sqlalchemy.Column(sqlalchemy.String(64))
    stack = relationship(Stack, backref=backref('user_creds'))
class Event(BASE, HeatBase):
    """Represents an event generated by the heat engine."""

    __tablename__ = 'event'

    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    stack_id = sqlalchemy.Column(sqlalchemy.String(36),
                                 sqlalchemy.ForeignKey('stack.id'),
                                 nullable=False)
    stack = relationship(Stack, backref=backref('events'))
    uuid = sqlalchemy.Column(sqlalchemy.String(36),
                             default=lambda: str(uuid.uuid4()),
                             unique=True)
    resource_action = sqlalchemy.Column(sqlalchemy.String(255))
    resource_status = sqlalchemy.Column(sqlalchemy.String(255))
    resource_name = sqlalchemy.Column(sqlalchemy.String(255))
    physical_resource_id = sqlalchemy.Column(sqlalchemy.String(255))
    _resource_status_reason = sqlalchemy.Column(
        'resource_status_reason', sqlalchemy.String(255))
    resource_type = sqlalchemy.Column(sqlalchemy.String(255))
    resource_properties = sqlalchemy.Column(sqlalchemy.PickleType)

    @property
    def resource_status_reason(self):
        """Reason string as stored (truncated to the 255-char column)."""
        return self._resource_status_reason

    @resource_status_reason.setter
    def resource_status_reason(self, reason):
        # Conditional expression instead of the fragile `and/or` idiom;
        # truncate to the column limit, store '' for falsy reasons.
        self._resource_status_reason = reason[:255] if reason else ''
class ResourceData(BASE, HeatBase):
    """Key/value store of arbitrary, resource-specific data."""

    __tablename__ = 'resource_data'
    id = sqlalchemy.Column('id',
                           sqlalchemy.Integer,
                           primary_key=True,
                           nullable=False)
    key = sqlalchemy.Column('key', sqlalchemy.String(255))
    value = sqlalchemy.Column('value', sqlalchemy.Text)
    # redact presumably flags values that must be hidden in API output;
    # decrypt_method pairs with encrypted values -- confirm in db api.
    redact = sqlalchemy.Column('redact', sqlalchemy.Boolean)
    decrypt_method = sqlalchemy.Column(sqlalchemy.String(64))
    resource_id = sqlalchemy.Column('resource_id',
                                    sqlalchemy.Integer,
                                    sqlalchemy.ForeignKey('resource.id'),
                                    nullable=False)
class Resource(BASE, HeatBase, StateAware):
    """Represents a resource created by the heat engine."""

    __tablename__ = 'resource'
    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    uuid = sqlalchemy.Column(sqlalchemy.String(36),
                             default=lambda: str(uuid.uuid4()),
                             unique=True)
    name = sqlalchemy.Column('name', sqlalchemy.String(255))
    nova_instance = sqlalchemy.Column('nova_instance', sqlalchemy.String(255))
    # odd name as "metadata" is reserved
    rsrc_metadata = sqlalchemy.Column('rsrc_metadata', types.Json)
    stack_id = sqlalchemy.Column(sqlalchemy.String(36),
                                 sqlalchemy.ForeignKey('stack.id'),
                                 nullable=False)
    stack = relationship(Stack, backref=backref('resources'))
    data = relationship(ResourceData,
                        cascade="all,delete",
                        backref=backref('resource'))

    # Override timestamp column to store the correct value: it should be the
    # time the create/update call was issued, not the time the DB entry is
    # created/modified. (bug #1193269)
    updated_at = sqlalchemy.Column(sqlalchemy.DateTime)
    properties_data = sqlalchemy.Column('properties_data', types.Json)
    properties_data_encrypted = sqlalchemy.Column('properties_data_encrypted',
                                                  sqlalchemy.Boolean)
    engine_id = sqlalchemy.Column(sqlalchemy.String(36))
    atomic_key = sqlalchemy.Column(sqlalchemy.Integer)
    # Convergence graph bookkeeping: dependency lists and replacement links.
    needed_by = sqlalchemy.Column('needed_by', types.List)
    requires = sqlalchemy.Column('requires', types.List)
    replaces = sqlalchemy.Column('replaces', sqlalchemy.Integer,
                                 default=None)
    replaced_by = sqlalchemy.Column('replaced_by', sqlalchemy.Integer,
                                    default=None)
    current_template_id = sqlalchemy.Column(
        'current_template_id',
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey('raw_template.id'))
class WatchRule(BASE, HeatBase):
    """Represents a watch_rule created by the heat engine."""

    __tablename__ = 'watch_rule'
    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    name = sqlalchemy.Column('name', sqlalchemy.String(255))
    rule = sqlalchemy.Column('rule', types.Json)
    state = sqlalchemy.Column('state', sqlalchemy.String(255))
    last_evaluated = sqlalchemy.Column(sqlalchemy.DateTime,
                                       default=timeutils.utcnow)
    stack_id = sqlalchemy.Column(sqlalchemy.String(36),
                                 sqlalchemy.ForeignKey('stack.id'),
                                 nullable=False)
    stack = relationship(Stack, backref=backref('watch_rule'))
class WatchData(BASE, HeatBase):
    """Represents a watch_data created by the heat engine."""

    __tablename__ = 'watch_data'
    id = sqlalchemy.Column(sqlalchemy.Integer, primary_key=True)
    data = sqlalchemy.Column('data', types.Json)
    watch_rule_id = sqlalchemy.Column(
        sqlalchemy.Integer,
        sqlalchemy.ForeignKey('watch_rule.id'),
        nullable=False)
    watch_rule = relationship(WatchRule, backref=backref('watch_data'))
class SoftwareConfig(BASE, HeatBase):
    """Represents a software configuration resource.

    A config may be applied to one or more servers (see SoftwareDeployment).
    """

    __tablename__ = 'software_config'
    id = sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True,
                           default=lambda: str(uuid.uuid4()))
    name = sqlalchemy.Column('name', sqlalchemy.String(255))
    group = sqlalchemy.Column('group', sqlalchemy.String(255))
    config = sqlalchemy.Column('config', types.Json)
    tenant = sqlalchemy.Column(
        'tenant', sqlalchemy.String(64), nullable=False, index=True)
class SoftwareDeployment(BASE, HeatBase, StateAware):
    """Represents applying a software configuration to a single server resource."""

    __tablename__ = 'software_deployment'
    __table_args__ = (
        sqlalchemy.Index('ix_software_deployment_created_at', 'created_at'),)
    id = sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True,
                           default=lambda: str(uuid.uuid4()))
    config_id = sqlalchemy.Column(
        'config_id',
        sqlalchemy.String(36),
        sqlalchemy.ForeignKey('software_config.id'),
        nullable=False)
    config = relationship(SoftwareConfig, backref=backref('deployments'))
    server_id = sqlalchemy.Column('server_id', sqlalchemy.String(36),
                                  nullable=False, index=True)
    input_values = sqlalchemy.Column('input_values', types.Json)
    output_values = sqlalchemy.Column('output_values', types.Json)
    tenant = sqlalchemy.Column(
        'tenant', sqlalchemy.String(64), nullable=False, index=True)
    stack_user_project_id = sqlalchemy.Column(sqlalchemy.String(64))
    # NOTE(review): likely overridden for the same reason as Stack.updated_at
    # (bug #1193269) -- confirm.
    updated_at = sqlalchemy.Column(sqlalchemy.DateTime)
class Snapshot(BASE, HeatBase):
    """Snapshot of a stack, with serialized data and its own status."""

    __tablename__ = 'snapshot'
    id = sqlalchemy.Column('id', sqlalchemy.String(36), primary_key=True,
                           default=lambda: str(uuid.uuid4()))
    stack_id = sqlalchemy.Column(sqlalchemy.String(36),
                                 sqlalchemy.ForeignKey('stack.id'),
                                 nullable=False)
    name = sqlalchemy.Column('name', sqlalchemy.String(255))
    data = sqlalchemy.Column('data', types.Json)
    tenant = sqlalchemy.Column(
        'tenant', sqlalchemy.String(64), nullable=False, index=True)
    status = sqlalchemy.Column('status', sqlalchemy.String(255))
    status_reason = sqlalchemy.Column('status_reason', sqlalchemy.String(255))
    stack = relationship(Stack, backref=backref('snapshot'))
class Service(BASE, HeatBase, SoftDelete):
    """Record of a heat engine service (host, binary, topic, heartbeat)."""

    __tablename__ = 'service'
    id = sqlalchemy.Column('id',
                           sqlalchemy.String(36),
                           primary_key=True,
                           default=lambda: str(uuid.uuid4()))
    engine_id = sqlalchemy.Column('engine_id',
                                  sqlalchemy.String(36),
                                  nullable=False)
    host = sqlalchemy.Column('host',
                             sqlalchemy.String(255),
                             nullable=False)
    hostname = sqlalchemy.Column('hostname',
                                 sqlalchemy.String(255),
                                 nullable=False)
    binary = sqlalchemy.Column('binary',
                               sqlalchemy.String(255),
                               nullable=False)
    topic = sqlalchemy.Column('topic',
                              sqlalchemy.String(255),
                              nullable=False)
    report_interval = sqlalchemy.Column('report_interval',
                                        sqlalchemy.Integer,
                                        nullable=False)
| apache-2.0 |
XiWenRen/you-get | src/you_get/extractors/yinyuetai.py | 15 | 1341 | #!/usr/bin/env python
__all__ = ['yinyuetai_download', 'yinyuetai_download_by_id']
from ..common import *
def yinyuetai_download_by_id(id, title = None, output_dir = '.', merge = True, info_only = False):
    """Download one Yinyuetai MV by its numeric video id.

    Qualities are tried best-first (he > hd > hc > anything); the first
    matching media URL wins.
    """
    assert title
    html = get_html('http://www.yinyuetai.com/insite/get-video-info?flex=true&videoId=' + id)

    for quality in ['he\w*', 'hd\w*', 'hc\w*', '\w+']:
        url = r1(r'(http://' + quality + '\.yinyuetai\.com/uploads/videos/common/\w+\.(?:flv|mp4)\?(?:sc=[a-f0-9]{16}|v=\d{12}))', html)
        if url:
            break
    assert url
    type = ext = r1(r'\.(flv|mp4)', url)
    _, _, size = url_info(url)

    print_info(site_info, title, type, size)
    if not info_only:
        download_urls([url], title, ext, size, output_dir, merge = merge)
def yinyuetai_download(url, output_dir = '.', merge = True, info_only = False):
    """Resolve a Yinyuetai video page URL to its id and title, then download."""
    id = r1(r'http://\w+.yinyuetai.com/video/(\d+)$', url.split('?')[0])
    assert id
    html = get_html(url, 'utf-8')
    # Title comes from the page's OpenGraph metadata.
    title = r1(r'<meta property="og:title"\s+content="([^"]+)"/>', html)
    assert title
    title = parse.unquote(title)
    title = escape_file_path(title)

    yinyuetai_download_by_id(id, title, output_dir, merge = merge, info_only = info_only)
# Extractor registration consumed by you-get's dispatcher.
site_info = "YinYueTai.com"
download = yinyuetai_download
download_playlist = playlist_not_supported('yinyuetai')
| mit |
odubno/microblog | venv/lib/python2.7/site-packages/pip/_vendor/requests/models.py | 277 | 26436 | # -*- coding: utf-8 -*-
"""
requests.models
~~~~~~~~~~~~~~~
This module contains the primary objects that power Requests.
"""
import collections
import datetime
from io import BytesIO, UnsupportedOperation
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header
from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url
from .packages.urllib3.exceptions import DecodeError
from .exceptions import (
HTTPError, RequestException, MissingSchema, InvalidURL,
ChunkedEncodingError, ContentDecodingError)
from .utils import (
guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, json, builtin_str, basestring, IncompleteRead)
from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically
#: processable redirect.
REDIRECT_STATI = (
    codes.moved,  # 301
    codes.found,  # 302
    codes.other,  # 303
    codes.temporary_moved,  # 307
)

#: Maximum redirects followed before giving up.
DEFAULT_REDIRECT_LIMIT = 30
#: Default chunk size (bytes) for streaming response content.
CONTENT_CHUNK_SIZE = 10 * 1024
#: Default chunk size (bytes) for line iteration.
ITER_CHUNK_SIZE = 512
class RequestEncodingMixin(object):
    """Mixin providing URL-path, form and multipart encoding helpers."""

    @property
    def path_url(self):
        """Build the path URL to use."""

        url = []

        p = urlsplit(self.url)

        path = p.path
        if not path:
            path = '/'

        url.append(path)

        query = p.query
        if query:
            url.append('?')
            url.append(query)

        return ''.join(url)

    @staticmethod
    def _encode_params(data):
        """Encode parameters in a piece of data.

        Will successfully encode parameters when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """

        if isinstance(data, (str, bytes)):
            return data
        elif hasattr(data, 'read'):
            # File-like objects pass through untouched.
            return data
        elif hasattr(data, '__iter__'):
            result = []
            for k, vs in to_key_val_list(data):
                # A scalar value is wrapped so single and multi values
                # share one code path.
                if isinstance(vs, basestring) or not hasattr(vs, '__iter__'):
                    vs = [vs]
                for v in vs:
                    if v is not None:
                        result.append(
                            (k.encode('utf-8') if isinstance(k, str) else k,
                             v.encode('utf-8') if isinstance(v, str) else v))
            return urlencode(result, doseq=True)
        else:
            return data

    @staticmethod
    def _encode_files(files, data):
        """Build the body for a multipart/form-data request.

        Will successfully encode files when passed as a dict or a list of
        2-tuples. Order is retained if data is a list of 2-tuples but arbitrary
        if parameters are supplied as a dict.
        """
        if (not files):
            raise ValueError("Files must be provided.")
        elif isinstance(data, basestring):
            raise ValueError("Data must not be a string.")

        new_fields = []
        fields = to_key_val_list(data or {})
        files = to_key_val_list(files or {})

        for field, val in fields:
            if isinstance(val, basestring) or not hasattr(val, '__iter__'):
                val = [val]
            for v in val:
                if v is not None:
                    # Don't call str() on bytestrings: in Py3 it all goes wrong.
                    if not isinstance(v, bytes):
                        v = str(v)

                    new_fields.append(
                        (field.decode('utf-8') if isinstance(field, bytes) else field,
                         v.encode('utf-8') if isinstance(v, str) else v))

        for (k, v) in files:
            # support for explicit filename: v may be a bare file object or
            # a (name, fp[, content_type[, headers]]) tuple.
            ft = None
            fh = None
            if isinstance(v, (tuple, list)):
                if len(v) == 2:
                    fn, fp = v
                elif len(v) == 3:
                    fn, fp, ft = v
                else:
                    fn, fp, ft, fh = v
            else:
                fn = guess_filename(v) or k
                fp = v
            if isinstance(fp, str):
                fp = StringIO(fp)
            if isinstance(fp, bytes):
                fp = BytesIO(fp)

            rf = RequestField(name=k, data=fp.read(),
                              filename=fn, headers=fh)
            rf.make_multipart(content_type=ft)
            new_fields.append(rf)

        body, content_type = encode_multipart_formdata(new_fields)

        return body, content_type
class RequestHooksMixin(object):
    """Mixin managing the per-event hook registry in ``self.hooks``."""

    def register_hook(self, event, hook):
        """Properly register a hook.

        ``hook`` may be a single callable or an iterable of callables;
        non-callable members of an iterable are silently skipped.
        """
        if event not in self.hooks:
            raise ValueError('Unsupported event specified, with event name "%s"' % (event))

        # collections.Callable was removed from the top-level collections
        # module in Python 3.10; the callable() builtin is equivalent
        # (both test for __call__) and portable across 2.x and 3.x.
        if callable(hook):
            self.hooks[event].append(hook)
        elif hasattr(hook, '__iter__'):
            self.hooks[event].extend(h for h in hook if callable(h))

    def deregister_hook(self, event, hook):
        """Deregister a previously registered hook.
        Returns True if the hook existed, False if not.
        """

        try:
            self.hooks[event].remove(hook)
            return True
        except ValueError:
            return False
class Request(RequestHooksMixin):
    """A user-created :class:`Request <Request>` object.

    Used to prepare a :class:`PreparedRequest <PreparedRequest>`, which is sent to the server.

    :param method: HTTP method to use.
    :param url: URL to send.
    :param headers: dictionary of headers to send.
    :param files: dictionary of {filename: fileobject} files to multipart upload.
    :param data: the body to attach the request. If a dictionary is provided, form-encoding will take place.
    :param params: dictionary of URL parameters to append to the URL.
    :param auth: Auth handler or (user, pass) tuple.
    :param cookies: dictionary or CookieJar of cookies to attach to this request.
    :param hooks: dictionary of callback hooks, for internal usage.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> req.prepare()
      <PreparedRequest [GET]>
    """
    def __init__(self,
        method=None,
        url=None,
        headers=None,
        files=None,
        data=None,
        params=None,
        auth=None,
        cookies=None,
        hooks=None):

        # Default empty dicts for dict params.
        data = [] if data is None else data
        files = [] if files is None else files
        headers = {} if headers is None else headers
        params = {} if params is None else params
        hooks = {} if hooks is None else hooks

        self.hooks = default_hooks()
        for (k, v) in list(hooks.items()):
            self.register_hook(event=k, hook=v)

        self.method = method
        self.url = url
        self.headers = headers
        self.files = files
        self.data = data
        self.params = params
        self.auth = auth
        self.cookies = cookies

    def __repr__(self):
        return '<Request [%s]>' % (self.method)

    def prepare(self):
        """Constructs a :class:`PreparedRequest <PreparedRequest>` for transmission and returns it."""
        p = PreparedRequest()
        p.prepare(
            method=self.method,
            url=self.url,
            headers=self.headers,
            files=self.files,
            data=self.data,
            params=self.params,
            auth=self.auth,
            cookies=self.cookies,
            hooks=self.hooks,
        )
        return p
class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
    """The fully mutable :class:`PreparedRequest <PreparedRequest>` object,
    containing the exact bytes that will be sent to the server.

    Generated from either a :class:`Request <Request>` object or manually.

    Usage::

      >>> import requests
      >>> req = requests.Request('GET', 'http://httpbin.org/get')
      >>> r = req.prepare()
      <PreparedRequest [GET]>

      >>> s = requests.Session()
      >>> s.send(r)
      <Response [200]>
    """
    def __init__(self):
        """Initialize an empty, not-yet-prepared request."""
        #: HTTP verb to send to the server.
        self.method = None
        #: HTTP URL to send the request to.
        self.url = None
        #: dictionary of HTTP headers.
        self.headers = None
        # The `CookieJar` used to create the Cookie header will be stored here
        # after prepare_cookies is called
        self._cookies = None
        #: request body to send to the server.
        self.body = None
        #: dictionary of callback hooks, for internal usage.
        self.hooks = default_hooks()
    def prepare(self, method=None, url=None, headers=None, files=None,
                data=None, params=None, auth=None, cookies=None, hooks=None):
        """Prepares the entire request with the given parameters."""

        self.prepare_method(method)
        self.prepare_url(url, params)
        self.prepare_headers(headers)
        self.prepare_cookies(cookies)
        self.prepare_body(data, files)
        self.prepare_auth(auth, url)
        # Note that prepare_auth must be last to enable authentication schemes
        # such as OAuth to work on a fully prepared request.

        # This MUST go after prepare_auth. Authenticators could add a hook
        self.prepare_hooks(hooks)
def __repr__(self):
return '<PreparedRequest [%s]>' % (self.method)
def copy(self):
p = PreparedRequest()
p.method = self.method
p.url = self.url
p.headers = self.headers.copy()
p._cookies = self._cookies.copy()
p.body = self.body
p.hooks = self.hooks
return p
def prepare_method(self, method):
"""Prepares the given HTTP method."""
self.method = method
if self.method is not None:
self.method = self.method.upper()
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
try:
url = unicode(url)
except NameError:
# We're on Python 3.
url = str(url)
except UnicodeDecodeError:
pass
# Don't do any URL preparation for oddball schemes
if ':' in url and not url.lower().startswith('http'):
self.url = url
return
# Support for unicode domain names and paths.
scheme, auth, host, port, path, query, fragment = parse_url(url)
if not scheme:
raise MissingSchema("Invalid URL {0!r}: No schema supplied. "
"Perhaps you meant http://{0}?".format(url))
if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url)
# Only want to apply IDNA to the hostname
try:
host = host.encode('idna').decode('utf-8')
except UnicodeError:
raise InvalidURL('URL has an invalid label.')
# Carefully reconstruct the network location
netloc = auth or ''
if netloc:
netloc += '@'
netloc += host
if port:
netloc += ':' + str(port)
# Bare domains aren't valid URLs.
if not path:
path = '/'
if is_py2:
if isinstance(scheme, str):
scheme = scheme.encode('utf-8')
if isinstance(netloc, str):
netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
if isinstance(query, str):
query = query.encode('utf-8')
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
enc_params = self._encode_params(params)
if enc_params:
if query:
query = '%s&%s' % (query, enc_params)
else:
query = enc_params
url = requote_uri(urlunparse([scheme, netloc, path, None, query, fragment]))
self.url = url
def prepare_headers(self, headers):
"""Prepares the given HTTP headers."""
if headers:
self.headers = CaseInsensitiveDict((to_native_string(name), value) for name, value in headers.items())
else:
self.headers = CaseInsensitiveDict()
def prepare_body(self, data, files):
"""Prepares the given HTTP body data."""
# Check if file, fo, generator, iterator.
# If not, run through normal process.
# Nottin' on you.
body = None
content_type = None
length = None
is_stream = all([
hasattr(data, '__iter__'),
not isinstance(data, (basestring, list, tuple, dict))
])
try:
length = super_len(data)
except (TypeError, AttributeError, UnsupportedOperation):
length = None
if is_stream:
body = data
if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
if length is not None:
self.headers['Content-Length'] = builtin_str(length)
else:
self.headers['Transfer-Encoding'] = 'chunked'
else:
# Multi-part file uploads.
if files:
(body, content_type) = self._encode_files(files, data)
else:
if data:
body = self._encode_params(data)
if isinstance(data, str) or isinstance(data, builtin_str) or hasattr(data, 'read'):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
if (content_type) and (not 'content-type' in self.headers):
self.headers['Content-Type'] = content_type
self.body = body
def prepare_content_length(self, body):
if hasattr(body, 'seek') and hasattr(body, 'tell'):
body.seek(0, 2)
self.headers['Content-Length'] = builtin_str(body.tell())
body.seek(0, 0)
elif body is not None:
l = super_len(body)
if l:
self.headers['Content-Length'] = builtin_str(l)
elif self.method not in ('GET', 'HEAD'):
self.headers['Content-Length'] = '0'
def prepare_auth(self, auth, url=''):
"""Prepares the given HTTP auth data."""
# If no Auth is explicitly provided, extract it from the URL first.
if auth is None:
url_auth = get_auth_from_url(self.url)
auth = url_auth if any(url_auth) else None
if auth:
if isinstance(auth, tuple) and len(auth) == 2:
# special-case basic HTTP auth
auth = HTTPBasicAuth(*auth)
# Allow auth to make its changes.
r = auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
# Recompute Content-Length
self.prepare_content_length(self.body)
def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data."""
if isinstance(cookies, cookielib.CookieJar):
self._cookies = cookies
else:
self._cookies = cookiejar_from_dict(cookies)
cookie_header = get_cookie_header(self._cookies, self)
if cookie_header is not None:
self.headers['Cookie'] = cookie_header
def prepare_hooks(self, hooks):
"""Prepares the given hooks."""
for event in hooks:
self.register_hook(event, hooks[event])
class Response(object):
    """The :class:`Response <Response>` object, which contains a
    server's response to an HTTP request.
    """

    # Attributes preserved across pickling (see __getstate__/__setstate__).
    # ``raw`` is deliberately absent: the underlying stream is not picklable.
    __attrs__ = [
        '_content',
        'status_code',
        'headers',
        'url',
        'history',
        'encoding',
        'reason',
        'cookies',
        'elapsed',
        'request',
    ]

    def __init__(self):
        super(Response, self).__init__()

        # ``False`` is the "not read yet" sentinel; ``None`` means the body
        # could not be read.  See the ``content`` property.
        self._content = False
        self._content_consumed = False

        #: Integer Code of responded HTTP Status, e.g. 404 or 200.
        self.status_code = None

        #: Case-insensitive Dictionary of Response Headers.
        #: For example, ``headers['content-encoding']`` will return the
        #: value of a ``'Content-Encoding'`` response header.
        self.headers = CaseInsensitiveDict()

        #: File-like object representation of response (for advanced usage).
        #: Use of ``raw`` requires that ``stream=True`` be set on the request.
        # This requirement does not apply for use internally to Requests.
        self.raw = None

        #: Final URL location of Response.
        self.url = None

        #: Encoding to decode with when accessing r.text.
        self.encoding = None

        #: A list of :class:`Response <Response>` objects from
        #: the history of the Request. Any redirect responses will end
        #: up here. The list is sorted from the oldest to the most recent request.
        self.history = []

        #: Textual reason of responded HTTP Status, e.g. "Not Found" or "OK".
        self.reason = None

        #: A CookieJar of Cookies the server sent back.
        self.cookies = cookiejar_from_dict({})

        #: The amount of time elapsed between sending the request
        #: and the arrival of the response (as a timedelta)
        self.elapsed = datetime.timedelta(0)

    def __getstate__(self):
        # Consume everything; accessing the content attribute makes
        # sure the content has been fully read.
        if not self._content_consumed:
            self.content

        return dict(
            (attr, getattr(self, attr, None))
            for attr in self.__attrs__
        )

    def __setstate__(self, state):
        for name, value in state.items():
            setattr(self, name, value)

        # pickled objects do not have .raw
        setattr(self, '_content_consumed', True)
        setattr(self, 'raw', None)

    def __repr__(self):
        return '<Response [%s]>' % (self.status_code)

    def __bool__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        # Python 3 truthiness hook.
        return self.ok

    def __nonzero__(self):
        """Returns true if :attr:`status_code` is 'OK'."""
        # Python 2 truthiness hook; same semantics as __bool__.
        return self.ok

    def __iter__(self):
        """Allows you to use a response as an iterator."""
        return self.iter_content(128)

    @property
    def ok(self):
        # True iff raise_for_status() would not raise (status < 400).
        try:
            self.raise_for_status()
        except RequestException:
            return False
        return True

    @property
    def is_redirect(self):
        """True if this Response is a well-formed HTTP redirect that could have
        been processed automatically (by :meth:`Session.resolve_redirects`).
        """
        return ('location' in self.headers and self.status_code in REDIRECT_STATI)

    @property
    def apparent_encoding(self):
        """The apparent encoding, provided by the chardet library"""
        return chardet.detect(self.content)['encoding']

    def iter_content(self, chunk_size=1, decode_unicode=False):
        """Iterates over the response data. When stream=True is set on the
        request, this avoids reading the content at once into memory for
        large responses. The chunk size is the number of bytes it should
        read into memory. This is not necessarily the length of each item
        returned as decoding can take place.

        If decode_unicode is True, content will be decoded using the best
        available encoding based on the response.
        """
        def generate():
            try:
                # Special case for urllib3.
                try:
                    for chunk in self.raw.stream(chunk_size, decode_content=True):
                        yield chunk
                except IncompleteRead as e:
                    raise ChunkedEncodingError(e)
                except DecodeError as e:
                    raise ContentDecodingError(e)
            except AttributeError:
                # Standard file-like object (``raw`` has no ``stream``).
                while True:
                    chunk = self.raw.read(chunk_size)
                    if not chunk:
                        break
                    yield chunk

            self._content_consumed = True

        # simulate reading small chunks of the content
        reused_chunks = iter_slices(self._content, chunk_size)

        stream_chunks = generate()

        # Once the body is cached in _content, re-slice the cached bytes
        # instead of touching the exhausted raw stream.
        chunks = reused_chunks if self._content_consumed else stream_chunks

        if decode_unicode:
            chunks = stream_decode_response_unicode(chunks, self)

        return chunks

    def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None):
        """Iterates over the response data, one line at a time. When
        stream=True is set on the request, this avoids reading the
        content at once into memory for large responses.
        """
        pending = None

        for chunk in self.iter_content(chunk_size=chunk_size, decode_unicode=decode_unicode):
            if pending is not None:
                chunk = pending + chunk
            lines = chunk.splitlines()

            # If the chunk does not end exactly on a line boundary, hold the
            # trailing partial line back and prepend it to the next chunk.
            if lines and lines[-1] and chunk and lines[-1][-1] == chunk[-1]:
                pending = lines.pop()
            else:
                pending = None

            for line in lines:
                yield line

        if pending is not None:
            yield pending

    @property
    def content(self):
        """Content of the response, in bytes."""
        if self._content is False:
            # Read the contents.
            try:
                if self._content_consumed:
                    raise RuntimeError(
                        'The content for this response was already consumed')

                if self.status_code == 0:
                    self._content = None
                else:
                    self._content = bytes().join(self.iter_content(CONTENT_CHUNK_SIZE)) or bytes()

            except AttributeError:
                self._content = None

        self._content_consumed = True
        # don't need to release the connection; that's been handled by urllib3
        # since we exhausted the data.
        return self._content

    @property
    def text(self):
        """Content of the response, in unicode.

        If Response.encoding is None, encoding will be guessed using
        ``chardet``.

        The encoding of the response content is determined based solely on HTTP
        headers, following RFC 2616 to the letter. If you can take advantage of
        non-HTTP knowledge to make a better guess at the encoding, you should
        set ``r.encoding`` appropriately before accessing this property.
        """
        # Try charset from content-type
        content = None
        encoding = self.encoding

        if not self.content:
            return str('')

        # Fallback to auto-detected encoding.
        if self.encoding is None:
            encoding = self.apparent_encoding

        # Decode unicode from given encoding.
        try:
            content = str(self.content, encoding, errors='replace')
        except (LookupError, TypeError):
            # A LookupError is raised if the encoding was not found which could
            # indicate a misspelling or similar mistake.
            #
            # A TypeError can be raised if encoding is None
            #
            # So we try blindly encoding.
            content = str(self.content, errors='replace')

        return content

    def json(self, **kwargs):
        """Returns the json-encoded content of a response, if any.

        :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
        """
        if not self.encoding and len(self.content) > 3:
            # No encoding set. JSON RFC 4627 section 3 states we should expect
            # UTF-8, -16 or -32. Detect which one to use; If the detection or
            # decoding fails, fall back to `self.text` (using chardet to make
            # a best guess).
            encoding = guess_json_utf(self.content)
            if encoding is not None:
                try:
                    return json.loads(self.content.decode(encoding), **kwargs)
                except UnicodeDecodeError:
                    # Wrong UTF codec detected; usually because it's not UTF-8
                    # but some other 8-bit codec. This is an RFC violation,
                    # and the server didn't bother to tell us what codec *was*
                    # used.
                    pass
        return json.loads(self.text, **kwargs)

    @property
    def links(self):
        """Returns the parsed header links of the response, if any."""

        header = self.headers.get('link')

        # l = MultiDict()
        # Keyed by each link's 'rel' (or 'url' when no rel is present).
        l = {}

        if header:
            links = parse_header_links(header)

            for link in links:
                key = link.get('rel') or link.get('url')
                l[key] = link

        return l

    def raise_for_status(self):
        """Raises stored :class:`HTTPError`, if one occurred."""

        http_error_msg = ''

        if 400 <= self.status_code < 500:
            http_error_msg = '%s Client Error: %s' % (self.status_code, self.reason)

        elif 500 <= self.status_code < 600:
            http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)

        if http_error_msg:
            raise HTTPError(http_error_msg, response=self)

    def close(self):
        """Releases the connection back to the pool. Once this method has been
        called the underlying ``raw`` object must not be accessed again.

        *Note: Should not normally need to be called explicitly.*
        """
        return self.raw.release_conn()
| bsd-3-clause |
butozerca/fireplace | fireplace/cards/blackrock/brawl.py | 1 | 1297 | from ..utils import *
##
# Hero Powers
# Wild Magic
class TBA01_5:
    """Wild Magic (Tavern Brawl hero power): on activation, give the
    controller a random minion card, buffed with TBA01_5e (per card DSL)."""
    activate = Buff(Give(CONTROLLER, RandomMinion()), "TBA01_5e")
class TBA01_5e:
    """Enchantment applied by Wild Magic: the affected card costs 0."""
    cost = lambda self, i: 0
# Molten Rage
class TBA01_6:
    """Molten Rage (Tavern Brawl hero power): summon CS2_118 for the controller."""
    activate = Summon(CONTROLLER, "CS2_118")
##
# Minions
# Dragonkin Hatcher
class BRMC_84:
    """Dragonkin Hatcher: on play, summon two BRMA09_2Ht for the controller."""
    play = Summon(CONTROLLER, "BRMA09_2Ht") * 2
# Lucifron
class BRMC_85:
    """Lucifron: on play, apply the CS2_063e buff to every other minion."""
    play = Buff(ALL_MINIONS - SELF, "CS2_063e")
# Atramedes
class BRMC_86:
    """Atramedes: whenever the opponent plays a card, buff self with BRMC_86e."""
    events = Play(OPPONENT).on(Buff(SELF, "BRMC_86e"))
# Moira Bronzebeard
class BRMC_87:
    """Moira Bronzebeard: deathrattle, summon BRM_028 for the controller."""
    deathrattle = Summon(CONTROLLER, "BRM_028")
# Drakonid Slayer
class BRMC_88:
    """Drakonid Slayer: when this attacks, also hit the minions adjacent to
    the target for this minion's ATK value."""
    events = Attack(SELF).on(Hit(TARGET_ADJACENT, Attr(SELF, GameTag.ATK)))
# Son of the Flame
class BRMC_91:
    """Son of the Flame: on play, deal 6 damage to the target."""
    play = Hit(TARGET, 6)
# Coren Direbrew
class BRMC_92:
    """Coren Direbrew: on play, give the controller an EX1_407 card."""
    play = Give(CONTROLLER, "EX1_407")
# Golemagg
class BRMC_95:
    """Golemagg: costs less for each point of damage on the controller's hero
    (base cost ``i`` minus hero damage)."""
    cost = lambda self, i: i - self.controller.hero.damage
# High Justice Grimstone
class BRMC_96:
    """High Justice Grimstone: at the start of the controller's turn, summon
    a random legendary minion."""
    events = OWN_TURN_BEGIN.on(Summon(CONTROLLER, RandomMinion(rarity=Rarity.LEGENDARY)))
# Razorgore
class BRMC_98:
    """Razorgore: at the start of the controller's turn, apply the BRMC_98e
    buff to all friendly minions."""
    events = OWN_TURN_BEGIN.on(Buff(FRIENDLY_MINIONS, "BRMC_98e"))
# Garr
class BRMC_99:
    """Garr: whenever this minion takes damage, summon BRMC_99e for the controller."""
    events = SELF_DAMAGE.on(Summon(CONTROLLER, "BRMC_99e"))
##
# Spells
# Open the Gates
class BRMC_83:
    """Open the Gates: summon seven BRMA09_2Ht minions for the controller."""
    play = Summon(CONTROLLER, "BRMA09_2Ht") * 7
| agpl-3.0 |
jlcarmic/producthunt_simulator | venv/lib/python2.7/site-packages/scipy/linalg/decomp.py | 15 | 31227 | #
# Author: Pearu Peterson, March 2002
#
# additions by Travis Oliphant, March 2002
# additions by Eric Jones, June 2002
# additions by Johannes Loehnert, June 2006
# additions by Bart Vandereycken, June 2006
# additions by Andrew D Straw, May 2007
# additions by Tiziano Zito, November 2008
#
# April 2010: Functions for LU, QR, SVD, Schur and Cholesky decompositions were
# moved to their own files. Still in this file are functions for eigenstuff
# and for the Hessenberg form.
from __future__ import division, print_function, absolute_import

# Names exported by ``from scipy.linalg.decomp import *``.
__all__ = ['eig', 'eigh', 'eig_banded', 'eigvals', 'eigvalsh',
           'eigvals_banded', 'hessenberg']

import numpy
from numpy import (array, isfinite, inexact, nonzero, iscomplexobj, cast,
                   flatnonzero, conj)

# Local imports
from scipy._lib.six import xrange
from scipy._lib._util import _asarray_validated
from .misc import LinAlgError, _datacopied, norm
from .lapack import get_lapack_funcs, _compute_lwork

# Imaginary unit as a single-precision complex scalar; used to assemble
# complex eigenvalues from the separate real/imaginary arrays that the
# real-valued LAPACK drivers return.
_I = cast['F'](1j)
def _make_complex_eigvecs(w, vin, dtype):
"""
Produce complex-valued eigenvectors from LAPACK DGGEV real-valued output
"""
# - see LAPACK man page DGGEV at ALPHAI
v = numpy.array(vin, dtype=dtype)
m = (w.imag > 0)
m[:-1] |= (w.imag[1:] < 0) # workaround for LAPACK bug, cf. ticket #709
for i in flatnonzero(m):
v.imag[:, i] = vin[:, i+1]
conj(v[:, i], v[:, i+1])
return v
def _geneig(a1, b1, left, right, overwrite_a, overwrite_b):
    """Solve the generalized eigenvalue problem ``a1 x = w b1 x`` via LAPACK ``ggev``.

    Parameters mirror :func:`eig`; ``a1`` and ``b1`` are already-validated
    square arrays.  Returns ``w`` alone, or ``w`` plus the requested left
    and/or right eigenvector arrays according to the (left, right) flags.
    """
    ggev, = get_lapack_funcs(('ggev',), (a1, b1))
    cvl, cvr = left, right
    # Workspace query: lwork=-1 makes ggev report the optimal workspace
    # size in the work array (res[-2][0]).
    res = ggev(a1, b1, lwork=-1)
    # FIX: ``numpy.int`` was a plain alias of the builtin ``int`` and was
    # removed in NumPy 1.24; using the builtin is the exact equivalent and
    # keeps this working on modern NumPy.
    lwork = res[-2][0].real.astype(int)
    if ggev.typecode in 'cz':
        alpha, beta, vl, vr, work, info = ggev(a1, b1, cvl, cvr, lwork,
                                               overwrite_a, overwrite_b)
        w = alpha / beta
    else:
        # Real drivers return eigenvalues split into real/imaginary parts.
        alphar, alphai, beta, vl, vr, work, info = ggev(a1, b1, cvl, cvr,
                                                        lwork, overwrite_a,
                                                        overwrite_b)
        w = (alphar + _I * alphai) / beta
    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal ggev' %
                         -info)
    if info > 0:
        raise LinAlgError("generalized eig algorithm did not converge "
                          "(info=%d)" % info)

    # For real drivers, repack the real-valued eigenvector storage into
    # complex vectors whenever any eigenvalue has a nonzero imaginary part.
    only_real = numpy.logical_and.reduce(numpy.equal(w.imag, 0.0))
    if not (ggev.typecode in 'cz' or only_real):
        t = w.dtype.char
        if left:
            vl = _make_complex_eigvecs(w, vl, t)
        if right:
            vr = _make_complex_eigvecs(w, vr, t)

    # the eigenvectors returned by the lapack function are NOT normalized
    for i in xrange(vr.shape[0]):
        if right:
            vr[:, i] /= norm(vr[:, i])
        if left:
            vl[:, i] /= norm(vl[:, i])

    if not (left or right):
        return w
    if left:
        if right:
            return w, vl, vr
        return w, vl
    return w, vr
def eig(a, b=None, left=False, right=True, overwrite_a=False,
        overwrite_b=False, check_finite=True):
    """
    Solve an ordinary or generalized eigenvalue problem of a square matrix.

    Find eigenvalues w and right or left eigenvectors of a general matrix::

        a   vr[:,i] = w[i]        b   vr[:,i]
        a.H vl[:,i] = w[i].conj() b.H vl[:,i]

    where ``.H`` is the Hermitian conjugation.

    Parameters
    ----------
    a : (M, M) array_like
        A complex or real matrix whose eigenvalues and eigenvectors
        will be computed.
    b : (M, M) array_like, optional
        Right-hand side matrix in a generalized eigenvalue problem.
        Default is None, identity matrix is assumed.
    left : bool, optional
        Whether to calculate and return left eigenvectors.  Default is False.
    right : bool, optional
        Whether to calculate and return right eigenvectors.  Default is True.
    overwrite_a : bool, optional
        Whether to overwrite `a`; may improve performance.  Default is False.
    overwrite_b : bool, optional
        Whether to overwrite `b`; may improve performance.  Default is False.
    check_finite : bool, optional
        Whether to check that the input matrices contain only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination) if the inputs do contain infinities or NaNs.

    Returns
    -------
    w : (M,) double or complex ndarray
        The eigenvalues, each repeated according to its multiplicity.
    vl : (M, M) double or complex ndarray
        The normalized left eigenvector corresponding to the eigenvalue
        ``w[i]`` is the column vl[:,i]. Only returned if ``left=True``.
    vr : (M, M) double or complex ndarray
        The normalized right eigenvector corresponding to the eigenvalue
        ``w[i]`` is the column ``vr[:,i]``.  Only returned if ``right=True``.

    Raises
    ------
    LinAlgError
        If eigenvalue computation does not converge.

    See Also
    --------
    eigh : Eigenvalues and right eigenvectors for symmetric/Hermitian arrays.
    """
    a1 = _asarray_validated(a, check_finite=check_finite)
    if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
        raise ValueError('expected square matrix')
    overwrite_a = overwrite_a or (_datacopied(a1, a))
    if b is not None:
        # Generalized problem: validate b and delegate to the ggev path.
        b1 = _asarray_validated(b, check_finite=check_finite)
        overwrite_b = overwrite_b or _datacopied(b1, b)
        if len(b1.shape) != 2 or b1.shape[0] != b1.shape[1]:
            raise ValueError('expected square matrix')
        if b1.shape != a1.shape:
            raise ValueError('a and b must have the same shape')
        return _geneig(a1, b1, left, right, overwrite_a, overwrite_b)

    geev, geev_lwork = get_lapack_funcs(('geev', 'geev_lwork'), (a1,))
    compute_vl, compute_vr = left, right

    # Workspace query for the optimal lwork.
    lwork = _compute_lwork(geev_lwork, a1.shape[0],
                           compute_vl=compute_vl,
                           compute_vr=compute_vr)

    if geev.typecode in 'cz':
        w, vl, vr, info = geev(a1, lwork=lwork,
                               compute_vl=compute_vl,
                               compute_vr=compute_vr,
                               overwrite_a=overwrite_a)
    else:
        # Real drivers return eigenvalues split into real/imaginary parts;
        # recombine them into a complex array.
        wr, wi, vl, vr, info = geev(a1, lwork=lwork,
                                    compute_vl=compute_vl,
                                    compute_vr=compute_vr,
                                    overwrite_a=overwrite_a)
        t = {'f': 'F', 'd': 'D'}[wr.dtype.char]
        w = wr + _I * wi

    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal geev' %
                         -info)
    if info > 0:
        raise LinAlgError("eig algorithm did not converge (only eigenvalues "
                          "with order >= %d have converged)" % info)

    # For real drivers, repack the real-valued eigenvector storage into
    # complex vectors whenever any eigenvalue is genuinely complex
    # (conjugate pairs share adjacent columns).
    only_real = numpy.logical_and.reduce(numpy.equal(w.imag, 0.0))
    if not (geev.typecode in 'cz' or only_real):
        t = w.dtype.char
        if left:
            vl = _make_complex_eigvecs(w, vl, t)
        if right:
            vr = _make_complex_eigvecs(w, vr, t)
    if not (left or right):
        return w
    if left:
        if right:
            return w, vl, vr
        return w, vl
    return w, vr
def eigh(a, b=None, lower=True, eigvals_only=False, overwrite_a=False,
         overwrite_b=False, turbo=True, eigvals=None, type=1,
         check_finite=True):
    """
    Solve an ordinary or generalized eigenvalue problem for a complex
    Hermitian or real symmetric matrix.

    Find eigenvalues w and optionally eigenvectors v of matrix `a`, where
    `b` is positive definite::

                      a v[:,i] = w[i] b v[:,i]
        v[i,:].conj() a v[:,i] = w[i]
        v[i,:].conj() b v[:,i] = 1

    Parameters
    ----------
    a : (M, M) array_like
        A complex Hermitian or real symmetric matrix whose eigenvalues and
        eigenvectors will be computed.
    b : (M, M) array_like, optional
        A complex Hermitian or real symmetric definite positive matrix in.
        If omitted, identity matrix is assumed.
    lower : bool, optional
        Whether the pertinent array data is taken from the lower or upper
        triangle of `a`. (Default: lower)
    eigvals_only : bool, optional
        Whether to calculate only eigenvalues and no eigenvectors.
        (Default: both are calculated)
    turbo : bool, optional
        Use divide and conquer algorithm (faster but expensive in memory,
        only for generalized eigenvalue problem and if eigvals=None)
    eigvals : tuple (lo, hi), optional
        Indexes of the smallest and largest (in ascending order) eigenvalues
        and corresponding eigenvectors to be returned: 0 <= lo <= hi <= M-1.
        If omitted, all eigenvalues and eigenvectors are returned.
    type : int, optional
        Specifies the problem type to be solved:
           type = 1: a   v[:,i] = w[i] b v[:,i]
           type = 2: a b v[:,i] = w[i]   v[:,i]
           type = 3: b a v[:,i] = w[i]   v[:,i]
    overwrite_a : bool, optional
        Whether to overwrite data in `a` (may improve performance)
    overwrite_b : bool, optional
        Whether to overwrite data in `b` (may improve performance)
    check_finite : bool, optional
        Whether to check that the input matrices contain only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination) if the inputs do contain infinities or NaNs.

    Returns
    -------
    w : (N,) float ndarray
        The N (1<=N<=M) selected eigenvalues, in ascending order, each
        repeated according to its multiplicity.
    v : (M, N) complex ndarray
        (if eigvals_only == False)

        The normalized selected eigenvector corresponding to the
        eigenvalue w[i] is the column v[:,i].

        Normalization:

            type 1 and 3: v.conj() a      v  = w
            type 2: inv(v).conj() a  inv(v) = w
            type = 1 or 2: v.conj() b      v  = I
            type = 3: v.conj() inv(b) v  = I

    Raises
    ------
    LinAlgError :
        If eigenvalue computation does not converge,
        an error occurred, or b matrix is not definite positive. Note that
        if input matrices are not symmetric or hermitian, no error is reported
        but results will be wrong.

    See Also
    --------
    eig : eigenvalues and right eigenvectors for non-symmetric arrays
    """
    a1 = _asarray_validated(a, check_finite=check_finite)
    if len(a1.shape) != 2 or a1.shape[0] != a1.shape[1]:
        raise ValueError('expected square matrix')
    overwrite_a = overwrite_a or (_datacopied(a1, a))
    # cplx selects the Hermitian ('he*') vs symmetric ('sy*') LAPACK family.
    if iscomplexobj(a1):
        cplx = True
    else:
        cplx = False
    if b is not None:
        b1 = _asarray_validated(b, check_finite=check_finite)
        overwrite_b = overwrite_b or _datacopied(b1, b)
        if len(b1.shape) != 2 or b1.shape[0] != b1.shape[1]:
            raise ValueError('expected square matrix')

        if b1.shape != a1.shape:
            raise ValueError("wrong b dimensions %s, should "
                             "be %s" % (str(b1.shape), str(a1.shape)))
        if iscomplexobj(b1):
            cplx = True
        else:
            cplx = cplx or False
    else:
        b1 = None

    # Set job for fortran routines
    _job = (eigvals_only and 'N') or 'V'

    # port eigenvalue range from python to fortran convention
    if eigvals is not None:
        lo, hi = eigvals
        if lo < 0 or hi >= a1.shape[0]:
            raise ValueError('The eigenvalue range specified is not valid.\n'
                             'Valid range is [%s,%s]' % (0, a1.shape[0]-1))
        lo += 1
        hi += 1
        eigvals = (lo, hi)

    # set lower
    if lower:
        uplo = 'L'
    else:
        uplo = 'U'

    # fix prefix for lapack routines
    if cplx:
        pfx = 'he'
    else:
        pfx = 'sy'

    # Standard Eigenvalue Problem
    # Use '*evr' routines
    # FIXME: implement calculation of optimal lwork
    #        for all lapack routines
    if b1 is None:
        (evr,) = get_lapack_funcs((pfx+'evr',), (a1,))
        if eigvals is None:
            w, v, info = evr(a1, uplo=uplo, jobz=_job, range="A", il=1,
                             iu=a1.shape[0], overwrite_a=overwrite_a)
        else:
            (lo, hi) = eigvals
            w_tot, v, info = evr(a1, uplo=uplo, jobz=_job, range="I",
                                 il=lo, iu=hi, overwrite_a=overwrite_a)
            w = w_tot[0:hi-lo+1]

    # Generalized Eigenvalue Problem
    else:
        # Use '*gvx' routines if range is specified
        if eigvals is not None:
            (gvx,) = get_lapack_funcs((pfx+'gvx',), (a1, b1))
            (lo, hi) = eigvals
            w_tot, v, ifail, info = gvx(a1, b1, uplo=uplo, iu=hi,
                                        itype=type, jobz=_job, il=lo,
                                        overwrite_a=overwrite_a,
                                        overwrite_b=overwrite_b)
            w = w_tot[0:hi-lo+1]
        # Use '*gvd' routine if turbo is on and no eigvals are specified
        elif turbo:
            (gvd,) = get_lapack_funcs((pfx+'gvd',), (a1, b1))
            v, w, info = gvd(a1, b1, uplo=uplo, itype=type, jobz=_job,
                             overwrite_a=overwrite_a,
                             overwrite_b=overwrite_b)
        # Use '*gv' routine if turbo is off and no eigvals are specified
        else:
            (gv,) = get_lapack_funcs((pfx+'gv',), (a1, b1))
            v, w, info = gv(a1, b1, uplo=uplo, itype=type, jobz=_job,
                            overwrite_a=overwrite_a,
                            overwrite_b=overwrite_b)

    # Check if we had a successful exit
    if info == 0:
        if eigvals_only:
            return w
        else:
            return w, v
    elif info < 0:
        raise LinAlgError("illegal value in %i-th argument of internal"
                          " fortran routine." % (-info))
    elif info > 0 and b1 is None:
        raise LinAlgError("unrecoverable internal error.")

    # The algorithm failed to converge.
    elif 0 < info <= b1.shape[0]:
        if eigvals is not None:
            # BUG FIX: the original wrote ``"..." % nonzero(ifail)-1``;
            # because ``%`` binds tighter than ``-``, this computed
            # ``(str % ...) - 1`` and raised TypeError instead of the
            # intended LinAlgError.  Report the positions of the nonzero
            # ``ifail`` entries (ifail holds 1-based LAPACK indices of the
            # failing eigenvectors -- TODO confirm exact semantics against
            # the ?sygvx/?hegvx documentation).
            raise LinAlgError("the eigenvectors %s failed to"
                              " converge." % (nonzero(ifail)[0] - 1))
        else:
            raise LinAlgError("internal fortran routine failed to converge: "
                              "%i off-diagonal elements of an "
                              "intermediate tridiagonal form did not converge"
                              " to zero." % info)

    # This occurs when b is not positive definite
    else:
        raise LinAlgError("the leading minor of order %i"
                          " of 'b' is not positive definite. The"
                          " factorization of 'b' could not be completed"
                          " and no eigenvalues or eigenvectors were"
                          " computed." % (info-b1.shape[0]))
def eig_banded(a_band, lower=False, eigvals_only=False, overwrite_a_band=False,
               select='a', select_range=None, max_ev=0, check_finite=True):
    """
    Solve real symmetric or complex hermitian band matrix eigenvalue problem.

    Find eigenvalues w and optionally right eigenvectors v of a::

        a v[:,i] = w[i] v[:,i]
        v.H v    = identity

    The matrix a is stored in a_band either in lower diagonal or upper
    diagonal ordered form:

        a_band[u + i - j, j] == a[i,j]        (if upper form; i <= j)
        a_band[    i - j, j] == a[i,j]        (if lower form; i >= j)

    where u is the number of bands above the diagonal.

    Example of a_band (shape of a is (6,6), u=2)::

        upper form:
        *   *   a02 a13 a24 a35
        *   a01 a12 a23 a34 a45
        a00 a11 a22 a33 a44 a55

        lower form:
        a00 a11 a22 a33 a44 a55
        a10 a21 a32 a43 a54 *
        a20 a31 a42 a53 *   *

    Cells marked with * are not used.

    Parameters
    ----------
    a_band : (u+1, M) array_like
        The bands of the M by M matrix a.
    lower : bool, optional
        Is the matrix in the lower form. (Default is upper form)
    eigvals_only : bool, optional
        Compute only the eigenvalues and no eigenvectors.
        (Default: calculate also eigenvectors)
    overwrite_a_band : bool, optional
        Discard data in a_band (may enhance performance)
    select : {'a', 'v', 'i'}, optional
        Which eigenvalues to calculate

        ======  ========================================
        select  calculated
        ======  ========================================
        'a'     All eigenvalues
        'v'     Eigenvalues in the interval (min, max]
        'i'     Eigenvalues with indices min <= i <= max
        ======  ========================================
    select_range : (min, max), optional
        Range of selected eigenvalues
    max_ev : int, optional
        For select=='v', maximum number of eigenvalues expected.
        For other values of select, has no meaning.

        In doubt, leave this parameter untouched.
    check_finite : bool, optional
        Whether to check that the input matrix contains only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination) if the inputs do contain infinities or NaNs.

    Returns
    -------
    w : (M,) ndarray
        The eigenvalues, in ascending order, each repeated according to its
        multiplicity.
    v : (M, M) float or complex ndarray
        The normalized eigenvector corresponding to the eigenvalue w[i] is
        the column v[:,i].

    Raises LinAlgError if eigenvalue computation does not converge
    """
    # When only eigenvalues are wanted (or overwriting is allowed), validate
    # lazily; otherwise take an explicit copy and check finiteness by hand.
    if eigvals_only or overwrite_a_band:
        a1 = _asarray_validated(a_band, check_finite=check_finite)
        overwrite_a_band = overwrite_a_band or (_datacopied(a1, a_band))
    else:
        a1 = array(a_band)
        if issubclass(a1.dtype.type, inexact) and not isfinite(a1).all():
            raise ValueError("array must not contain infs or NaNs")
        overwrite_a_band = 1

    if len(a1.shape) != 2:
        raise ValueError('expected two-dimensional array')
    if select.lower() not in [0, 1, 2, 'a', 'v', 'i', 'all', 'value', 'index']:
        raise ValueError('invalid argument for select')
    if select.lower() in [0, 'a', 'all']:
        # All eigenvalues: use the divide-and-conquer *bevd drivers.
        # Complex dtypes ('GFD') take the Hermitian driver, real the
        # symmetric one.
        if a1.dtype.char in 'GFD':
            bevd, = get_lapack_funcs(('hbevd',), (a1,))
            # FIXME: implement this somewhen, for now go with builtin values
            # FIXME: calc optimal lwork by calling ?hbevd(lwork=-1)
            #        or by using calc_lwork.f ???
            # lwork = calc_lwork.hbevd(bevd.typecode, a1.shape[0], lower)
            internal_name = 'hbevd'
        else:  # a1.dtype.char in 'fd':
            bevd, = get_lapack_funcs(('sbevd',), (a1,))
            # FIXME: implement this somewhen, for now go with builtin values
            #         see above
            # lwork = calc_lwork.sbevd(bevd.typecode, a1.shape[0], lower)
            internal_name = 'sbevd'
        w, v, info = bevd(a1, compute_v=not eigvals_only,
                          lower=lower, overwrite_ab=overwrite_a_band)
    if select.lower() in [1, 2, 'i', 'v', 'index', 'value']:
        # calculate certain range only
        if select.lower() in [2, 'i', 'index']:
            select = 2
            vl, vu, il, iu = 0.0, 0.0, min(select_range), max(select_range)
            if min(il, iu) < 0 or max(il, iu) >= a1.shape[1]:
                raise ValueError('select_range out of bounds')
            max_ev = iu - il + 1
        else:  # 1, 'v', 'value'
            select = 1
            vl, vu, il, iu = min(select_range), max(select_range), 0, 0
            if max_ev == 0:
                max_ev = a_band.shape[1]
        if eigvals_only:
            max_ev = 1
        # calculate optimal abstol for dsbevx (see manpage)
        if a1.dtype.char in 'fF':  # single precision
            lamch, = get_lapack_funcs(('lamch',), (array(0, dtype='f'),))
        else:
            lamch, = get_lapack_funcs(('lamch',), (array(0, dtype='d'),))
        abstol = 2 * lamch('s')
        if a1.dtype.char in 'GFD':
            bevx, = get_lapack_funcs(('hbevx',), (a1,))
            internal_name = 'hbevx'
        else:  # a1.dtype.char in 'gfd'
            bevx, = get_lapack_funcs(('sbevx',), (a1,))
            internal_name = 'sbevx'
        # il+1, iu+1: translate python indexing (0 ... N-1) into Fortran
        # indexing (1 ... N)
        w, v, m, ifail, info = bevx(a1, vl, vu, il+1, iu+1,
                                    compute_v=not eigvals_only,
                                    mmax=max_ev,
                                    range=select, lower=lower,
                                    overwrite_ab=overwrite_a_band,
                                    abstol=abstol)
        # crop off w and v
        w = w[:m]
        if not eigvals_only:
            v = v[:, :m]
    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal %s' %
                         (-info, internal_name))
    if info > 0:
        raise LinAlgError("eig algorithm did not converge")

    if eigvals_only:
        return w
    return w, v
def eigvals(a, b=None, overwrite_a=False, check_finite=True):
    """
    Compute eigenvalues from an ordinary or generalized eigenvalue problem.

    Find eigenvalues of a general matrix::

        a   vr[:,i] = w[i]        b   vr[:,i]

    Parameters
    ----------
    a : (M, M) array_like
        A complex or real matrix whose eigenvalues and eigenvectors
        will be computed.
    b : (M, M) array_like, optional
        Right-hand side matrix in a generalized eigenvalue problem.
        If omitted, identity matrix is assumed.
    overwrite_a : bool, optional
        Whether to overwrite data in a (may improve performance)
    check_finite : bool, optional
        Whether to check that the input matrices contain only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination) if the inputs do contain infinities or NaNs.

    Returns
    -------
    w : (M,) double or complex ndarray
        The eigenvalues, each repeated according to its multiplicity,
        but not in any specific order.

    Raises
    ------
    LinAlgError
        If eigenvalue computation does not converge

    See Also
    --------
    eigvalsh : eigenvalues of symmetric or Hermitian arrays,
    eig : eigenvalues and right eigenvectors of general arrays.
    eigh : eigenvalues and eigenvectors of symmetric/Hermitian arrays.
    """
    # Thin wrapper around eig() with both eigenvector computations
    # switched off; all other arguments are forwarded unchanged.
    return eig(a, b=b, left=0, right=0,
               overwrite_a=overwrite_a, check_finite=check_finite)
def eigvalsh(a, b=None, lower=True, overwrite_a=False,
             overwrite_b=False, turbo=True, eigvals=None, type=1,
             check_finite=True):
    """
    Solve an ordinary or generalized eigenvalue problem for a complex
    Hermitian or real symmetric matrix.

    Find eigenvalues w of matrix a, where b is positive definite::

                      a v[:,i] = w[i] b v[:,i]
        v[i,:].conj() a v[:,i] = w[i]
        v[i,:].conj() b v[:,i] = 1

    Parameters
    ----------
    a : (M, M) array_like
        A complex Hermitian or real symmetric matrix whose eigenvalues
        will be computed.
    b : (M, M) array_like, optional
        A complex Hermitian or real symmetric definite positive matrix.
        If omitted, identity matrix is assumed.
    lower : bool, optional
        Whether the pertinent array data is taken from the lower or upper
        triangle of `a`. (Default: lower)
    turbo : bool, optional
        Use divide and conquer algorithm (faster but expensive in memory,
        only for generalized eigenvalue problem and if eigvals=None)
    eigvals : tuple (lo, hi), optional
        Indexes of the smallest and largest (in ascending order) eigenvalues
        to be returned: 0 <= lo < hi <= M-1.
        If omitted, all eigenvalues are returned.
    type : int, optional
        Specifies the problem type to be solved:

            type = 1: a   v[:,i] = w[i] b v[:,i]
            type = 2: a b v[:,i] = w[i]   v[:,i]
            type = 3: b a v[:,i] = w[i]   v[:,i]
    overwrite_a : bool, optional
        Whether to overwrite data in `a` (may improve performance)
    overwrite_b : bool, optional
        Whether to overwrite data in `b` (may improve performance)
    check_finite : bool, optional
        Whether to check that the input matrices contain only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination) if the inputs do contain infinities or NaNs.

    Returns
    -------
    w : (N,) float ndarray
        The N (1<=N<=M) selected eigenvalues, in ascending order, each
        repeated according to its multiplicity.

    Raises
    ------
    LinAlgError
        If eigenvalue computation does not converge, an error occurred, or
        b matrix is not definite positive. Note that if input matrices are
        not symmetric or hermitian, no error is reported but results will
        be wrong.

    See Also
    --------
    eigvals : eigenvalues of general arrays
    eigh : eigenvalues and right eigenvectors for symmetric/Hermitian arrays
    eig : eigenvalues and right eigenvectors for non-symmetric arrays
    """
    # All of the work happens in eigh; eigvals_only=True suppresses the
    # eigenvector computation so only w is returned.
    return eigh(a, b=b, lower=lower, eigvals_only=True,
                overwrite_a=overwrite_a, overwrite_b=overwrite_b,
                turbo=turbo, eigvals=eigvals, type=type,
                check_finite=check_finite)
def eigvals_banded(a_band, lower=False, overwrite_a_band=False,
                   select='a', select_range=None, check_finite=True):
    """
    Solve real symmetric or complex hermitian band matrix eigenvalue problem.

    Find eigenvalues w of a::

        a v[:,i] = w[i] v[:,i]
        v.H v    = identity

    The matrix a is stored in a_band either in lower diagonal or upper
    diagonal ordered form:

        a_band[u + i - j, j] == a[i,j] (if upper form; i <= j)
        a_band[    i - j, j] == a[i,j] (if lower form; i >= j)

    where u is the number of bands above the diagonal.

    Example of a_band (shape of a is (6,6), u=2)::

        upper form:
        *   *   a02 a13 a24 a35
        *   a01 a12 a23 a34 a45
        a00 a11 a22 a33 a44 a55

        lower form:
        a00 a11 a22 a33 a44 a55
        a10 a21 a32 a43 a54 *
        a20 a31 a42 a53 *   *

    Cells marked with * are not used.

    Parameters
    ----------
    a_band : (u+1, M) array_like
        The bands of the M by M matrix a.
    lower : bool, optional
        Is the matrix in the lower form. (Default is upper form)
    overwrite_a_band : bool, optional
        Discard data in a_band (may enhance performance)
    select : {'a', 'v', 'i'}, optional
        Which eigenvalues to calculate

        ======  ========================================
        select  calculated
        ======  ========================================
        'a'     All eigenvalues
        'v'     Eigenvalues in the interval (min, max]
        'i'     Eigenvalues with indices min <= i <= max
        ======  ========================================
    select_range : (min, max), optional
        Range of selected eigenvalues
    check_finite : bool, optional
        Whether to check that the input matrix contains only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination) if the inputs do contain infinities or NaNs.

    Returns
    -------
    w : (M,) ndarray
        The eigenvalues, in ascending order, each repeated according to its
        multiplicity.

    Raises
    ------
    LinAlgError
        If eigenvalue computation does not converge.

    See Also
    --------
    eig_banded : eigenvalues and right eigenvectors for symmetric/Hermitian
        band matrices
    eigvals : eigenvalues of general arrays
    eigh : eigenvalues and right eigenvectors for symmetric/Hermitian arrays
    eig : eigenvalues and right eigenvectors for non-symmetric arrays
    """
    # eigvals_only is passed as a proper bool (the original used the
    # integer literal 1); behavior is unchanged, the flag just suppresses
    # the eigenvector computation in eig_banded.
    return eig_banded(a_band, lower=lower, eigvals_only=True,
                      overwrite_a_band=overwrite_a_band, select=select,
                      select_range=select_range, check_finite=check_finite)
# numpy dtype.char codes that this module treats as double precision
# ('i'/'l' are integer codes -- NOTE(review): presumably promoted to
# float64 by the consuming cast logic; confirm against the callers).
_double_precision = ['i', 'l', 'd']
def hessenberg(a, calc_q=False, overwrite_a=False, check_finite=True):
    """
    Compute Hessenberg form of a matrix.

    The Hessenberg decomposition is::

        A = Q H Q^H

    where `Q` is unitary/orthogonal and `H` has only zero elements below
    the first sub-diagonal.

    Parameters
    ----------
    a : (M, M) array_like
        Matrix to bring into Hessenberg form.
    calc_q : bool, optional
        Whether to compute the transformation matrix. Default is False.
    overwrite_a : bool, optional
        Whether to overwrite `a`; may improve performance.
        Default is False.
    check_finite : bool, optional
        Whether to check that the input matrix contains only finite numbers.
        Disabling may give a performance gain, but may result in problems
        (crashes, non-termination) if the inputs do contain infinities or NaNs.

    Returns
    -------
    H : (M, M) ndarray
        Hessenberg form of `a`.
    Q : (M, M) ndarray
        Unitary/orthogonal similarity transformation matrix ``A = Q H Q^H``.
        Only returned if ``calc_q=True``.
    """
    a1 = _asarray_validated(a, check_finite=check_finite)
    if len(a1.shape) != 2 or (a1.shape[0] != a1.shape[1]):
        raise ValueError('expected square matrix')
    # Safe to overwrite if validation already produced a fresh copy.
    overwrite_a = overwrite_a or (_datacopied(a1, a))
    # if 2x2 or smaller: already in Hessenberg
    if a1.shape[0] <= 2:
        if calc_q:
            # NOTE(review): Q is the identity here, with numpy.eye's default
            # dtype (float64) rather than a1's -- confirm callers do not
            # rely on matching dtypes in this small-matrix fast path.
            return a1, numpy.eye(a1.shape[0])
        return a1
    gehrd, gebal, gehrd_lwork = get_lapack_funcs(('gehrd', 'gebal',
                                                  'gehrd_lwork'), (a1,))
    # gebal balances the matrix (scaling only, permute=0) before reduction.
    ba, lo, hi, pivscale, info = gebal(a1, permute=0, overwrite_a=overwrite_a)
    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal gebal '
                         '(hessenberg)' % -info)
    n = len(a1)
    # Workspace-size query for gehrd before the actual reduction call.
    lwork = _compute_lwork(gehrd_lwork, ba.shape[0], lo=lo, hi=hi)
    # gehrd reduces the balanced matrix to upper Hessenberg form; the
    # elementary reflectors are stored below the sub-diagonal plus in tau.
    hq, tau, info = gehrd(ba, lo=lo, hi=hi, lwork=lwork, overwrite_a=1)
    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal gehrd '
                         '(hessenberg)' % -info)
    # Zero everything below the first sub-diagonal to expose H.
    h = numpy.triu(hq, -1)
    if not calc_q:
        return h
    # use orghr/unghr to compute q
    orghr, orghr_lwork = get_lapack_funcs(('orghr', 'orghr_lwork'), (a1,))
    lwork = _compute_lwork(orghr_lwork, n, lo=lo, hi=hi)
    q, info = orghr(a=hq, tau=tau, lo=lo, hi=hi, lwork=lwork, overwrite_a=1)
    if info < 0:
        raise ValueError('illegal value in %d-th argument of internal orghr '
                         '(hessenberg)' % -info)
    return h, q
| mit |
manaris/jythonMusic | library/jython2.5.3/Lib/test/test_jser.py | 9 | 1144 | from test_support import *
# Jython 2 regression script: round-trips a mix of Python and Java objects
# through Java object serialization and checks they come back intact.
print 'Java Serialization (test_jser.py)'
from java import io, awt
import os, sys
# Objects covering the interesting cases: a Python primitive, a Python
# container, a Python class instance with an extra attribute, and a
# native Java object.
object1 = 42
object2 = ['a', 1, 1.0]
class Foo:
    def bar(self):
        return 'bar'
object3 = Foo()
object3.baz = 99
object4 = awt.Color(1,2,3)
print 'writing'
# Serialize all four objects into a temp file under sys.prefix.
sername = os.path.join(sys.prefix, "test.ser")
fout = io.ObjectOutputStream(io.FileOutputStream(sername))
print 'Python int'
fout.writeObject(object1)
print 'Python list'
fout.writeObject(object2)
print 'Python instance'
fout.writeObject(object3)
print 'Java instance'
fout.writeObject(object4)
fout.close()
# Deserialize them back in the same order they were written.
fin = io.ObjectInputStream(io.FileInputStream(sername))
print 'reading'
iobject1 = fin.readObject()
iobject2 = fin.readObject()
iobject3 = fin.readObject()
iobject4 = fin.readObject()
fin.close()
#print iobject1, iobject2, iobject3, iobject3.__class__, iobject4
# Verify each round-tripped object equals (or behaves like) the original.
print 'Python int'
assert iobject1 == object1
print 'Python list'
assert iobject2 == object2
print 'Python instance'
assert iobject3.baz == 99
assert iobject3.bar() == 'bar'
assert iobject3.__class__ == Foo
print 'Java instance'
assert iobject4 == object4
os.remove(sername)
| gpl-3.0 |
micwypych/github-cmake-project-checker | project_checker/tests/branchtest.py | 1 | 1977 | from unittest import TestCase
from project_checker.checker.gitservice import Branches
from project_checker.checker.gitservice import RemoteBranch
from project_checker.checker.gitservice import LocalBranch
class ServiceStub:
    # Minimal stand-in for the git service dependency -- the tests below
    # only pass it through to Branches/LocalBranch/RemoteBranch, so an
    # empty object suffices.
    pass
class BranchTest(TestCase):
def test_branches_creation_no_branches(self):
servicestub = ServiceStub()
branches = Branches(servicestub)
self.assertEquals(self.empty_set(), branches.remotes_without_local())
def test_branches_creation_single_master_branch(self):
servicestub = ServiceStub()
branches = Branches(servicestub, local=['master'], remote=['master'])
self.assertEquals(self.empty_set(), branches.remotes_without_local())
def test_branches_creation_single_master_remote_branch_no_local(self):
servicestub = ServiceStub()
branches = Branches(servicestub, remote=['master'])
self.assertEquals(self.remotes(servicestub, names=['master']), branches.remotes_without_local())
def test_branches_creation_several_partially_matching_branches(self):
servicestub = ServiceStub()
branches = Branches(servicestub,
local=['master', 'lab1', 'lab3'],
remote=['master', 'lab1', 'lab2', 'lab5', 'lab4', 'lab9'])
self.assertEquals(self.remotes(servicestub, names=['lab2', 'lab5', 'lab4', 'lab9']),
branches.remotes_without_local())
def test_branches_creation_single_master_local_branch(self):
servicestub = ServiceStub()
branches = Branches(servicestub, local=['master'])
self.assertEquals(self.locals(servicestub, ['master']), branches.local)
def empty_set(self):
return set([])
def locals(self, servicestub, names=[]):
return set(map(lambda n: LocalBranch(n, servicestub), names))
def remotes(self, servicestub, names=[]):
return set(map(lambda n: RemoteBranch(n, servicestub), names))
| mit |
PaddlePaddle/Paddle | python/paddle/fluid/tests/unittests/test_conv3d_layer.py | 1 | 8512 | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import numpy as np
from paddle import fluid, nn
import paddle.fluid.dygraph as dg
import paddle.nn.functional as F
import paddle.fluid.initializer as I
import unittest
class Conv3DTestCase(unittest.TestCase):
    """Checks that fluid.layers.conv3d, F.conv3d and nn.Conv3D agree.

    Each instance holds one convolution configuration (shapes, padding,
    stride, dilation, groups, ...); runTest compares the three APIs on
    the same random data for every available place.
    """

    def __init__(self,
                 methodName='runTest',
                 batch_size=4,
                 spartial_shape=(8, 8, 8),
                 num_channels=6,
                 num_filters=8,
                 filter_size=3,
                 padding=0,
                 stride=1,
                 dilation=1,
                 groups=1,
                 no_bias=False,
                 data_format="NCDHW",
                 dtype="float32"):
        super(Conv3DTestCase, self).__init__(methodName)
        self.batch_size = batch_size
        self.num_channels = num_channels
        self.num_filters = num_filters
        self.spartial_shape = spartial_shape
        self.filter_size = filter_size
        self.padding = padding
        self.stride = stride
        self.dilation = dilation
        self.groups = groups
        self.no_bias = no_bias
        self.data_format = data_format
        self.dtype = dtype

    def setUp(self):
        # Build random input/weight/bias arrays matching the data layout.
        self.channel_last = self.data_format == "NDHWC"
        if self.channel_last:
            input_shape = (self.batch_size, ) + self.spartial_shape + (
                self.num_channels, )
        else:
            input_shape = (self.batch_size, self.num_channels
                           ) + self.spartial_shape
        self.input = np.random.randn(*input_shape).astype(self.dtype)
        if isinstance(self.filter_size, int):
            filter_size = [self.filter_size] * 3
        else:
            filter_size = self.filter_size
        self.weight_shape = weight_shape = (self.num_filters, self.num_channels
                                            // self.groups) + tuple(filter_size)
        self.weight = np.random.uniform(
            -1, 1, size=weight_shape).astype(self.dtype)
        if not self.no_bias:
            self.bias = np.random.uniform(
                -1, 1, size=(self.num_filters, )).astype(self.dtype)
        else:
            self.bias = None

    def fluid_layer(self, place):
        """Run the configuration through fluid.layers.conv3d (static graph)."""
        main = fluid.Program()
        start = fluid.Program()
        with fluid.unique_name.guard():
            with fluid.program_guard(main, start):
                input_shape = (-1, -1, -1, -1, self.num_channels) \
                    if self.channel_last else (-1, self.num_channels, -1, -1, -1)
                x_var = fluid.data("input", input_shape, dtype=self.dtype)
                weight_attr = I.NumpyArrayInitializer(self.weight)
                if self.bias is None:
                    bias_attr = False
                else:
                    bias_attr = I.NumpyArrayInitializer(self.bias)
                y_var = fluid.layers.conv3d(
                    x_var,
                    self.num_filters,
                    self.filter_size,
                    padding=self.padding,
                    stride=self.stride,
                    dilation=self.dilation,
                    groups=self.groups,
                    param_attr=weight_attr,
                    bias_attr=bias_attr,
                    data_format=self.data_format)
                feed_dict = {"input": self.input}
                exe = fluid.Executor(place)
                exe.run(start)
                y_np, = exe.run(main, feed=feed_dict, fetch_list=[y_var])
        return y_np

    def functional(self, place):
        """Run the configuration through the functional F.conv3d API."""
        main = fluid.Program()
        start = fluid.Program()
        with fluid.unique_name.guard():
            with fluid.program_guard(main, start):
                input_shape = (-1, -1, -1, -1, self.num_channels) \
                    if self.channel_last else (-1, self.num_channels, -1, -1, -1)
                x_var = fluid.data("input", input_shape, dtype=self.dtype)
                w_var = fluid.data(
                    "weight", self.weight_shape, dtype=self.dtype)
                b_var = fluid.data(
                    "bias", (self.num_filters, ), dtype=self.dtype)
                y_var = F.conv3d(
                    x_var,
                    w_var,
                    None if self.no_bias else b_var,
                    padding=self.padding,
                    stride=self.stride,
                    dilation=self.dilation,
                    groups=self.groups,
                    data_format=self.data_format)
                feed_dict = {"input": self.input, "weight": self.weight}
                if self.bias is not None:
                    feed_dict["bias"] = self.bias
                exe = fluid.Executor(place)
                exe.run(start)
                y_np, = exe.run(main, feed=feed_dict, fetch_list=[y_var])
        return y_np

    def paddle_nn_layer(self):
        """Run the configuration through the dygraph nn.Conv3D layer."""
        x_var = dg.to_variable(self.input)
        conv = nn.Conv3D(
            self.num_channels,
            self.num_filters,
            self.filter_size,
            padding=self.padding,
            stride=self.stride,
            dilation=self.dilation,
            groups=self.groups,
            data_format=self.data_format)
        conv.weight.set_value(self.weight)
        if not self.no_bias:
            conv.bias.set_value(self.bias)
        y_var = conv(x_var)
        y_np = y_var.numpy()
        return y_np

    def _test_equivalence(self, place):
        """Assert all three APIs produce (almost) identical output on `place`.

        BUG FIX: the original unconditionally rebound `place` to
        fluid.CPUPlace() here, so the CUDA pass in runTest silently
        re-ran on the CPU; the override has been removed.
        """
        result1 = self.fluid_layer(place)
        result2 = self.functional(place)
        with dg.guard(place):
            result3 = self.paddle_nn_layer()
        np.testing.assert_array_almost_equal(result1, result2)
        np.testing.assert_array_almost_equal(result2, result3)

    def runTest(self):
        place = fluid.CPUPlace()
        self._test_equivalence(place)
        if fluid.core.is_compiled_with_cuda():
            place = fluid.CUDAPlace(0)
            self._test_equivalence(place)
class Conv3DErrorTestCase(Conv3DTestCase):
    """Variant whose configuration is expected to be rejected."""

    def runTest(self):
        # Building the dygraph layer with an invalid configuration must
        # raise ValueError.
        cpu = fluid.CPUPlace()
        with dg.guard(cpu), self.assertRaises(ValueError):
            self.paddle_nn_layer()
def add_cases(suite):
    """Register the expected-success Conv3D configurations on `suite`."""
    # One kwargs dict per configuration; registration order matches the
    # original explicit addTest sequence.
    configurations = [
        dict(),
        dict(stride=[1, 2, 1], dilation=2),
        dict(stride=2, dilation=(2, 1, 2)),
        dict(padding="same", no_bias=True),
        dict(filter_size=(3, 2, 3), padding='valid'),
        dict(padding=(2, 3, 1)),
        dict(padding=[1, 2, 2, 1, 2, 3]),
        dict(padding=[[0, 0], [0, 0], [1, 2], [2, 1], [2, 2]]),
        dict(data_format="NDHWC"),
        dict(data_format="NDHWC",
             padding=[[0, 0], [1, 1], [3, 3], [2, 2], [0, 0]]),
        dict(groups=2, padding="valid"),
        dict(num_filters=6, num_channels=3, groups=3, padding="valid"),
    ]
    for kwargs in configurations:
        suite.addTest(Conv3DTestCase(methodName='runTest', **kwargs))
def add_error_cases(suite):
    """Register the configurations that must raise ValueError."""
    invalid_configurations = (
        dict(num_channels=5, groups=2),
        dict(num_channels=5, groups=2, padding=[-1, 1, 3]),
    )
    for kwargs in invalid_configurations:
        suite.addTest(Conv3DErrorTestCase(methodName='runTest', **kwargs))
def load_tests(loader, standard_tests, pattern):
    """unittest `load_tests` hook: build the suite explicitly."""
    suite = unittest.TestSuite()
    for register in (add_cases, add_error_cases):
        register(suite)
    return suite
# Allow running the file directly; normal discovery goes through load_tests.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
twitchyliquid64/misc-scripts | s3tool/boto-develop/boto/pyami/bootstrap.py | 150 | 5748 | # Copyright (c) 2006,2007 Mitch Garnaat http://garnaat.org/
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import os
import boto
from boto.utils import get_instance_metadata, get_instance_userdata
from boto.pyami.config import Config, BotoConfigPath
from boto.pyami.scriptbase import ScriptBase
import time
class Bootstrap(ScriptBase):
    """
    The Bootstrap class is instantiated and run as part of the PyAMI
    instance initialization process. The methods in this class will
    be run from the rc.local script of the instance and will be run
    as the root user.

    The main purpose of this class is to make sure the boto distribution
    on the instance is the one required.
    """

    def __init__(self):
        self.working_dir = '/mnt/pyami'
        self.write_metadata()
        super(Bootstrap, self).__init__()

    def write_metadata(self):
        """Write instance metadata/userdata into the boto config file and
        re-initialize boto.config from it."""
        # Context manager ensures the handle is closed even if a metadata
        # fetch raises part-way through (the original leaked it on error).
        with open(os.path.expanduser(BotoConfigPath), 'w') as fp:
            fp.write('[Instance]\n')
            inst_data = get_instance_metadata()
            for key in inst_data:
                fp.write('%s = %s\n' % (key, inst_data[key]))
            user_data = get_instance_userdata()
            fp.write('\n%s\n' % user_data)
            fp.write('[Pyami]\n')
            fp.write('working_dir = %s\n' % self.working_dir)
        # This file has the AWS credentials, should we lock it down?
        # os.chmod(BotoConfigPath, stat.S_IREAD | stat.S_IWRITE)
        # now that we have written the file, read it into a pyami Config object
        boto.config = Config()
        boto.init_logging()

    def create_working_dir(self):
        boto.log.info('Working directory: %s' % self.working_dir)
        if not os.path.exists(self.working_dir):
            os.mkdir(self.working_dir)

    def load_boto(self):
        """Update the boto install per the Boto/boto_update config value:
        'svn[:REV]', 'git[:branch]', or an easy_install requirement."""
        update = boto.config.get('Boto', 'boto_update', 'svn:HEAD')
        if update.startswith('svn'):
            if ':' in update:
                method, version = update.split(':')
                version = '-r%s' % version
            else:
                version = '-rHEAD'
            location = boto.config.get('Boto', 'boto_location', '/usr/local/boto')
            self.run('svn update %s %s' % (version, location))
        elif update.startswith('git'):
            location = boto.config.get('Boto', 'boto_location', '/usr/share/python-support/python-boto/boto')
            # git pull can fail transiently right after boot (network not
            # fully up yet); retry up to 10 times with a short delay.
            for _ in range(10):
                try:
                    self.run('git pull', cwd=location)
                    break
                except Exception as e:
                    boto.log.info('git pull attempt failed with the following exception. Trying again in a bit. %s', e)
                    time.sleep(2)
            if ':' in update:
                method, version = update.split(':')
            else:
                version = 'master'
            self.run('git checkout %s' % version, cwd=location)
        else:
            # first remove the symlink needed when running from subversion
            self.run('rm /usr/local/lib/python2.5/site-packages/boto')
            self.run('easy_install %s' % update)

    def fetch_s3_file(self, s3_file):
        """Download an s3:// URL into the working dir.

        Returns the local path, or None if the retrieval failed.
        """
        try:
            from boto.utils import fetch_file
            f = fetch_file(s3_file)
            path = os.path.join(self.working_dir, s3_file.split("/")[-1])
            # with-statement closes the file even if write() raises
            # (the original left the handle dangling).
            with open(path, "w") as fp:
                fp.write(f.read())
        except Exception:
            # Narrowed from a bare except so SystemExit/KeyboardInterrupt
            # are no longer swallowed; failures are still logged and
            # reported as None, preserving the best-effort contract.
            boto.log.exception('Problem Retrieving file: %s' % s3_file)
            path = None
        return path

    def load_packages(self):
        """Install the comma-separated Pyami/packages list (s3:// entries
        are fetched first)."""
        package_str = boto.config.get('Pyami', 'packages')
        if package_str:
            packages = package_str.split(',')
            for package in packages:
                package = package.strip()
                if package.startswith('s3:'):
                    package = self.fetch_s3_file(package)
                if package:
                    # if the "package" is really a .py file, it doesn't have to
                    # be installed, just being in the working dir is enough
                    if not package.endswith('.py'):
                        self.run('easy_install -Z %s' % package, exit_on_error=False)

    def main(self):
        self.create_working_dir()
        self.load_boto()
        self.load_packages()
        self.notify('Bootstrap Completed for %s' % boto.config.get_instance('instance-id'))
if __name__ == "__main__":
    # because bootstrap starts before any logging configuration can be loaded from
    # the boto config files, we will manually enable logging to /var/log/boto.log
    boto.set_file_logger('bootstrap', '/var/log/boto.log')
    # Run the full bootstrap sequence: working dir, boto update, packages.
    bs = Bootstrap()
    bs.main()
| mit |
heeraj123/oh-mainline | vendor/packages/gdata/src/gdata/tlslite/BaseDB.py | 238 | 3508 | """Base class for SharedKeyDB and VerifierDB."""
import anydbm
import thread
class BaseDB:
    """Shared storage logic for SharedKeyDB and VerifierDB.

    Wraps either an anydbm file (when a filename is given) or a plain
    in-memory dict, guarding all access with a lock.  Subclasses supply
    _getItem/_setItem/_checkItem to (de)serialize their value type.

    Fixes over the original: identity comparison with None uses `is None`
    (not `== None`), and membership uses the `in` operator instead of the
    deprecated dict.has_key() (which no longer exists on Python 3).
    """

    def __init__(self, filename, type):
        self.type = type
        self.filename = filename
        if self.filename:
            # On-disk mode: the dbm handle is created by create()/open().
            self.db = None
        else:
            # In-memory mode: ready to use immediately.
            self.db = {}
        self.lock = thread.allocate_lock()

    def create(self):
        """Create a new on-disk database.

        @raise anydbm.error: If there's a problem creating the database.
        """
        if self.filename:
            self.db = anydbm.open(self.filename, "n") #raises anydbm.error
            # Tag the file so open() can verify it is ours.
            self.db["--Reserved--type"] = self.type
            self.db.sync()
        else:
            self.db = {}

    def open(self):
        """Open a pre-existing on-disk database.

        @raise anydbm.error: If there's a problem opening the database.
        @raise ValueError: If the database is not of the right type.
        """
        if not self.filename:
            raise ValueError("Can only open on-disk databases")
        self.db = anydbm.open(self.filename, "w") #raises anydbm.error
        try:
            if self.db["--Reserved--type"] != self.type:
                raise ValueError("Not a %s database" % self.type)
        except KeyError:
            raise ValueError("Not a recognized database")

    def __getitem__(self, username):
        """Fetch and deserialize the entry for username."""
        if self.db is None:
            raise AssertionError("DB not open")
        self.lock.acquire()
        try:
            valueStr = self.db[username]
        finally:
            self.lock.release()
        # Deserialization happens outside the lock.
        return self._getItem(username, valueStr)

    def __setitem__(self, username, value):
        """Serialize and store value under username."""
        if self.db is None:
            raise AssertionError("DB not open")
        valueStr = self._setItem(username, value)
        self.lock.acquire()
        try:
            self.db[username] = valueStr
            if self.filename:
                self.db.sync()
        finally:
            self.lock.release()

    def __delitem__(self, username):
        """Remove the entry for username."""
        if self.db is None:
            raise AssertionError("DB not open")
        self.lock.acquire()
        try:
            del(self.db[username])
            if self.filename:
                self.db.sync()
        finally:
            self.lock.release()

    def __contains__(self, username):
        """Check if the database contains the specified username.

        @type username: str
        @param username: The username to check for.

        @rtype: bool
        @return: True if the database contains the username, False
        otherwise.
        """
        if self.db is None:
            raise AssertionError("DB not open")
        self.lock.acquire()
        try:
            return username in self.db
        finally:
            self.lock.release()

    def check(self, username, param):
        """Look up username and verify param against the stored value."""
        value = self.__getitem__(username)
        return self._checkItem(value, username, param)

    def keys(self):
        """Return a list of usernames in the database.

        @rtype: list
        @return: The usernames in the database.
        """
        if self.db is None:
            raise AssertionError("DB not open")
        self.lock.acquire()
        try:
            usernames = self.db.keys()
        finally:
            self.lock.release()
        # Hide the internal type-tag entry from callers.
        usernames = [u for u in usernames if not u.startswith("--Reserved--")]
        return usernames
ahaym/eden | modules/s3log.py | 17 | 11312 | # -*- coding: utf-8 -*-
""" S3 Logging Facility
@copyright: (c) 2015 Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
import logging
import sys
from gluon import current
# =============================================================================
class S3Log(object):
    """
        Simple global logging facility, called like:

            current.log.error("Something went wrong", value="Example")

        gives:

            2014-02-16 11:58:41 S3LOG ERROR: Something went wrong: Example

        Configurable in 000_config.py (set up in models/00_db.py)
        - to include caller details (file name, line number, function name):

            2014-02-16 11:58:23 (applications/eden/modules/s3/s3rest.py 477 __init__)
            ERROR: Something went wrong: Example

        - to write to console (sys.stderr), to a log file, or both.

        Configuration see modules/s3cfg.py.
    """
    def __init__(self):
        """
            Constructor

            Binds each severity method (critical/error/.../debug) either to
            the real logging implementation or to the no-op `ignore`,
            depending on the configured minimum level -- so disabled calls
            cost only a cheap function call.
        """
        settings = current.deployment_settings
        log_level = settings.get_log_level()
        if log_level is None:
            # Logging disabled altogether: every severity becomes a no-op
            self.critical = \
            self.error = \
            self.warning = \
            self.info = \
            self.debug = self.ignore
            # 100 is above CRITICAL (50), so nothing would pass the filter
            self.log_level = 100
        else:
            try:
                level = getattr(logging, log_level.upper())
            except AttributeError:
                raise SyntaxError("Invalid settings.log.level: %s" % log_level)
            self.log_level = level
            self.critical = self._critical \
                            if level <= logging.CRITICAL else self.ignore
            self.error = self._error \
                         if level <= logging.ERROR else self.ignore
            self.warning = self._warning \
                           if level <= logging.WARNING else self.ignore
            self.info = self._info \
                        if level <= logging.INFO else self.ignore
            self.debug = self._debug \
                         if level <= logging.DEBUG else self.ignore
        self.configure_logger()
    # -------------------------------------------------------------------------
    @classmethod
    def setup(cls):
        """
            Set up current.log (idempotent: only installed once)
        """
        if hasattr(current, "log"):
            return
        current.log = cls()
        return
    # -------------------------------------------------------------------------
    def configure_logger(self):
        """
            Configure output handlers (console and/or rotating log file,
            per deployment settings)
        """
        if hasattr(current, "log"):
            return
        settings = current.deployment_settings
        console = settings.get_log_console()
        logfile = settings.get_log_logfile()
        if not console and not logfile:
            # No point to log without output channel
            self.critical = \
            self.error = \
            self.warning = \
            self.info = \
            self.debug = self.ignore
            return
        logger = logging.getLogger(__name__)
        logger.propagate = False
        logger.setLevel(self.log_level)
        # Drop any handlers left over from a previous configuration
        logger.handlers = []
        m_format = "%(asctime)s %(caller)s %(levelname)s: %(message)s"
        d_format = "%Y-%m-%d %H:%M:%S"
        formatter = logging.Formatter(m_format, d_format)
        # Set up console handler
        if console:
            console_handler = logging.StreamHandler(sys.stderr)
            console_handler.setFormatter(formatter)
            console_handler.setLevel(self.log_level)
            logger.addHandler(console_handler)
        # Set up log file handler
        if logfile:
            from logging.handlers import RotatingFileHandler
            # Rotate at 1 MiB, keeping 3 backups
            MAXBYTES = 1048576
            logfile_handler = RotatingFileHandler(logfile,
                                                  maxBytes = MAXBYTES,
                                                  backupCount = 3)
            logfile_handler.setFormatter(formatter)
            logfile_handler.setLevel(self.log_level)
            logger.addHandler(logfile_handler)
        return
    # -------------------------------------------------------------------------
    @staticmethod
    def ignore(message, value=None):
        """
            Dummy to ignore messages below minimum severity level
        """
        return
    # -------------------------------------------------------------------------
    @staticmethod
    def recorder():
        """
            Return a recording facility for log messages
        """
        return S3LogRecorder()
    # -------------------------------------------------------------------------
    @staticmethod
    def _log(severity, message, value=None):
        """
            Log a message

            @param severity: the severity of the message
            @param message: the message
            @param value: message suffix (optional)
        """
        logger = logging.getLogger(__name__)
        logger.propagate = False
        msg = "%s: %s" % (message, value) if value else message
        # "caller" feeds the %(caller)s field of the log format
        extra = {"caller": "S3LOG"}
        if current.deployment_settings.get_log_caller_info():
            caller = logger.findCaller()
            if caller:
                # NOTE(review): formats exactly three fields -- confirm the
                # findCaller() tuple arity on the deployed Python version
                extra = {"caller": "(%s %s %s)" % caller}
        logger.log(severity, msg, extra=extra)
        return
    # -------------------------------------------------------------------------
    @classmethod
    def _critical(cls, message, value=None):
        """
            Log a critical message (highest severity level),
            called via current.log.critical()

            @param message: the message
            @param value: message suffix (optional)
        """
        cls._log(logging.CRITICAL, message, value=value)
    # -------------------------------------------------------------------------
    @classmethod
    def _error(cls, message, value=None):
        """
            Log an error message,
            called via current.log.error()

            @param message: the message
            @param value: message suffix (optional)
        """
        cls._log(logging.ERROR, message, value=value)
    # -------------------------------------------------------------------------
    @classmethod
    def _warning(cls, message, value=None):
        """
            Log a warning message,
            called via current.log.warning()

            @param message: the message
            @param value: message suffix (optional)
        """
        cls._log(logging.WARNING, message, value=value)
    # -------------------------------------------------------------------------
    @classmethod
    def _info(cls, message, value=None):
        """
            Log an general info message,
            called via current.log.info()

            @param message: the message
            @param value: message suffix (optional)
        """
        cls._log(logging.INFO, message, value=value)
    # -------------------------------------------------------------------------
    @classmethod
    def _debug(cls, message, value=None):
        """
            Log a detailed debug message (lowest severity level),
            called via current.log.debug()

            @param message: the message
            @param value: message suffix (optional)
        """
        cls._log(logging.DEBUG, message, value=value)
# =============================================================================
class S3LogRecorder(object):
    """
        S3Log recorder, simple facility to record log messages for tests

        Start:
            recorder = current.log.recorder()
        Read out messages:
            messages = recorder.read()
        Stop recording:
            recorder.stop()
        Re-start recording:
            recorder.listen()
        Clear messages buffer:
            recorder.clear()
    """

    def __init__(self):
        self.handler = None
        self.strbuf = None
        self.listen()

    # -------------------------------------------------------------------------
    def listen(self):
        """ Start recording S3Log messages """
        if self.handler is not None:
            return
        strbuf = self.strbuf
        if strbuf is None:
            # BUG FIX: the original used a bare "except:" and fell back only
            # to the Python-2 StringIO module, which does not exist on
            # Python 3; catch ImportError narrowly and fall back to
            # io.StringIO as a last resort.
            try:
                from cStringIO import StringIO
            except ImportError:
                try:
                    from StringIO import StringIO
                except ImportError:
                    from io import StringIO
            strbuf = StringIO()
        # Attach a stream handler that writes into the string buffer
        handler = logging.StreamHandler(strbuf)
        logger = logging.getLogger(__name__)
        logger.addHandler(handler)
        self.handler = handler
        self.strbuf = strbuf
        return

    # -------------------------------------------------------------------------
    def read(self):
        """ Read out recorded S3Log messages """
        strbuf = self.strbuf
        if strbuf is None:
            return ""
        handler = self.handler
        if handler is not None:
            # Make sure buffered records reach the string buffer
            handler.flush()
        return strbuf.getvalue()

    # -------------------------------------------------------------------------
    def stop(self):
        """ Stop recording S3Log messages (and return the messages) """
        handler = self.handler
        if handler is not None:
            logger = logging.getLogger(__name__)
            logger.removeHandler(handler)
            handler.close()
            self.handler = None
        strbuf = self.strbuf
        if strbuf is not None:
            return strbuf.getvalue()
        else:
            return ""

    # -------------------------------------------------------------------------
    def clear(self):
        """ Clear the messages buffer """
        if self.handler is not None:
            # Temporarily detach so the buffer can be swapped safely
            on = True
            self.stop()
        else:
            on = False
        strbuf = self.strbuf
        if strbuf is not None:
            strbuf.close()
            self.strbuf = None
        if on:
            self.listen()
# END =========================================================================
| mit |
snf/servo | components/script/dom/bindings/codegen/parser/tests/test_union_nullable.py | 276 | 1292 | def WebIDLTest(parser, harness):
threw = False
try:
parser.parse("""
interface OneNullableInUnion {
void foo((object? or DOMString?) arg);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw,
"Two nullable member types of a union should have thrown.")
parser.reset()
threw = False
try:
parser.parse("""
interface NullableInNullableUnion {
void foo((object? or DOMString)? arg);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw,
"A nullable union type with a nullable member type should have "
"thrown.")
parser.reset()
threw = False
try:
parser.parse("""
interface NullableInUnionNullableUnionHelper {
};
interface NullableInUnionNullableUnion {
void foo(((object? or DOMString) or NullableInUnionNullableUnionHelper)? arg);
};
""")
results = parser.finish()
except:
threw = True
harness.ok(threw,
"A nullable union type with a nullable member type should have "
"thrown.")
| mpl-2.0 |
kvar/ansible | test/units/modules/remote_management/oneview/test_oneview_ethernet_network_info.py | 21 | 3595 | # Copyright (c) 2016-2017 Hewlett Packard Enterprise Development LP
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from units.compat import unittest
from oneview_module_loader import EthernetNetworkInfoModule
from hpe_test_utils import FactsParamsTestCase
ERROR_MSG = 'Fake message error'
PARAMS_GET_ALL = dict(
config='config.json',
name=None
)
PARAMS_GET_BY_NAME = dict(
config='config.json',
name="Test Ethernet Network",
options=[]
)
PARAMS_GET_BY_NAME_WITH_OPTIONS = dict(
config='config.json',
name="Test Ethernet Network",
options=['associatedProfiles', 'associatedUplinkGroups']
)
PRESENT_ENETS = [{
"name": "Test Ethernet Network",
"uri": "/rest/ethernet-networks/d34dcf5e-0d8e-441c-b00d-e1dd6a067188"
}]
ENET_ASSOCIATED_UPLINK_GROUP_URIS = [
"/rest/uplink-sets/c6bf9af9-48e7-4236-b08a-77684dc258a5",
"/rest/uplink-sets/e2f0031b-52bd-4223-9ac1-d91cb519d548"
]
ENET_ASSOCIATED_PROFILE_URIS = [
"/rest/server-profiles/83e2e117-59dc-4e33-9f24-462af951cbbe",
"/rest/server-profiles/57d3af2a-b6d2-4446-8645-f38dd808ea4d"
]
ENET_ASSOCIATED_UPLINK_GROUPS = [dict(uri=ENET_ASSOCIATED_UPLINK_GROUP_URIS[0], name='Uplink Set 1'),
dict(uri=ENET_ASSOCIATED_UPLINK_GROUP_URIS[1], name='Uplink Set 2')]
ENET_ASSOCIATED_PROFILES = [dict(uri=ENET_ASSOCIATED_PROFILE_URIS[0], name='Server Profile 1'),
dict(uri=ENET_ASSOCIATED_PROFILE_URIS[1], name='Server Profile 2')]
class EthernetNetworkInfoSpec(unittest.TestCase,
FactsParamsTestCase
):
def setUp(self):
self.configure_mocks(self, EthernetNetworkInfoModule)
self.ethernet_networks = self.mock_ov_client.ethernet_networks
FactsParamsTestCase.configure_client_mock(self, self.ethernet_networks)
def test_should_get_all_enets(self):
self.ethernet_networks.get_all.return_value = PRESENT_ENETS
self.mock_ansible_module.params = PARAMS_GET_ALL
EthernetNetworkInfoModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ethernet_networks=(PRESENT_ENETS)
)
def test_should_get_enet_by_name(self):
self.ethernet_networks.get_by.return_value = PRESENT_ENETS
self.mock_ansible_module.params = PARAMS_GET_BY_NAME
EthernetNetworkInfoModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ethernet_networks=(PRESENT_ENETS)
)
def test_should_get_enet_by_name_with_options(self):
self.ethernet_networks.get_by.return_value = PRESENT_ENETS
self.ethernet_networks.get_associated_profiles.return_value = ENET_ASSOCIATED_PROFILE_URIS
self.ethernet_networks.get_associated_uplink_groups.return_value = ENET_ASSOCIATED_UPLINK_GROUP_URIS
self.mock_ov_client.server_profiles.get.side_effect = ENET_ASSOCIATED_PROFILES
self.mock_ov_client.uplink_sets.get.side_effect = ENET_ASSOCIATED_UPLINK_GROUPS
self.mock_ansible_module.params = PARAMS_GET_BY_NAME_WITH_OPTIONS
EthernetNetworkInfoModule().run()
self.mock_ansible_module.exit_json.assert_called_once_with(
changed=False,
ethernet_networks=PRESENT_ENETS,
enet_associated_profiles=ENET_ASSOCIATED_PROFILES,
enet_associated_uplink_groups=ENET_ASSOCIATED_UPLINK_GROUPS
)
if __name__ == '__main__':
unittest.main()
| gpl-3.0 |
malcolmgreaves/support-tools | wiki_to_md/wiki2gfm_test.py | 136 | 31840 | # Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for wiki2gfm."""
import codecs
import StringIO
import unittest
from impl import converter
from impl import formatting_handler
from impl import pragma_handler
class BaseTest(unittest.TestCase):
"""Base test for wiki2gfm tests."""
def setUp(self):
"""Create a base test."""
self.warnings = []
self.output = StringIO.StringIO()
self.pragma_handler = pragma_handler.PragmaHandler(self._TrackWarning)
self.formatting_handler = formatting_handler.FormattingHandler(
self._TrackWarning,
project="test",
issue_map={123: "https://github.com/abcxyz/test/issues/789"},
symmetric_headers=False)
self.converter = converter.Converter(
self.pragma_handler,
self.formatting_handler,
self._TrackWarning,
project="test",
wikipages=["TestPage"])
def assertOutput(self, expected_output):
"""Assert that specific output was written.
Args:
expected_output: The expected value of the output.
"""
self.assertEquals(expected_output, self.output.getvalue())
def assertNoOutput(self, expected_output):
self.assertNotEqual(expected_output, self.output.getvalue())
def assertWarning(self, warning_contents, occurrences=1):
"""Assert that a warning was issued containing the given contents.
This searches all tracked warnings for the contents.
Args:
warning_contents: Text that the warning was expected to contain.
occurrences: The number of occurrences of the warning contents.
"""
occurrences_found = 0
for warning in self.warnings:
if warning_contents in warning[1]:
occurrences_found += 1
if occurrences_found != occurrences:
self.fail("Failed to find '{0}' in {1} warnings (found it in {2})."
.format(warning_contents, occurrences, occurrences_found))
def assertNoWarnings(self):
"""Assert that no warnings were issued."""
self.assertListEqual([], self.warnings)
def _TrackWarning(self, input_line, message):
"""Track a warning by storing it in memory.
Args:
input_line: Line the warning was issued on.
message: The warning message.
"""
self.warnings.append((input_line, message))
class TestPragmaHandler(BaseTest):
"""Tests the pragma handler."""
def testSummaryPragmaGivesWarning(self):
self.pragma_handler.HandlePragma(1, self.output, "summary", "abc")
self.assertWarning("summary")
def testSidebarPragmaGivesWarning(self):
self.pragma_handler.HandlePragma(1, self.output, "sidebar", "abc")
self.assertWarning("sidebar")
def testUnknownPragmaGivesWarning(self):
self.pragma_handler.HandlePragma(1, self.output, "fail!", "abc")
self.assertWarning("fail!")
class TestFormattingHandler(BaseTest):
"""Tests the formatting handler."""
def testHandleHeaderOpen(self):
self.formatting_handler.HandleHeaderOpen(1, self.output, 3)
self.assertOutput("### ")
self.assertNoWarnings()
def testHandleHeaderOpenInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleHeaderOpen(1, self.output, 3)
self.assertOutput("<h3>")
self.assertNoWarnings()
def testHandleHeaderClose(self):
self.formatting_handler.HandleHeaderClose(1, self.output, 3)
self.assertOutput("") # No header closing markup by default.
self.assertNoWarnings()
def testHandleHeaderCloseInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleHeaderClose(1, self.output, 3)
self.assertOutput("</h3>")
self.assertNoWarnings()
def testHandleHeaderCloseSymmetric(self):
self.formatting_handler._symmetric_headers = True
self.formatting_handler.HandleHeaderClose(1, self.output, 3)
self.assertOutput(" ###")
self.assertNoWarnings()
def testHandleHeaderCloseSymmetricInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler._symmetric_headers = True
self.formatting_handler.HandleHeaderClose(1, self.output, 3)
self.assertOutput("</h3>")
self.assertNoWarnings()
def testHandleHRule(self):
self.formatting_handler.HandleHRule(1, self.output)
self.assertOutput("\n---\n")
self.assertNoWarnings()
def testHandleHRuleInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleHRule(1, self.output)
self.assertOutput("<hr />")
self.assertNoWarnings()
def testHandleCodeBlockOpen(self):
self.formatting_handler.HandleCodeBlockOpen(1, self.output, None)
self.assertOutput("```\n")
self.assertNoWarnings()
def testHandleCodeBlockOpenInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleCodeBlockOpen(1, self.output, None)
self.assertOutput("<pre><code>")
self.assertWarning("Code markup was used")
def testHandleCodeBlockOpenWithLanguage(self):
self.formatting_handler.HandleCodeBlockOpen(1, self.output, "idris")
self.assertOutput("```idris\n")
self.assertNoWarnings()
def testHandleCodeBlockOpenWithLanguageInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleCodeBlockOpen(1, self.output, "idris")
self.assertOutput("<pre><code>")
self.assertWarning("Code markup was used")
def testHandleCodeBlockClose(self):
self.formatting_handler.HandleCodeBlockClose(1, self.output)
self.assertOutput("```")
self.assertNoWarnings()
def testHandleCodeBlockCloseInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleCodeBlockClose(1, self.output)
self.assertOutput("</code></pre>")
self.assertNoWarnings()
def testHandleNumericList(self):
self.formatting_handler.HandleNumericListOpen(1, self.output, 1)
self.formatting_handler.HandleText(1, self.output, "a\n")
self.formatting_handler.HandleNumericListOpen(2, self.output, 1)
self.formatting_handler.HandleText(2, self.output, "b\n")
self.formatting_handler.HandleNumericListOpen(3, self.output, 2)
self.formatting_handler.HandleText(3, self.output, "c\n")
self.formatting_handler.HandleListClose(4, self.output) # Closing 2.
self.formatting_handler.HandleNumericListOpen(4, self.output, 1)
self.formatting_handler.HandleText(4, self.output, "d\n")
self.formatting_handler.HandleListClose(5, self.output) # Closing 1.
self.assertOutput(" 1. a\n 1. b\n 1. c\n 1. d\n")
self.assertNoWarnings()
def testHandleNumericListInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleNumericListOpen(1, self.output, 1)
self.formatting_handler.HandleText(1, self.output, "a\n")
self.formatting_handler.HandleNumericListOpen(2, self.output, 1)
self.formatting_handler.HandleText(2, self.output, "b\n")
self.formatting_handler.HandleNumericListOpen(3, self.output, 2)
self.formatting_handler.HandleText(3, self.output, "c\n")
self.formatting_handler.HandleListClose(4, self.output) # Closing 2.
self.formatting_handler.HandleNumericListOpen(4, self.output, 1)
self.formatting_handler.HandleText(4, self.output, "d\n")
self.formatting_handler.HandleListClose(5, self.output) # Closing 1.
self.assertOutput("<ol><li>a\n</li><li>b\n<ol><li>c\n</li></ol></li>"
"<li>d\n</li></ol>")
self.assertWarning("Numeric list markup was used", occurrences=2)
def testHandleBulletList(self):
self.formatting_handler.HandleBulletListOpen(1, self.output, 1)
self.formatting_handler.HandleText(1, self.output, "a\n")
self.formatting_handler.HandleBulletListOpen(2, self.output, 1)
self.formatting_handler.HandleText(2, self.output, "b\n")
self.formatting_handler.HandleBulletListOpen(3, self.output, 2)
self.formatting_handler.HandleText(3, self.output, "c\n")
self.formatting_handler.HandleListClose(4, self.output) # Closing 2.
self.formatting_handler.HandleBulletListOpen(4, self.output, 1)
self.formatting_handler.HandleText(4, self.output, "d\n")
self.formatting_handler.HandleListClose(5, self.output) # Closing 1.
self.assertOutput(" * a\n * b\n * c\n * d\n")
self.assertNoWarnings()
def testHandleBulletListInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleBulletListOpen(1, self.output, 1)
self.formatting_handler.HandleText(1, self.output, "a\n")
self.formatting_handler.HandleBulletListOpen(2, self.output, 1)
self.formatting_handler.HandleText(2, self.output, "b\n")
self.formatting_handler.HandleBulletListOpen(3, self.output, 2)
self.formatting_handler.HandleText(3, self.output, "c\n")
self.formatting_handler.HandleListClose(4, self.output) # Closing 2.
self.formatting_handler.HandleBulletListOpen(4, self.output, 1)
self.formatting_handler.HandleText(4, self.output, "d\n")
self.formatting_handler.HandleListClose(5, self.output) # Closing 1.
self.assertOutput("<ul><li>a\n</li><li>b\n<ul><li>c\n</li></ul></li>"
"<li>d\n</li></ul>")
self.assertWarning("Bulleted list markup was used", occurrences=2)
def testHandleBlockQuote(self):
self.formatting_handler.HandleBlockQuoteOpen(1, self.output, 1)
self.formatting_handler.HandleText(1, self.output, "a\n")
self.formatting_handler.HandleBlockQuoteOpen(2, self.output, 1)
self.formatting_handler.HandleText(2, self.output, "b\n")
self.formatting_handler.HandleBlockQuoteOpen(3, self.output, 2)
self.formatting_handler.HandleText(3, self.output, "c\n")
self.formatting_handler.HandleListClose(4, self.output) # Closing 2.
self.formatting_handler.HandleBlockQuoteOpen(4, self.output, 1)
self.formatting_handler.HandleText(4, self.output, "d\n")
self.formatting_handler.HandleListClose(5, self.output) # Closing 1.
self.assertOutput("> a\n> b\n> > c\n\n> d\n")
self.assertNoWarnings()
def testHandleBlockQuoteInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleBlockQuoteOpen(1, self.output, 1)
self.formatting_handler.HandleText(1, self.output, "a\n")
self.formatting_handler.HandleBlockQuoteOpen(2, self.output, 1)
self.formatting_handler.HandleText(2, self.output, "b\n")
self.formatting_handler.HandleBlockQuoteOpen(3, self.output, 2)
self.formatting_handler.HandleText(3, self.output, "c\n")
self.formatting_handler.HandleListClose(4, self.output) # Closing 2.
self.formatting_handler.HandleBlockQuoteOpen(4, self.output, 1)
self.formatting_handler.HandleText(4, self.output, "d\n")
self.formatting_handler.HandleListClose(5, self.output) # Closing 1.
self.assertOutput("<blockquote>a\nb<br>\n<blockquote>c\n</blockquote>"
"d\n</blockquote>")
self.assertWarning("Blockquote markup was used", occurrences=2)
def testHandleParagraphBreak(self):
self.formatting_handler.HandleText(1, self.output, "a\n")
self.formatting_handler.HandleParagraphBreak(2, self.output)
self.formatting_handler.HandleText(3, self.output, "b\n")
self.assertOutput("a\n\nb\n")
self.assertNoWarnings()
def testHandleParagraphBreakInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleText(1, self.output, "a\n")
self.formatting_handler.HandleParagraphBreak(2, self.output)
self.formatting_handler.HandleText(3, self.output, "b\n")
self.assertOutput("a\n<br>\nb<br>\n")
self.assertNoWarnings()
def testHandleBold(self):
self.formatting_handler.HandleBoldOpen(1, self.output)
self.formatting_handler.HandleText(2, self.output, "xyz")
self.formatting_handler.HandleBoldClose(3, self.output)
self.assertOutput("**xyz**")
self.assertNoWarnings()
def testHandleBoldInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleBoldOpen(1, self.output)
self.formatting_handler.HandleText(2, self.output, "xyz")
self.formatting_handler.HandleBoldClose(3, self.output)
self.assertOutput("<b>xyz</b>")
self.assertWarning("Bold markup was used")
def testHandleItalic(self):
self.formatting_handler.HandleItalicOpen(1, self.output)
self.formatting_handler.HandleText(2, self.output, "xyz")
self.formatting_handler.HandleItalicClose(3, self.output)
self.assertOutput("_xyz_")
self.assertNoWarnings()
def testHandleItalicInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleItalicOpen(1, self.output)
self.formatting_handler.HandleText(2, self.output, "xyz")
self.formatting_handler.HandleItalicClose(3, self.output)
self.assertOutput("<i>xyz</i>")
self.assertWarning("Italic markup was used")
def testHandleStrikethrough(self):
self.formatting_handler.HandleStrikethroughOpen(1, self.output)
self.formatting_handler.HandleText(2, self.output, "xyz")
self.formatting_handler.HandleStrikethroughClose(3, self.output)
self.assertOutput("~~xyz~~")
self.assertNoWarnings()
def testHandleStrikethroughInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleStrikethroughOpen(1, self.output)
self.formatting_handler.HandleText(2, self.output, "xyz")
self.formatting_handler.HandleStrikethroughClose(3, self.output)
self.assertOutput("<del>xyz</del>")
self.assertWarning("Strikethrough markup was used")
def testHandleSuperscript(self):
self.formatting_handler.HandleSuperscript(1, self.output, "xyz")
self.assertOutput("<sup>xyz</sup>")
self.assertNoWarnings()
def testHandleSuperscriptInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleSuperscript(1, self.output, "xyz")
self.assertOutput("<sup>xyz</sup>")
self.assertNoWarnings()
def testHandleSubscript(self):
self.formatting_handler.HandleSubscript(1, self.output, "xyz")
self.assertOutput("<sub>xyz</sub>")
self.assertNoWarnings()
def testHandleSubscriptInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleSubscript(1, self.output, "xyz")
self.assertOutput("<sub>xyz</sub>")
self.assertNoWarnings()
def testHandleInlineCode(self):
self.formatting_handler.HandleInlineCode(1, self.output, "xyz")
self.assertOutput("`xyz`")
self.assertNoWarnings()
def testHandleInlineCodeInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleInlineCode(1, self.output, "xyz")
self.assertOutput("<code>xyz</code>")
self.assertNoWarnings()
# Table handling is tested in the Converter tests,
# as the interactions are multiple and handled there.
def testHandleLink(self):
self.formatting_handler.HandleLink(
1, self.output, "http://example.com", None)
self.assertOutput("http://example.com")
self.assertNoWarnings()
def testHandleLinkInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleLink(
1, self.output, "http://example.com", None)
self.assertOutput("<a href='http://example.com'>http://example.com</a>")
self.assertWarning("Link markup was used")
def testHandleLinkWithDescription(self):
self.formatting_handler.HandleLink(
1, self.output, "http://example.com", "Description")
self.assertOutput("[Description](http://example.com)")
self.assertNoWarnings()
def testHandleLinkWithDescriptionInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleLink(
1, self.output, "http://example.com", "Description")
self.assertOutput("<a href='http://example.com'>Description</a>")
self.assertWarning("Link markup was used")
def testHandleLinkWithImageDescription(self):
self.formatting_handler.HandleLink(
1, self.output, "http://example.com", "http://example.com/a.png")
self.assertOutput("[](http://example.com)")
self.assertNoWarnings()
def testHandleLinkWithImageDescriptionInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleLink(
1, self.output, "http://example.com", "http://example.com/a.png")
self.assertOutput("<a href='http://example.com'>"
"<img src='http://example.com/a.png' /></a>")
self.assertWarning("Link markup was used")
def testHandleImageLink(self):
self.formatting_handler.HandleLink(
1, self.output, "http://example.com/a.png", None)
self.assertOutput("")
self.assertNoWarnings()
def testHandleImageLinkInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleLink(
1, self.output, "http://example.com/a.png", None)
self.assertOutput("<img src='http://example.com/a.png' />")
self.assertWarning("Link markup was used")
def testHandleImageLinkWithDescription(self):
self.formatting_handler.HandleLink(
1, self.output, "http://example.com/a.png", "Description")
self.assertOutput("[Description](http://example.com/a.png)")
self.assertNoWarnings()
def testHandleImageLinkWithDescriptionInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleLink(
1, self.output, "http://example.com/a.png", "Description")
self.assertOutput("<a href='http://example.com/a.png'>Description</a>")
self.assertWarning("Link markup was used")
def testHandleImageLinkWithImageDescription(self):
self.formatting_handler.HandleLink(
1, self.output, "http://example.com/a.png", "http://example.com/b.png")
self.assertOutput("]"
"(http://example.com/a.png)")
self.assertNoWarnings()
def testHandleImageLinkWithImageDescriptionInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleLink(
1, self.output, "http://example.com/a.png", "http://example.com/b.png")
self.assertOutput("<a href='http://example.com/a.png'>"
"<img src='http://example.com/b.png' /></a>")
self.assertWarning("Link markup was used")
def testHandleWiki(self):
self.formatting_handler.HandleWiki(1, self.output, "TestPage", "Test Page")
self.assertOutput("[Test Page](TestPage.md)")
self.assertNoWarnings()
def testHandleWikiInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleWiki(1, self.output, "TestPage", "Test Page")
self.assertOutput("<a href='TestPage.md'>Test Page</a>")
self.assertWarning("Link markup was used")
def testHandleIssue(self):
self.formatting_handler.HandleIssue(1, self.output, "issue ", 123)
self.assertOutput("[issue 789](https://github.com/abcxyz/test/issues/789)")
self.assertWarning("Issue 123 was auto-linked")
self.assertWarning("In the output, it has been linked to the "
"migrated issue on GitHub: 789.")
def testHandleIssueInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleIssue(1, self.output, "issue ", 123)
self.assertOutput("<a href='https://github.com/abcxyz/test/issues/789'>"
"issue 789</a>")
self.assertWarning("Link markup was used")
self.assertWarning("Issue 123 was auto-linked")
self.assertWarning("In the output, it has been linked to the "
"migrated issue on GitHub: 789.")
def testHandleIssueNotInMap(self):
self.formatting_handler.HandleIssue(1, self.output, "issue ", 456)
self.assertOutput("[issue 456](https://code.google.com/p/"
"test/issues/detail?id=456)")
self.assertWarning("Issue 456 was auto-linked")
self.assertWarning("However, it was not found in the issue migration map")
self.assertWarning("As a placeholder, the text has been modified to "
"link to the original Google Code issue page")
def testHandleIssueNotInMapInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleIssue(1, self.output, "issue ", 456)
self.assertOutput("<a href='https://code.google.com/p/"
"test/issues/detail?id=456'>issue 456</a>")
self.assertWarning("Link markup was used")
self.assertWarning("Issue 456 was auto-linked")
self.assertWarning("However, it was not found in the issue migration map")
self.assertWarning("As a placeholder, the text has been modified to "
"link to the original Google Code issue page")
def testHandleIssueNoMap(self):
self.formatting_handler._issue_map = None
self.formatting_handler.HandleIssue(1, self.output, "issue ", 456)
self.assertOutput("[issue 456](https://code.google.com/p/"
"test/issues/detail?id=456)")
self.assertWarning("Issue 456 was auto-linked")
self.assertWarning("However, no issue migration map was specified")
self.assertWarning("As a placeholder, the text has been modified to "
"link to the original Google Code issue page")
def testHandleIssueNoMapInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler._issue_map = None
self.formatting_handler.HandleIssue(1, self.output, "issue ", 456)
self.assertOutput("<a href='https://code.google.com/p/"
"test/issues/detail?id=456'>issue 456</a>")
self.assertWarning("Link markup was used")
self.assertWarning("Issue 456 was auto-linked")
self.assertWarning("However, no issue migration map was specified")
self.assertWarning("As a placeholder, the text has been modified to "
"link to the original Google Code issue page")
def testHandleIssueNotInMapNoProject(self):
self.formatting_handler._project = None
self.formatting_handler.HandleIssue(1, self.output, "issue ", 456)
self.assertOutput("issue 456 (on Google Code)")
self.assertWarning("Issue 456 was auto-linked")
self.assertWarning("However, it was not found in the issue migration map")
self.assertWarning("Additionally, because no project name was specified "
"the issue could not be linked to the original Google "
"Code issue page.")
self.assertWarning("The auto-link has been removed")
def testHandleIssueNotInMapNoProjectInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler._project = None
self.formatting_handler.HandleIssue(1, self.output, "issue ", 456)
self.assertOutput("issue 456 (on Google Code)")
self.assertWarning("Issue 456 was auto-linked")
self.assertWarning("However, it was not found in the issue migration map")
self.assertWarning("Additionally, because no project name was specified "
"the issue could not be linked to the original Google "
"Code issue page.")
self.assertWarning("The auto-link has been removed")
def testHandleIssueNoMapNoProject(self):
self.formatting_handler._issue_map = None
self.formatting_handler._project = None
self.formatting_handler.HandleIssue(1, self.output, "issue ", 456)
self.assertOutput("issue 456 (on Google Code)")
self.assertWarning("Issue 456 was auto-linked")
self.assertWarning("However, no issue migration map was specified")
self.assertWarning("Additionally, because no project name was specified "
"the issue could not be linked to the original Google "
"Code issue page.")
self.assertWarning("The auto-link has been removed")
def testHandleIssueNoMapNoProjectInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler._issue_map = None
self.formatting_handler._project = None
self.formatting_handler.HandleIssue(1, self.output, "issue ", 456)
self.assertOutput("issue 456 (on Google Code)")
self.assertWarning("Issue 456 was auto-linked")
self.assertWarning("However, no issue migration map was specified")
self.assertWarning("Additionally, because no project name was specified "
"the issue could not be linked to the original Google "
"Code issue page.")
self.assertWarning("The auto-link has been removed")
def testHandleRevision(self):
self.formatting_handler.HandleRevision(1, self.output, "revision ", 7)
self.assertOutput("[revision 7](https://code.google.com/p/"
"test/source/detail?r=7)")
self.assertWarning("Revision 7 was auto-linked")
self.assertWarning("As a placeholder, the text has been modified to "
"link to the original Google Code source page")
def testHandleRevisionInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleRevision(1, self.output, "revision ", 7)
self.assertOutput("<a href='https://code.google.com/p/"
"test/source/detail?r=7'>revision 7</a>")
self.assertWarning("Link markup was used")
self.assertWarning("Revision 7 was auto-linked")
self.assertWarning("As a placeholder, the text has been modified to "
"link to the original Google Code source page")
def testHandleRevisionNoProject(self):
self.formatting_handler._project = None
self.formatting_handler.HandleRevision(1, self.output, "revision ", 7)
self.assertOutput("revision 7 (on Google Code)")
self.assertWarning("Revision 7 was auto-linked")
self.assertWarning("Additionally, because no project name was specified "
"the revision could not be linked to the original "
"Google Code source page.")
self.assertWarning("The auto-link has been removed")
def testHandleRevisionNoProjectInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler._project = None
self.formatting_handler.HandleRevision(1, self.output, "revision ", 7)
self.assertOutput("revision 7 (on Google Code)")
self.assertWarning("Revision 7 was auto-linked")
self.assertWarning("Additionally, because no project name was specified "
"the revision could not be linked to the original "
"Google Code source page.")
self.assertWarning("The auto-link has been removed")
def testHandleInHtml(self):
self.formatting_handler.HandleHtmlOpen(
1, self.output, "tag", {"a": "1", "b": "2"}, False)
self.formatting_handler.HandleText(2, self.output, "xyz")
self.formatting_handler.HandleHtmlClose(3, self.output, "tag")
self.assertOutput("<tag a='1' b='2'>xyz</tag>")
self.assertNoWarnings()
def testHandleHtmlInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleHtmlOpen(
1, self.output, "tag", {"a": "1", "b": "2"}, False)
self.formatting_handler.HandleText(2, self.output, "xyz")
self.formatting_handler.HandleHtmlClose(3, self.output, "tag")
self.assertOutput("<tag a='1' b='2'>xyz</tag>")
self.assertNoWarnings()
def testHandleInHtmlSelfClose(self):
self.formatting_handler.HandleHtmlOpen(
1, self.output, "tag", {"a": "1", "b": "2"}, True)
self.assertOutput("<tag a='1' b='2' />")
self.assertNoWarnings()
def testHandleHtmlSelfCloseInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleHtmlOpen(
1, self.output, "tag", {"a": "1", "b": "2"}, True)
self.assertOutput("<tag a='1' b='2' />")
self.assertNoWarnings()
def testHandleGPlus(self):
self.formatting_handler.HandleGPlusOpen(1, self.output, None)
self.formatting_handler.HandleGPlusClose(1, self.output)
self.assertNoOutput("(TODO: Link to Google+ page.)")
self.assertWarning("A Google+ +1 button was embedded on this page")
def testHandleGPlusInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleGPlusOpen(1, self.output, None)
self.formatting_handler.HandleGPlusClose(1, self.output)
self.assertNoOutput("(TODO: Link to Google+ page.)")
self.assertWarning("A Google+ +1 button was embedded on this page")
def testHandleComment(self):
self.formatting_handler.HandleCommentOpen(1, self.output)
self.formatting_handler.HandleText(2, self.output, "xyz")
self.formatting_handler.HandleCommentClose(3, self.output)
self.assertOutput("<a href='Hidden comment: xyz'></a>")
self.assertWarning("A comment was used in the wiki file")
def testHandleCommentInHtml(self):
self.formatting_handler._in_html = 1
self.formatting_handler.HandleCommentOpen(1, self.output)
self.formatting_handler.HandleText(2, self.output, "xyz")
self.formatting_handler.HandleCommentClose(3, self.output)
self.assertOutput("<a href='Hidden comment: xyz'></a>")
self.assertWarning("A comment was used in the wiki file")
def testHandleVideo(self):
self.formatting_handler.HandleVideoOpen(
1, self.output, "FiARsQSlzDc", 320, 240)
self.formatting_handler.HandleVideoClose(1, self.output)
self.assertOutput("<a href='http://www.youtube.com/watch?"
"feature=player_embedded&v=FiARsQSlzDc' target='_blank'>"
"<img src='http://img.youtube.com/vi/FiARsQSlzDc/0.jpg' "
"width='320' height=240 /></a>")
self.assertWarning("GFM does not support embedding the YouTube player")
def testHandleVideoInHtml(self):
    # Same as testHandleVideo, but while inside an HTML block.
    handler = self.formatting_handler
    handler._in_html = 1
    handler.HandleVideoOpen(1, self.output, "FiARsQSlzDc", 320, 240)
    handler.HandleVideoClose(1, self.output)
    self.assertOutput("<a href='http://www.youtube.com/watch?"
                      "feature=player_embedded&v=FiARsQSlzDc' target='_blank'>"
                      "<img src='http://img.youtube.com/vi/FiARsQSlzDc/0.jpg' "
                      "width='320' height=240 /></a>")
    self.assertWarning("GFM does not support embedding the YouTube player")
def testHandleText(self):
    # Plain text passes through untouched.
    self.formatting_handler.HandleText(1, self.output, "xyz")
    self.assertOutput("xyz")
    self.assertNoWarnings()
def testHandleTextInHtml(self):
    # Plain text also passes through untouched inside an HTML block.
    handler = self.formatting_handler
    handler._in_html = 1
    handler.HandleText(1, self.output, "xyz")
    self.assertOutput("xyz")
    self.assertNoWarnings()
def testHandleEscapedText(self):
    # Markdown metacharacters are backslash-escaped outside HTML.
    self.formatting_handler.HandleEscapedText(1, self.output, "**_xyz_** <a>")
    self.assertOutput("\\*\\*\\_xyz\\_\\*\\* <a>")
    self.assertNoWarnings()
def testHandleEscapedTextInHtml(self):
    # Inside an HTML block no escaping is applied.
    handler = self.formatting_handler
    handler._in_html = 1
    handler.HandleEscapedText(1, self.output, "**_xyz_** <a>")
    self.assertOutput("**_xyz_** <a>")
    self.assertNoWarnings()
class TestConverter(BaseTest):
    """Tests the converter."""

    def testExamplePage(self):
        # End-to-end check: converting the example wiki page must produce
        # exactly the pre-rendered markdown file shipped alongside it.
        with codecs.open("example.wiki", "rU", "utf-8") as example_input, \
                codecs.open("example.md", "rU", "utf-8") as example_output:
            self.converter.Convert(example_input, self.output)
            self.assertOutput(example_output.read())
# Allow running this module directly as a test script.
if __name__ == "__main__":
    unittest.main()
| apache-2.0 |
pteichman/cobe | tests/test_tokenizers.py | 1 | 6195 | import unittest
from cobe.tokenizers import CobeStemmer, CobeTokenizer, MegaHALTokenizer
class testMegaHALTokenizer(unittest.TestCase):
    """Tests for MegaHALTokenizer's split()/join() behavior."""

    def setUp(self):
        self.tokenizer = MegaHALTokenizer()

    def testSplitEmpty(self):
        self.assertEqual(len(self.tokenizer.split("")), 0)

    def testSplitSentence(self):
        # MegaHAL upper-cases words and keeps the terminator as a token.
        words = self.tokenizer.split("hi.")
        self.assertEqual(words, ["HI", "."])

    def testSplitComma(self):
        words = self.tokenizer.split("hi, cobe")
        self.assertEqual(words, ["HI", ", ", "COBE", "."])

    def testSplitImplicitStop(self):
        # A trailing period is appended when the input has no terminator.
        words = self.tokenizer.split("hi")
        self.assertEqual(words, ["HI", "."])

    def testSplitUrl(self):
        words = self.tokenizer.split("http://www.google.com/")
        self.assertEqual(words, ["HTTP", "://", "WWW", ".", "GOOGLE", ".", "COM", "/."])

    def testSplitApostrophe(self):
        # Apostrophes are kept inside word tokens.
        words = self.tokenizer.split("hal's brain")
        self.assertEqual(words, ["HAL'S", " ", "BRAIN", "."])

        words = self.tokenizer.split("',','")
        self.assertEqual(words, ["'", ",", "'", ",", "'", "."])

    def testSplitAlphaAndNumeric(self):
        # Letter runs and digit runs become separate tokens.
        words = self.tokenizer.split("hal9000, test blah 12312")
        self.assertEqual(words, ["HAL", "9000", ", ", "TEST", " ", "BLAH", " ", "12312", "."])

        words = self.tokenizer.split("hal9000's test")
        self.assertEqual(words, ["HAL", "9000", "'S", " ", "TEST", "."])

    def testCapitalize(self):
        # join() sentence-cases its output.
        words = self.tokenizer.split("this is a test")
        self.assertEqual("This is a test.", self.tokenizer.join(words))

        words = self.tokenizer.split("A.B. Hal test test. will test")
        self.assertEqual("A.b. Hal test test. Will test.",
                         self.tokenizer.join(words))

        words = self.tokenizer.split("2nd place test")
        self.assertEqual("2Nd place test.", self.tokenizer.join(words))
class testCobeTokenizer(unittest.TestCase):
    """Tests for CobeTokenizer's split()/join() behavior.

    Unlike MegaHAL, cobe's tokenizer preserves case, keeps URLs and
    smileys as single tokens, and does not append a sentence stop.
    """

    def setUp(self):
        self.tokenizer = CobeTokenizer()

    def testSplitEmpty(self):
        self.assertEqual(len(self.tokenizer.split("")), 0)

    def testSplitSentence(self):
        words = self.tokenizer.split("hi.")
        self.assertEqual(words, ["hi", "."])

    def testSplitComma(self):
        words = self.tokenizer.split("hi, cobe")
        self.assertEqual(words, ["hi", ",", " ", "cobe"])

    def testSplitDash(self):
        words = self.tokenizer.split("hi - cobe")
        self.assertEqual(words, ["hi", " ", "-", " ", "cobe"])

    def testSplitMultipleSpacesWithDash(self):
        # NOTE(review): this case looks identical to testSplitDash; it
        # presumably once used runs of spaces that were collapsed — confirm.
        words = self.tokenizer.split("hi - cobe")
        self.assertEqual(words, ["hi", " ", "-", " ", "cobe"])

    def testSplitLeadingDash(self):
        words = self.tokenizer.split("-foo")
        self.assertEqual(words, ["-foo"])

    def testSplitLeadingSpace(self):
        words = self.tokenizer.split(" foo")
        self.assertEqual(words, ["foo"])

        words = self.tokenizer.split(" foo")
        self.assertEqual(words, ["foo"])

    def testSplitTrailingSpace(self):
        words = self.tokenizer.split("foo ")
        self.assertEqual(words, ["foo"])

        words = self.tokenizer.split("foo ")
        self.assertEqual(words, ["foo"])

    def testSplitSmiles(self):
        words = self.tokenizer.split(":)")
        self.assertEqual(words, [":)"])

        words = self.tokenizer.split(";)")
        self.assertEqual(words, [";)"])

        # not smiles
        words = self.tokenizer.split(":(")
        self.assertEqual(words, [":("])

        words = self.tokenizer.split(";(")
        self.assertEqual(words, [";("])

    def testSplitUrl(self):
        # URLs survive as a single token, whatever the scheme.
        words = self.tokenizer.split("http://www.google.com/")
        self.assertEqual(words, ["http://www.google.com/"])

        words = self.tokenizer.split("https://www.google.com/")
        self.assertEqual(words, ["https://www.google.com/"])

        # odd protocols
        words = self.tokenizer.split("cobe://www.google.com/")
        self.assertEqual(words, ["cobe://www.google.com/"])

        words = self.tokenizer.split("cobe:www.google.com/")
        self.assertEqual(words, ["cobe:www.google.com/"])

        words = self.tokenizer.split(":foo")
        self.assertEqual(words, [":", "foo"])

    def testSplitMultipleSpaces(self):
        words = self.tokenizer.split("this is a test")
        self.assertEqual(words, ["this", " ", "is", " ", "a", " ", "test"])

    def testSplitVerySadFrown(self):
        # A spaced-out frown is kept together as one token.
        words = self.tokenizer.split("testing : (")
        self.assertEqual(words, ["testing", " ", ": ("])

        words = self.tokenizer.split("testing : (")
        self.assertEqual(words, ["testing", " ", ": ("])

        words = self.tokenizer.split("testing : ( foo")
        self.assertEqual(words, ["testing", " ", ": (", " ", "foo"])

    def testSplitHyphenatedWord(self):
        words = self.tokenizer.split("test-ing")
        self.assertEqual(words, ["test-ing"])

        words = self.tokenizer.split(":-)")
        self.assertEqual(words, [":-)"])

        words = self.tokenizer.split("test-ing :-) 1-2-3")
        self.assertEqual(words, ["test-ing", " ", ":-)", " ", "1-2-3"])

    def testSplitApostrophes(self):
        words = self.tokenizer.split("don't :'(")
        self.assertEqual(words, ["don't", " ", ":'("])

    def testJoin(self):
        self.assertEqual("foo bar baz",
                         self.tokenizer.join(["foo", " ", "bar", " ", "baz"]))
class testCobeStemmer(unittest.TestCase):
    """Tests for CobeStemmer's normalization of words."""

    def setUp(self):
        self.stemmer = CobeStemmer("english")

    def testStemmer(self):
        # Basic suffix stripping.
        self.assertEqual("foo", self.stemmer.stem("foo"))
        self.assertEqual("jump", self.stemmer.stem("jumping"))
        self.assertEqual("run", self.stemmer.stem("running"))

    def testStemmerCase(self):
        # Stemming is case-insensitive and strips possessives too.
        self.assertEqual("foo", self.stemmer.stem("Foo"))
        self.assertEqual("foo", self.stemmer.stem("FOO"))

        self.assertEqual("foo", self.stemmer.stem("FOO'S"))
        self.assertEqual("foo", self.stemmer.stem("FOOING"))
        self.assertEqual("foo", self.stemmer.stem("Fooing"))
# Allow running this module directly as a test script.
if __name__ == '__main__':
    unittest.main()
| mit |
fnordahl/nova | nova/api/ec2/cloud.py | 1 | 86863 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Cloud Controller: Implementation of EC2 REST API calls, which are
dispatched to other nodes via AMQP RPC. State is via distributed
datastore.
"""
import base64
import time
from oslo_config import cfg
from oslo_log import log as logging
from oslo_log import versionutils
from oslo_utils import timeutils
import six
from nova.api.ec2 import ec2utils
from nova.api.ec2 import inst_state
from nova.api.metadata import password
from nova.api.openstack import extensions
from nova.api import validator
from nova import availability_zones
from nova import block_device
from nova.cloudpipe import pipelib
from nova import compute
from nova.compute import api as compute_api
from nova.compute import vm_states
from nova import exception
from nova.i18n import _
from nova.i18n import _LI
from nova.i18n import _LW
from nova.image import s3
from nova import network
from nova.network.security_group import neutron_driver
from nova.network.security_group import openstack_driver
from nova import objects
from nova import quota
from nova import servicegroup
from nova import utils
from nova import volume
# Options controlling how the EC2 API endpoint is advertised and reached.
ec2_opts = [
    cfg.StrOpt('ec2_host',
               default='$my_ip',
               help='The IP address of the EC2 API server'),
    cfg.StrOpt('ec2_dmz_host',
               default='$my_ip',
               help='The internal IP address of the EC2 API server'),
    cfg.IntOpt('ec2_port',
               default=8773,
               min=1,
               max=65535,
               help='The port of the EC2 API server'),
    cfg.StrOpt('ec2_scheme',
               default='http',
               choices=('http', 'https'),
               help='The protocol to use when connecting to the EC2 API '
                    'server'),
    cfg.StrOpt('ec2_path',
               default='/',
               help='The path prefix used to call the ec2 API server'),
    cfg.ListOpt('region_list',
                default=[],
                help='List of region=fqdn pairs separated by commas'),
]

CONF = cfg.CONF
CONF.register_opts(ec2_opts)
# Options defined in other nova modules that this module reads.
CONF.import_opt('my_ip', 'nova.netconf')
CONF.import_opt('vpn_key_suffix', 'nova.cloudpipe.pipelib')
CONF.import_opt('internal_service_availability_zone',
                'nova.availability_zones')

LOG = logging.getLogger(__name__)

QUOTAS = quota.QUOTAS
# EC2 ID can return the following error codes:
# http://docs.aws.amazon.com/AWSEC2/latest/APIReference/api-error-codes.html
# Validate methods are split to return valid EC2 error codes for different
# resource types
def _validate_ec2_id(val):
    """Raise InvalidEc2Id unless val looks like a valid EC2 identifier."""
    if not validator.validate_str()(val):
        raise exception.InvalidEc2Id(ec2_id=val)
    # NOTE(review): presumably raises InvalidEc2Id for a malformed numeric
    # suffix — confirm against ec2utils.ec2_id_to_id.
    ec2utils.ec2_id_to_id(val)
def validate_volume_id(volume_id):
    """Validate an EC2 volume id, raising the volume-specific error code."""
    try:
        _validate_ec2_id(volume_id)
    except exception.InvalidEc2Id:
        # Translate to the EC2 volume-specific malformed-id error.
        raise exception.InvalidVolumeIDMalformed(volume_id=volume_id)
def validate_instance_id(instance_id):
    """Validate an EC2 instance id, raising the instance-specific error."""
    try:
        _validate_ec2_id(instance_id)
    except exception.InvalidEc2Id:
        # Translate to the EC2 instance-specific malformed-id error.
        raise exception.InvalidInstanceIDMalformed(instance_id=instance_id)
# EC2 API can return the following values as documented in the EC2 API
# http://docs.amazonwebservices.com/AWSEC2/latest/APIReference/
# ApiReference-ItemType-InstanceStateType.html
# pending 0 | running 16 | shutting-down 32 | terminated 48 | stopping 64 |
# stopped 80

# Maps nova vm_states to EC2 instance-state names; None (instance has no
# vm state yet) is reported as pending.
_STATE_DESCRIPTION_MAP = {
    None: inst_state.PENDING,
    vm_states.ACTIVE: inst_state.RUNNING,
    vm_states.BUILDING: inst_state.PENDING,
    vm_states.DELETED: inst_state.TERMINATED,
    vm_states.SOFT_DELETED: inst_state.TERMINATED,
    vm_states.STOPPED: inst_state.STOPPED,
    vm_states.PAUSED: inst_state.PAUSE,
    vm_states.SUSPENDED: inst_state.SUSPEND,
    vm_states.RESCUED: inst_state.RESCUE,
    vm_states.RESIZED: inst_state.RESIZE,
}
def _state_description(vm_state, _shutdown_terminate):
    """Map the vm state to the server status string.

    Unmapped states fall through unchanged as the EC2 state name.
    """
    # Note(maoy): We do not provide EC2 compatibility
    # in shutdown_terminate flag behavior. So we ignore
    # it here.
    name = _STATE_DESCRIPTION_MAP.get(vm_state, vm_state)

    return {'code': inst_state.name_to_code(name),
            'name': name}
def _parse_block_device_mapping(bdm):
    """Flatten an EC2 BlockDeviceMappingItemType dict.

    The nested ``ebs`` sub-dict is merged into the top level: its
    ``snapshot_id`` is translated to a nova snapshot or volume UUID
    depending on its ``snap-``/``vol-`` prefix (EC2 allows volume ids
    in the SnapshotId slot), and ``delete_on_termination`` defaults
    to True.
    """
    ebs = bdm.pop('ebs', None)
    if not ebs:
        return bdm

    ec2_id = ebs.pop('snapshot_id', None)
    if ec2_id:
        if ec2_id.startswith('snap-'):
            ebs['snapshot_id'] = ec2utils.ec2_snap_id_to_uuid(ec2_id)
        elif ec2_id.startswith('vol-'):
            ebs['volume_id'] = ec2utils.ec2_vol_id_to_uuid(ec2_id)
    ebs.setdefault('delete_on_termination', True)
    bdm.update(ebs)
    return bdm
def _properties_get_mappings(properties):
    """Return the image 'mappings' property with device names dev-prefixed."""
    return block_device.mappings_prepend_dev(properties.get('mappings', []))
def _format_block_device_mapping(bdm):
    """Build an EC2 BlockDeviceMappingItemType dict from a flat bdm dict.

    Device/virtual names map straight across; snapshot or volume ids go
    into a nested 'ebs' sub-dict (a volume id is reported under the
    snapshotId key, matching the parse direction).
    """
    item = {}
    for ec2_key, nova_key in (('deviceName', 'device_name'),
                              ('virtualName', 'virtual_name')):
        if nova_key in bdm:
            item[ec2_key] = bdm[nova_key]
    if bdm.get('no_device'):
        item['noDevice'] = True
    if 'snapshot_id' in bdm or 'volume_id' in bdm:
        ebs = {}
        for ec2_key, nova_key in (('snapshotId', 'snapshot_id'),
                                  ('snapshotId', 'volume_id'),  # snapshotId is abused
                                  ('volumeSize', 'volume_size'),
                                  ('deleteOnTermination',
                                   'delete_on_termination')):
            value = bdm.get(nova_key)
            if value is None:
                continue
            if nova_key == 'snapshot_id':
                ebs[ec2_key] = ec2utils.id_to_ec2_snap_id(value)
            elif nova_key == 'volume_id':
                ebs[ec2_key] = ec2utils.id_to_ec2_vol_id(value)
            else:
                ebs[ec2_key] = value
        assert 'snapshotId' in ebs
        item['ebs'] = ebs
    return item
def _format_mappings(properties, result):
    """Format multiple BlockDeviceMappingItemType.

    Merges the image's swap/ephemeral mappings with its explicit
    block_device_mapping (the latter wins on device-name conflicts) and
    stores the result under result['blockDeviceMapping'] when non-empty.
    """
    mappings = [{'virtualName': m['virtual'], 'deviceName': m['device']}
                for m in _properties_get_mappings(properties)
                if block_device.is_swap_or_ephemeral(m['virtual'])]

    block_device_mapping = [_format_block_device_mapping(bdm) for bdm in
                            properties.get('block_device_mapping', [])]

    # NOTE(yamahata): overwrite mappings with block_device_mapping
    for bdm in block_device_mapping:
        for i in range(len(mappings)):
            if bdm.get('deviceName') == mappings[i].get('deviceName'):
                del mappings[i]
                break
        mappings.append(bdm)

    # NOTE(yamahata): trim ebs.no_device == true. Is this necessary?
    mappings = [bdm for bdm in mappings if not (bdm.get('noDevice', False))]

    if mappings:
        result['blockDeviceMapping'] = mappings
class CloudController(object):
"""CloudController provides the critical dispatch between
inbound API calls through the endpoint and messages
sent to the other nodes.
"""
def __init__(self):
    """Wire up the backend APIs this controller dispatches to."""
    # The in-tree EC2 API is deprecated; warn once at construction time.
    versionutils.report_deprecated_feature(
        LOG,
        _LW('The in tree EC2 API is deprecated as of Kilo release and may '
            'be removed in a future release. The openstack ec2-api '
            'project http://git.openstack.org/cgit/openstack/ec2-api/ '
            'is the target replacement for this functionality.')
    )
    self.image_service = s3.S3ImageService()
    self.network_api = network.API()
    self.volume_api = volume.API()
    self.security_group_api = get_cloud_security_group_api()
    # Compute API shares the same network/volume/security-group backends
    # so all EC2 calls observe a consistent view.
    self.compute_api = compute.API(network_api=self.network_api,
                                   volume_api=self.volume_api,
                                   security_group_api=self.security_group_api)
    self.keypair_api = compute_api.KeypairAPI()
    self.servicegroup_api = servicegroup.API()
def __str__(self):
return 'CloudController'
def _enforce_valid_instance_ids(self, context, instance_ids):
    """Resolve every EC2 instance id, raising on any invalid one.

    Returns a dict mapping ec2_id -> instance.
    """
    # NOTE(mikal): Amazon's implementation of the EC2 API requires that
    # _all_ instance ids passed in be valid.
    instances = {}
    if instance_ids:
        for ec2_id in instance_ids:
            instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
            instance = self.compute_api.get(context, instance_uuid)
            instances[ec2_id] = instance
    return instances
def _get_image_state(self, image):
# NOTE(vish): fallback status if image_state isn't set
state = image.get('status')
if state == 'active':
state = 'available'
return image['properties'].get('image_state', state)
def describe_availability_zones(self, context, **kwargs):
    """Describe availability zones.

    Admins asking for a zone_name containing 'verbose' get the
    detailed per-host, per-service listing.
    """
    if ('zone_name' in kwargs and
            'verbose' in kwargs['zone_name'] and
            context.is_admin):
        return self._describe_availability_zones_verbose(context,
                                                         **kwargs)
    else:
        return self._describe_availability_zones(context, **kwargs)
def _describe_availability_zones(self, context, **kwargs):
    """Plain zone listing: zone names and available/not-available state."""
    ctxt = context.elevated()
    available_zones, not_available_zones = \
        availability_zones.get_availability_zones(ctxt)

    result = []
    for zone in available_zones:
        # Hide internal_service_availability_zone
        if zone == CONF.internal_service_availability_zone:
            continue
        result.append({'zoneName': zone,
                       'zoneState': "available"})
    for zone in not_available_zones:
        result.append({'zoneName': zone,
                       'zoneState': "not available"})

    return {'availabilityZoneInfo': result}
def _describe_availability_zones_verbose(self, context, **kwargs):
    """Admin variant: list zones plus their hosts and service health.

    Hosts and services are rendered as pseudo-zones with an ASCII-art
    tree prefix in zoneName.
    """
    ctxt = context.elevated()
    available_zones, not_available_zones = \
        availability_zones.get_availability_zones(ctxt)

    # Available services
    enabled_services = objects.ServiceList.get_all(context,
                                                   disabled=False,
                                                   set_zones=True)
    # Index services per zone and per (zone, host) for the tree below.
    zone_hosts = {}
    host_services = {}
    for service in enabled_services:
        zone_hosts.setdefault(service.availability_zone, [])
        if service.host not in zone_hosts[service.availability_zone]:
            zone_hosts[service.availability_zone].append(service.host)

        host_services.setdefault(service.availability_zone +
                                 service.host, [])
        host_services[service.availability_zone + service.host].\
            append(service)

    result = []
    for zone in available_zones:
        result.append({'zoneName': zone,
                       'zoneState': "available"})
        for host in zone_hosts[zone]:
            result.append({'zoneName': '|- %s' % host,
                           'zoneState': ''})

            for service in host_services[zone + host]:
                # ":-)" marks a live service, "XXX" a dead one.
                alive = self.servicegroup_api.service_is_up(service)
                art = (alive and ":-)") or "XXX"
                active = 'enabled'
                if service.disabled:
                    active = 'disabled'
                result.append({'zoneName': '| |- %s' % service.binary,
                               'zoneState': ('%s %s %s'
                                             % (active, art,
                                                service.updated_at))})

    for zone in not_available_zones:
        result.append({'zoneName': zone,
                       'zoneState': "not available"})
    return {'availabilityZoneInfo': result}
def describe_regions(self, context, region_name=None, **kwargs):
    """Return the configured region list, or a single 'nova' region."""
    if CONF.region_list:
        regions = []
        for region in CONF.region_list:
            # Each configured entry is a "name=fqdn" pair.
            name, _sep, host = region.partition('=')
            endpoint = '%s://%s:%s%s' % (CONF.ec2_scheme,
                                         host,
                                         CONF.ec2_port,
                                         CONF.ec2_path)
            regions.append({'regionName': name,
                            'regionEndpoint': endpoint})
    else:
        # No regions configured: advertise this deployment as "nova".
        regions = [{'regionName': 'nova',
                    'regionEndpoint': '%s://%s:%s%s' % (CONF.ec2_scheme,
                                                        CONF.ec2_host,
                                                        CONF.ec2_port,
                                                        CONF.ec2_path)}]
    return {'regionInfo': regions}
def describe_snapshots(self,
                       context,
                       snapshot_id=None,
                       owner=None,
                       restorable_by=None,
                       **kwargs):
    """Describe snapshots, optionally restricted to specific EC2 ids.

    ``owner`` and ``restorable_by`` are accepted for EC2 API
    compatibility but are not used for filtering in this body.
    """
    if snapshot_id:
        snapshots = []
        for ec2_id in snapshot_id:
            internal_id = ec2utils.ec2_snap_id_to_uuid(ec2_id)
            snapshot = self.volume_api.get_snapshot(
                context,
                snapshot_id=internal_id)
            snapshots.append(snapshot)
    else:
        snapshots = self.volume_api.get_all_snapshots(context)

    # Drop snapshots whose status has no EC2 representation
    # (_format_snapshot returns None for those).
    formatted_snapshots = []
    for s in snapshots:
        formatted = self._format_snapshot(context, s)
        if formatted:
            formatted_snapshots.append(formatted)
    return {'snapshotSet': formatted_snapshots}
def _format_snapshot(self, context, snapshot):
# NOTE(mikal): this is just a set of strings in cinder. If they
# implement an enum, then we should move this code to use it. The
# valid ec2 statuses are "pending", "completed", and "error".
status_map = {'new': 'pending',
'creating': 'pending',
'available': 'completed',
'active': 'completed',
'deleting': 'pending',
'deleted': None,
'error': 'error'}
mapped_status = status_map.get(snapshot['status'], snapshot['status'])
if not mapped_status:
return None
s = {}
s['snapshotId'] = ec2utils.id_to_ec2_snap_id(snapshot['id'])
s['volumeId'] = ec2utils.id_to_ec2_vol_id(snapshot['volume_id'])
s['status'] = mapped_status
s['startTime'] = snapshot['created_at']
s['progress'] = snapshot['progress']
s['ownerId'] = snapshot['project_id']
s['volumeSize'] = snapshot['volume_size']
s['description'] = snapshot['display_description']
return s
def create_snapshot(self, context, volume_id, **kwargs):
    """Create a snapshot of a volume and record its EC2 id mapping."""
    validate_volume_id(volume_id)
    LOG.info(_LI("Create snapshot of volume %s"), volume_id,
             context=context)
    volume_id = ec2utils.ec2_vol_id_to_uuid(volume_id)
    args = (context, volume_id, kwargs.get('name'),
            kwargs.get('description'))
    if kwargs.get('force', False):
        snapshot = self.volume_api.create_snapshot_force(*args)
    else:
        snapshot = self.volume_api.create_snapshot(*args)

    # Persist the EC2 <-> uuid mapping so the snapshot can later be
    # looked up by its EC2-style id.
    smap = objects.EC2SnapshotMapping(context, uuid=snapshot['id'])
    smap.create()

    return self._format_snapshot(context, snapshot)
def delete_snapshot(self, context, snapshot_id, **kwargs):
    """Delete the snapshot identified by an EC2 snapshot id."""
    snapshot_id = ec2utils.ec2_snap_id_to_uuid(snapshot_id)
    self.volume_api.delete_snapshot(context, snapshot_id)
    return True
def describe_key_pairs(self, context, key_name=None, **kwargs):
    """List the caller's keypairs, optionally filtered by name.

    Keypairs whose names end with CONF.vpn_key_suffix are hidden from
    non-admin users.
    """
    key_pairs = self.keypair_api.get_key_pairs(context, context.user_id)
    if key_name is not None:
        key_pairs = [x for x in key_pairs if x['name'] in key_name]

    # If looking for non existent key pair
    if key_name is not None and not key_pairs:
        msg = _('Could not find key pair(s): %s') % ','.join(key_name)
        raise exception.KeypairNotFound(message=msg)

    result = []
    for key_pair in key_pairs:
        # filter out the vpn keys
        suffix = CONF.vpn_key_suffix
        if context.is_admin or not key_pair['name'].endswith(suffix):
            result.append({
                'keyName': key_pair['name'],
                'keyFingerprint': key_pair['fingerprint'],
            })

    return {'keySet': result}
def create_key_pair(self, context, key_name, **kwargs):
    """Generate a new keypair; the private key is returned only here."""
    LOG.info(_LI("Create key pair %s"), key_name, context=context)

    keypair, private_key = self.keypair_api.create_key_pair(
        context, context.user_id, key_name)

    return {'keyName': key_name,
            'keyFingerprint': keypair['fingerprint'],
            'keyMaterial': private_key}
# TODO(vish): when context is no longer an object, pass it here
def import_key_pair(self, context, key_name, public_key_material,
                    **kwargs):
    """Import a user-supplied, base64-encoded public key as a keypair."""
    LOG.info(_LI("Import key %s"), key_name, context=context)
    public_key = base64.b64decode(public_key_material)
    keypair = self.keypair_api.import_key_pair(context,
                                               context.user_id,
                                               key_name,
                                               public_key)
    return {'keyName': key_name,
            'keyFingerprint': keypair['fingerprint']}
def delete_key_pair(self, context, key_name, **kwargs):
    """Delete a keypair; always reports success, matching AWS behavior."""
    LOG.info(_LI("Delete key pair %s"), key_name, context=context)
    try:
        self.keypair_api.delete_key_pair(context, context.user_id,
                                         key_name)
    except exception.NotFound:
        # aws returns true even if the key doesn't exist
        pass
    return True
def describe_security_groups(self, context, group_name=None, group_id=None,
                             **kwargs):
    """List security groups, sorted by owner then group name."""
    search_opts = ec2utils.search_opts_from_filters(kwargs.get('filter'))

    raw_groups = self.security_group_api.list(context,
                                              group_name,
                                              group_id,
                                              context.project_id,
                                              search_opts=search_opts)

    groups = [self._format_security_group(context, g) for g in raw_groups]

    return {'securityGroupInfo':
            list(sorted(groups,
                        key=lambda k: (k['ownerId'], k['groupName'])))}
def _format_security_group(self, context, group):
    """Map an internal security group dict to an EC2 securityGroup item.

    Group-sourced rules without a protocol are expanded into one
    permission each for icmp, tcp and udp over their full port ranges.
    """
    g = {}
    g['groupDescription'] = group['description']
    g['groupName'] = group['name']
    g['ownerId'] = group['project_id']
    g['ipPermissions'] = []
    for rule in group['rules']:
        r = {}
        r['groups'] = []
        r['ipRanges'] = []
        if rule['group_id']:
            # Rule grants access to another security group.
            if rule.get('grantee_group'):
                source_group = rule['grantee_group']
                r['groups'] += [{'groupName': source_group['name'],
                                 'userId': source_group['project_id']}]
            else:
                # rule is not always joined with grantee_group
                # for example when using neutron driver.
                source_group = self.security_group_api.get(
                    context, id=rule['group_id'])
                r['groups'] += [{'groupName': source_group.get('name'),
                                 'userId': source_group.get('project_id')}]
            if rule['protocol']:
                r['ipProtocol'] = rule['protocol'].lower()
                r['fromPort'] = rule['from_port']
                r['toPort'] = rule['to_port']
                g['ipPermissions'] += [dict(r)]
            else:
                # No protocol: expand to all three EC2 protocols.
                for protocol, min_port, max_port in (('icmp', -1, -1),
                                                     ('tcp', 1, 65535),
                                                     ('udp', 1, 65535)):
                    r['ipProtocol'] = protocol
                    r['fromPort'] = min_port
                    r['toPort'] = max_port
                    g['ipPermissions'] += [dict(r)]
        else:
            # CIDR-sourced rule.
            r['ipProtocol'] = rule['protocol']
            r['fromPort'] = rule['from_port']
            r['toPort'] = rule['to_port']
            r['ipRanges'] += [{'cidrIp': rule['cidr']}]
            g['ipPermissions'] += [r]
    return g
def _rule_args_to_dict(self, context, kwargs):
    """Expand EC2 rule kwargs into a list of internal rule dicts.

    A single request fans out to several rules when it names multiple
    CIDR ranges and/or multiple source groups.
    """
    rules = []
    if 'groups' not in kwargs and 'ip_ranges' not in kwargs:
        # Simple form: the kwargs describe one rule directly.
        rule = self._rule_dict_last_step(context, **kwargs)
        if rule:
            rules.append(rule)
        return rules
    if 'ip_ranges' in kwargs:
        rules = self._cidr_args_split(kwargs)
    else:
        rules = [kwargs]
    finalset = []
    for rule in rules:
        if 'groups' in rule:
            groups_values = self._groups_args_split(rule)
            for groups_value in groups_values:
                final = self._rule_dict_last_step(context, **groups_value)
                finalset.append(final)
        else:
            final = self._rule_dict_last_step(context, **rule)
            finalset.append(final)
    return finalset
def _cidr_args_split(self, kwargs):
    """Return one kwargs copy per CIDR entry in kwargs['ip_ranges']."""
    cidr_args_split = []
    cidrs = kwargs['ip_ranges']
    for key, cidr in six.iteritems(cidrs):
        mykwargs = kwargs.copy()
        del mykwargs['ip_ranges']
        # Flatten the entry into the cidr_ip kwarg expected downstream.
        mykwargs['cidr_ip'] = cidr['cidr_ip']
        cidr_args_split.append(mykwargs)
    return cidr_args_split
def _groups_args_split(self, kwargs):
    """Return one kwargs copy per source-group entry in kwargs['groups']."""
    groups_args_split = []
    groups = kwargs['groups']
    for key, group in six.iteritems(groups):
        mykwargs = kwargs.copy()
        del mykwargs['groups']
        # Translate the EC2 group sub-dict keys into the source_*
        # kwargs expected by _rule_dict_last_step.
        if 'group_name' in group:
            mykwargs['source_security_group_name'] = group['group_name']
        if 'user_id' in group:
            mykwargs['source_security_group_owner_id'] = group['user_id']
        if 'group_id' in group:
            mykwargs['source_security_group_id'] = group['group_id']
        groups_args_split.append(mykwargs)
    return groups_args_split
def _rule_dict_last_step(self, context, to_port=None, from_port=None,
                         ip_protocol=None, cidr_ip=None, user_id=None,
                         source_security_group_name=None,
                         source_security_group_owner_id=None):
    """Translate one set of EC2 rule args into an internal rule dict.

    A source-group rule and a CIDR rule are mutually exclusive here:
    the group form wins whenever source_security_group_name is given.
    """
    if source_security_group_name:
        source_project_id = self._get_source_project_id(
            context, source_security_group_owner_id)

        source_security_group = objects.SecurityGroup.get_by_name(
            context.elevated(),
            source_project_id,
            source_security_group_name)
        notfound = exception.SecurityGroupNotFound
        if not source_security_group:
            raise notfound(security_group_id=source_security_group_name)
        group_id = source_security_group.id
        return self.security_group_api.new_group_ingress_rule(
            group_id, ip_protocol, from_port, to_port)
    else:
        cidr = self.security_group_api.parse_cidr(cidr_ip)
        return self.security_group_api.new_cidr_ingress_rule(
            cidr, ip_protocol, from_port, to_port)
def _validate_group_identifier(self, group_name, group_id):
if not group_name and not group_id:
err = _("need group_name or group_id")
raise exception.MissingParameter(reason=err)
def _validate_rulevalues(self, rulesvalues):
if not rulesvalues:
err = _("can't build a valid rule")
raise exception.MissingParameter(reason=err)
def _validate_security_group_protocol(self, values):
validprotocols = ['tcp', 'udp', 'icmp', '6', '17', '1']
if 'ip_protocol' in values and \
values['ip_protocol'] not in validprotocols:
protocol = values['ip_protocol']
err = _("Invalid IP protocol %(protocol)s") % \
{'protocol': protocol}
raise exception.InvalidParameterValue(message=err)
def revoke_security_group_ingress(self, context, group_name=None,
                                  group_id=None, **kwargs):
    """Remove the ingress rules matching the given parameters.

    Raises InvalidParameterValue when no existing rule matches.
    """
    self._validate_group_identifier(group_name, group_id)

    security_group = self.security_group_api.get(context, group_name,
                                                 group_id)

    extensions.check_compute_policy(context, 'security_groups',
                                    security_group, 'compute_extension')

    prevalues = kwargs.get('ip_permissions', [kwargs])

    rule_ids = []
    for values in prevalues:
        rulesvalues = self._rule_args_to_dict(context, values)
        self._validate_rulevalues(rulesvalues)
        for values_for_rule in rulesvalues:
            values_for_rule['parent_group_id'] = security_group['id']
            rule_ids.append(self.security_group_api.rule_exists(
                security_group, values_for_rule))

    # rule_exists entries are filtered to the truthy matches only.
    rule_ids = [id for id in rule_ids if id]

    if rule_ids:
        self.security_group_api.remove_rules(context, security_group,
                                             rule_ids)
        return True

    msg = _("No rule for the specified parameters.")
    raise exception.InvalidParameterValue(message=msg)
# TODO(soren): This has only been tested with Boto as the client.
#              Unfortunately, it seems Boto is using an old API
#              for these operations, so support for newer API versions
#              is sketchy.
def authorize_security_group_ingress(self, context, group_name=None,
                                     group_id=None, **kwargs):
    """Add ingress rules to a security group.

    Raises SecurityGroupRuleExists for duplicates and
    InvalidParameterValue when no rule could be built.
    """
    self._validate_group_identifier(group_name, group_id)

    security_group = self.security_group_api.get(context, group_name,
                                                 group_id)

    extensions.check_compute_policy(context, 'security_groups',
                                    security_group, 'compute_extension')

    prevalues = kwargs.get('ip_permissions', [kwargs])
    postvalues = []
    for values in prevalues:
        self._validate_security_group_protocol(values)
        rulesvalues = self._rule_args_to_dict(context, values)
        self._validate_rulevalues(rulesvalues)
        for values_for_rule in rulesvalues:
            values_for_rule['parent_group_id'] = security_group['id']
            # Duplicate rules are rejected before anything is added.
            if self.security_group_api.rule_exists(security_group,
                                                   values_for_rule):
                raise exception.SecurityGroupRuleExists(
                    rule=values_for_rule)
            postvalues.append(values_for_rule)

    if postvalues:
        self.security_group_api.add_rules(context, security_group['id'],
                                          security_group['name'],
                                          postvalues)
        return True

    msg = _("No rule for the specified parameters.")
    raise exception.InvalidParameterValue(message=msg)
def _get_source_project_id(self, context, source_security_group_owner_id):
if source_security_group_owner_id:
# Parse user:project for source group.
source_parts = source_security_group_owner_id.split(':')
# If no project name specified, assume it's same as user name.
# Since we're looking up by project name, the user name is not
# used here. It's only read for EC2 API compatibility.
if len(source_parts) == 2:
source_project_id = source_parts[1]
else:
source_project_id = source_parts[0]
else:
source_project_id = context.project_id
return source_project_id
def create_security_group(self, context, group_name, group_description):
    """Create a security group after validating its name/description."""
    if isinstance(group_name, six.text_type):
        group_name = utils.utf8(group_name)
    if CONF.ec2_strict_validation:
        # EC2 specification gives constraints for name and description:
        # Accepts alphanumeric characters, spaces, dashes, and underscores
        allowed = '^[a-zA-Z0-9_\- ]+$'
        self.security_group_api.validate_property(group_name, 'name',
                                                  allowed)
        self.security_group_api.validate_property(group_description,
                                                  'description', allowed)
    else:
        # Amazon accepts more symbols.
        # So, allow POSIX [:print:] characters.
        # NOTE(review): only the name is validated in this branch, not
        # the description — confirm this asymmetry is intentional.
        allowed = r'^[\x20-\x7E]+$'
        self.security_group_api.validate_property(group_name, 'name',
                                                  allowed)

    group_ref = self.security_group_api.create_security_group(
        context, group_name, group_description)

    return {'securityGroupSet': [self._format_security_group(context,
                                                             group_ref)]}
def delete_security_group(self, context, group_name=None, group_id=None,
                          **kwargs):
    """Delete a security group identified by name or id.

    Raises MissingParameter when neither identifier is supplied,
    matching the behavior of the revoke/authorize calls.
    """
    # Reuse the shared validator instead of duplicating the check that
    # revoke_security_group_ingress / authorize_security_group_ingress
    # already perform; the raised exception is identical.
    self._validate_group_identifier(group_name, group_id)

    security_group = self.security_group_api.get(context, group_name,
                                                 group_id)

    extensions.check_compute_policy(context, 'security_groups',
                                    security_group, 'compute_extension')

    self.security_group_api.destroy(context, security_group)

    return True
def get_password_data(self, context, instance_id, **kwargs):
    """Return the password blob stored for an instance via metadata."""
    # instance_id may be passed in as a list of instances
    if isinstance(instance_id, list):
        ec2_id = instance_id[0]
    else:
        ec2_id = instance_id
    validate_instance_id(ec2_id)
    instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
    instance = self.compute_api.get(context, instance_uuid)
    output = password.extract_password(instance)
    # NOTE(vish): this should be timestamp from the metadata fields
    #             but it isn't important enough to implement properly
    now = timeutils.utcnow()
    return {"InstanceId": ec2_id,
            "Timestamp": now,
            "passwordData": output}
def get_console_output(self, context, instance_id, **kwargs):
    """Return an instance's console output, base64-encoded per EC2."""
    LOG.info(_LI("Get console output for instance %s"), instance_id,
             context=context)
    # instance_id may be passed in as a list of instances
    if isinstance(instance_id, list):
        ec2_id = instance_id[0]
    else:
        ec2_id = instance_id
    validate_instance_id(ec2_id)
    instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
    instance = self.compute_api.get(context, instance_uuid,
                                    want_objects=True)
    output = self.compute_api.get_console_output(context, instance)
    now = timeutils.utcnow()
    return {"InstanceId": ec2_id,
            "Timestamp": now,
            "output": base64.b64encode(output)}
def describe_volumes(self, context, volume_id=None, **kwargs):
    """Describe volumes, optionally restricted to specific EC2 ids."""
    if volume_id:
        volumes = []
        for ec2_id in volume_id:
            validate_volume_id(ec2_id)
            internal_id = ec2utils.ec2_vol_id_to_uuid(ec2_id)
            volume = self.volume_api.get(context, internal_id)
            volumes.append(volume)
    else:
        volumes = self.volume_api.get_all(context)

    volumes = [self._format_volume(context, v) for v in volumes]

    return {'volumeSet': volumes}
    def _format_volume(self, context, volume):
        """Convert a volume record to the EC2 DescribeVolumes item format."""
        # EC2 reports transitional attach states as 'in-use'; any other
        # status is passed through unchanged.
        valid_ec2_api_volume_status_map = {
            'attaching': 'in-use',
            'detaching': 'in-use'}
        instance_ec2_id = None
        if volume.get('instance_uuid', None):
            instance_uuid = volume['instance_uuid']
            # Make sure instance exists
            objects.Instance.get_by_uuid(context.elevated(), instance_uuid)
            instance_ec2_id = ec2utils.id_to_ec2_inst_id(instance_uuid)
        v = {}
        v['volumeId'] = ec2utils.id_to_ec2_vol_id(volume['id'])
        v['status'] = valid_ec2_api_volume_status_map.get(volume['status'],
                                                          volume['status'])
        v['size'] = volume['size']
        v['availabilityZone'] = volume['availability_zone']
        v['createTime'] = volume['created_at']
        if v['status'] == 'in-use':
            v['attachmentSet'] = [{'attachTime': volume.get('attach_time'),
                                   'deleteOnTermination': False,
                                   'device': volume['mountpoint'],
                                   'instanceId': instance_ec2_id,
                                   'status': self._get_volume_attach_status(
                                       volume),
                                   'volumeId': v['volumeId']}]
        else:
            # An attachmentSet element is emitted even when unattached.
            v['attachmentSet'] = [{}]
        if volume.get('snapshot_id') is not None:
            v['snapshotId'] = ec2utils.id_to_ec2_snap_id(volume['snapshot_id'])
        else:
            v['snapshotId'] = None
        return v
    def create_volume(self, context, **kwargs):
        """Create a volume, optionally from a snapshot (EC2 CreateVolume)."""
        snapshot_ec2id = kwargs.get('snapshot_id', None)
        if snapshot_ec2id is not None:
            snapshot_id = ec2utils.ec2_snap_id_to_uuid(kwargs['snapshot_id'])
            snapshot = self.volume_api.get_snapshot(context, snapshot_id)
            LOG.info(_LI("Create volume from snapshot %s"), snapshot_ec2id,
                     context=context)
        else:
            snapshot = None
            LOG.info(_LI("Create volume of %s GB"),
                     kwargs.get('size'),
                     context=context)
        create_kwargs = dict(snapshot=snapshot,
                             volume_type=kwargs.get('volume_type'),
                             metadata=kwargs.get('metadata'),
                             availability_zone=kwargs.get('availability_zone'))
        volume = self.volume_api.create(context,
                                        kwargs.get('size'),
                                        kwargs.get('name'),
                                        kwargs.get('description'),
                                        **create_kwargs)
        # Record the ec2 id <-> volume uuid mapping so later ec2 calls can
        # resolve this volume.
        vmap = objects.EC2VolumeMapping(context)
        vmap.uuid = volume['id']
        vmap.create()
        # TODO(vish): Instance should be None at db layer instead of
        #             trying to lazy load, but for now we turn it into
        #             a dict to avoid an error.
        return self._format_volume(context, dict(volume))
def delete_volume(self, context, volume_id, **kwargs):
validate_volume_id(volume_id)
volume_id = ec2utils.ec2_vol_id_to_uuid(volume_id)
self.volume_api.delete(context, volume_id)
return True
    def attach_volume(self, context,
                      volume_id,
                      instance_id,
                      device, **kwargs):
        """Attach a volume to an instance at the given device path."""
        validate_instance_id(instance_id)
        validate_volume_id(volume_id)
        volume_id = ec2utils.ec2_vol_id_to_uuid(volume_id)
        instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, instance_id)
        instance = self.compute_api.get(context, instance_uuid,
                                        want_objects=True)
        LOG.info(_LI('Attach volume %(volume_id)s to instance %(instance_id)s '
                     'at %(device)s'),
                 {'volume_id': volume_id,
                  'instance_id': instance_id,
                  'device': device},
                 context=context)
        self.compute_api.attach_volume(context, instance, volume_id, device)
        # Re-read the volume so the response reflects state after the
        # attach request was issued.
        volume = self.volume_api.get(context, volume_id)
        ec2_attach_status = ec2utils.status_to_ec2_attach_status(volume)
        return {'attachTime': volume['attach_time'],
                'device': volume['mountpoint'],
                'instanceId': ec2utils.id_to_ec2_inst_id(instance_uuid),
                'requestId': context.request_id,
                'status': ec2_attach_status,
                'volumeId': ec2utils.id_to_ec2_vol_id(volume_id)}
def _get_instance_from_volume(self, context, volume):
if volume.get('instance_uuid'):
try:
inst_uuid = volume['instance_uuid']
return objects.Instance.get_by_uuid(context, inst_uuid)
except exception.InstanceNotFound:
pass
raise exception.VolumeUnattached(volume_id=volume['id'])
    def detach_volume(self, context, volume_id, **kwargs):
        """Detach a volume from the instance it is attached to."""
        validate_volume_id(volume_id)
        volume_id = ec2utils.ec2_vol_id_to_uuid(volume_id)
        LOG.info(_LI("Detach volume %s"), volume_id, context=context)
        volume = self.volume_api.get(context, volume_id)
        instance = self._get_instance_from_volume(context, volume)
        self.compute_api.detach_volume(context, instance, volume)
        # The attach status is re-read after the detach request, while the
        # attachTime/device/instanceId fields below come from the volume
        # record as it was before the detach.
        resp_volume = self.volume_api.get(context, volume_id)
        ec2_attach_status = ec2utils.status_to_ec2_attach_status(resp_volume)
        return {'attachTime': volume['attach_time'],
                'device': volume['mountpoint'],
                'instanceId': ec2utils.id_to_ec2_inst_id(
                    volume['instance_uuid']),
                'requestId': context.request_id,
                'status': ec2_attach_status,
                'volumeId': ec2utils.id_to_ec2_vol_id(volume_id)}
def _format_kernel_id(self, context, instance_ref, result, key):
kernel_uuid = instance_ref['kernel_id']
if kernel_uuid is None or kernel_uuid == '':
return
result[key] = ec2utils.glance_id_to_ec2_id(context, kernel_uuid, 'aki')
def _format_ramdisk_id(self, context, instance_ref, result, key):
ramdisk_uuid = instance_ref['ramdisk_id']
if ramdisk_uuid is None or ramdisk_uuid == '':
return
result[key] = ec2utils.glance_id_to_ec2_id(context, ramdisk_uuid,
'ari')
    def describe_instance_attribute(self, context, instance_id, attribute,
                                    **kwargs):
        """Return a single attribute of an instance (EC2 API).

        Dispatches on *attribute* through a formatter table; raises
        InvalidAttribute for unknown names (and for sourceDestCheck,
        which is listed but unsupported).
        """
        # Raised for attributes we list but cannot report.
        def _unsupported_attribute(instance, result):
            raise exception.InvalidAttribute(attr=attribute)
        def _format_attr_block_device_mapping(instance, result):
            tmp = {}
            self._format_instance_root_device_name(instance, tmp)
            self._format_instance_bdm(context, instance.uuid,
                                      tmp['rootDeviceName'], result)
        def _format_attr_disable_api_termination(instance, result):
            result['disableApiTermination'] = instance.disable_terminate
        def _format_attr_group_set(instance, result):
            CloudController._format_group_set(instance, result)
        def _format_attr_instance_initiated_shutdown_behavior(instance,
                                                              result):
            if instance.shutdown_terminate:
                result['instanceInitiatedShutdownBehavior'] = 'terminate'
            else:
                result['instanceInitiatedShutdownBehavior'] = 'stop'
        def _format_attr_instance_type(instance, result):
            self._format_instance_type(instance, result)
        def _format_attr_kernel(instance, result):
            self._format_kernel_id(context, instance, result, 'kernel')
        def _format_attr_ramdisk(instance, result):
            self._format_ramdisk_id(context, instance, result, 'ramdisk')
        def _format_attr_root_device_name(instance, result):
            self._format_instance_root_device_name(instance, result)
        def _format_attr_source_dest_check(instance, result):
            _unsupported_attribute(instance, result)
        # user_data is stored base64-encoded; decode before returning.
        def _format_attr_user_data(instance, result):
            result['userData'] = base64.b64decode(instance.user_data)
        attribute_formatter = {
            'blockDeviceMapping': _format_attr_block_device_mapping,
            'disableApiTermination': _format_attr_disable_api_termination,
            'groupSet': _format_attr_group_set,
            'instanceInitiatedShutdownBehavior':
            _format_attr_instance_initiated_shutdown_behavior,
            'instanceType': _format_attr_instance_type,
            'kernel': _format_attr_kernel,
            'ramdisk': _format_attr_ramdisk,
            'rootDeviceName': _format_attr_root_device_name,
            'sourceDestCheck': _format_attr_source_dest_check,
            'userData': _format_attr_user_data,
        }
        fn = attribute_formatter.get(attribute)
        if fn is None:
            raise exception.InvalidAttribute(attr=attribute)
        validate_instance_id(instance_id)
        instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, instance_id)
        instance = self.compute_api.get(context, instance_uuid,
                                        want_objects=True)
        result = {'instance_id': instance_id}
        fn(instance, result)
        return result
def describe_instances(self, context, **kwargs):
# Optional DescribeInstances argument
instance_id = kwargs.get('instance_id', None)
filters = kwargs.get('filter', None)
instances = self._enforce_valid_instance_ids(context, instance_id)
return self._format_describe_instances(context,
instance_id=instance_id,
instance_cache=instances,
filter=filters)
def describe_instances_v6(self, context, **kwargs):
# Optional DescribeInstancesV6 argument
instance_id = kwargs.get('instance_id', None)
filters = kwargs.get('filter', None)
instances = self._enforce_valid_instance_ids(context, instance_id)
return self._format_describe_instances(context,
instance_id=instance_id,
instance_cache=instances,
filter=filters,
use_v6=True)
def _format_describe_instances(self, context, **kwargs):
return {'reservationSet': self._format_instances(context, **kwargs)}
def _format_run_instances(self, context, reservation_id):
i = self._format_instances(context, reservation_id=reservation_id)
assert len(i) == 1
return i[0]
    def _format_terminate_instances(self, context, instance_id,
                                    previous_states):
        """Build the instancesSet response for TerminateInstances.

        ``previous_states`` holds the instances as they were before the
        delete was requested; the current state is re-read per instance,
        falling back to SHUTTING_DOWN when it is already gone.
        """
        instances_set = []
        for (ec2_id, previous_state) in zip(instance_id, previous_states):
            i = {}
            i['instanceId'] = ec2_id
            i['previousState'] = _state_description(previous_state['vm_state'],
                previous_state['shutdown_terminate'])
            try:
                instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
                instance = self.compute_api.get(context, instance_uuid,
                                                want_objects=True)
                i['currentState'] = _state_description(instance.vm_state,
                    instance.shutdown_terminate)
            except exception.NotFound:
                # Already deleted from the db; report as shutting down.
                i['currentState'] = _state_description(
                    inst_state.SHUTTING_DOWN, True)
            instances_set.append(i)
        return {'instancesSet': instances_set}
def _format_stop_instances(self, context, instance_ids, previous_states):
instances_set = []
for (ec2_id, previous_state) in zip(instance_ids, previous_states):
i = {}
i['instanceId'] = ec2_id
i['previousState'] = _state_description(previous_state['vm_state'],
previous_state['shutdown_terminate'])
i['currentState'] = _state_description(inst_state.STOPPING, True)
instances_set.append(i)
return {'instancesSet': instances_set}
def _format_start_instances(self, context, instance_id, previous_states):
instances_set = []
for (ec2_id, previous_state) in zip(instance_id, previous_states):
i = {}
i['instanceId'] = ec2_id
i['previousState'] = _state_description(previous_state['vm_state'],
previous_state['shutdown_terminate'])
i['currentState'] = _state_description(None, True)
instances_set.append(i)
return {'instancesSet': instances_set}
    def _format_instance_bdm(self, context, instance_uuid, root_device_name,
                             result):
        """Format InstanceBlockDeviceMappingResponseItemType."""
        root_device_type = 'instance-store'
        root_device_short_name = block_device.strip_dev(root_device_name)
        if root_device_name == root_device_short_name:
            root_device_name = block_device.prepend_dev(root_device_name)
        mapping = []
        bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
                context, instance_uuid)
        for bdm in bdms:
            volume_id = bdm.volume_id
            # Skip entries with no backing volume and explicit holes.
            if volume_id is None or bdm.no_device:
                continue
            # A volume at the root device means the instance is EBS-backed.
            if (bdm.is_volume and
                (bdm.device_name == root_device_name or
                 bdm.device_name == root_device_short_name)):
                root_device_type = 'ebs'
            vol = self.volume_api.get(context, volume_id)
            LOG.debug("vol = %s\n", vol)
            # TODO(yamahata): volume attach time
            ebs = {'volumeId': ec2utils.id_to_ec2_vol_id(volume_id),
                   'deleteOnTermination': bdm.delete_on_termination,
                   'attachTime': vol['attach_time'] or '',
                   'status': self._get_volume_attach_status(vol), }
            res = {'deviceName': bdm.device_name,
                   'ebs': ebs, }
            mapping.append(res)
        if mapping:
            result['blockDeviceMapping'] = mapping
        result['rootDeviceType'] = root_device_type
@staticmethod
def _get_volume_attach_status(volume):
return (volume['status']
if volume['status'] in ('attaching', 'detaching') else
volume['attach_status'])
@staticmethod
def _format_instance_root_device_name(instance, result):
result['rootDeviceName'] = (instance.get('root_device_name') or
block_device.DEFAULT_ROOT_DEV_NAME)
@staticmethod
def _format_instance_type(instance, result):
flavor = instance.get_flavor()
result['instanceType'] = flavor.name
@staticmethod
def _format_group_set(instance, result):
security_group_names = []
if instance.get('security_groups'):
for security_group in instance.security_groups:
security_group_names.append(security_group['name'])
result['groupSet'] = utils.convert_to_list_dict(
security_group_names, 'groupId')
    def _format_instances(self, context, instance_id=None, use_v6=False,
                          instances_cache=None, **search_opts):
        """Format instances grouped by reservation for DescribeInstances.

        ``instance_id`` is an optional list of ec2 ids; when present only
        those instances are formatted (missing ones are skipped) and
        ``instances_cache`` (a dict keyed by ec2 id) is consulted before
        fetching.  Otherwise every non-deleted instance matching
        ``search_opts`` is formatted.  Returns a list of reservation dicts.
        """
        # TODO(termie): this method is poorly named as its name does not imply
        #               that it will be making a variety of database calls
        #               rather than simply formatting a bunch of instances that
        #               were handed to it
        reservations = {}
        if not instances_cache:
            instances_cache = {}
        # NOTE(vish): instance_id is an optional list of ids to filter by
        if instance_id:
            instances = []
            for ec2_id in instance_id:
                if ec2_id in instances_cache:
                    instances.append(instances_cache[ec2_id])
                else:
                    try:
                        instance_uuid = ec2utils.ec2_inst_id_to_uuid(context,
                                                                     ec2_id)
                        instance = self.compute_api.get(context, instance_uuid,
                                                        want_objects=True)
                    except exception.NotFound:
                        # Unknown ids are silently skipped, not errors.
                        continue
                    instances.append(instance)
        else:
            try:
                # always filter out deleted instances
                search_opts['deleted'] = False
                instances = self.compute_api.get_all(context,
                                                     search_opts=search_opts,
                                                     sort_keys=['created_at'],
                                                     sort_dirs=['asc'],
                                                     want_objects=True)
            except exception.NotFound:
                instances = []
        for instance in instances:
            if not context.is_admin:
                # Hide cloudpipe VPN instances from non-admin users.
                if pipelib.is_vpn_image(instance.image_ref):
                    continue
            i = {}
            instance_uuid = instance.uuid
            ec2_id = ec2utils.id_to_ec2_inst_id(instance_uuid)
            i['instanceId'] = ec2_id
            image_uuid = instance.image_ref
            i['imageId'] = ec2utils.glance_id_to_ec2_id(context, image_uuid)
            self._format_kernel_id(context, instance, i, 'kernelId')
            self._format_ramdisk_id(context, instance, i, 'ramdiskId')
            i['instanceState'] = _state_description(
                instance.vm_state, instance.shutdown_terminate)
            fixed_ip = None
            floating_ip = None
            ip_info = ec2utils.get_ip_info_for_instance(context, instance)
            if ip_info['fixed_ips']:
                fixed_ip = ip_info['fixed_ips'][0]
            if ip_info['floating_ips']:
                floating_ip = ip_info['floating_ips'][0]
            if ip_info['fixed_ip6s']:
                i['dnsNameV6'] = ip_info['fixed_ip6s'][0]
            if CONF.ec2_private_dns_show_ip:
                i['privateDnsName'] = fixed_ip
            else:
                i['privateDnsName'] = instance.hostname
            i['privateIpAddress'] = fixed_ip
            if floating_ip is not None:
                i['ipAddress'] = floating_ip
                i['dnsName'] = floating_ip
            i['keyName'] = instance.key_name
            i['tagSet'] = []
            for k, v in six.iteritems(utils.instance_meta(instance)):
                i['tagSet'].append({'key': k, 'value': v})
            client_token = self._get_client_token(context, instance_uuid)
            if client_token:
                i['clientToken'] = client_token
            if context.is_admin:
                # Admins additionally see project id and host in keyName.
                i['keyName'] = '%s (%s, %s)' % (i['keyName'],
                                                instance.project_id,
                                                instance.host)
            i['productCodesSet'] = utils.convert_to_list_dict([],
                                                              'product_codes')
            self._format_instance_type(instance, i)
            i['launchTime'] = instance.created_at
            i['amiLaunchIndex'] = instance.launch_index
            self._format_instance_root_device_name(instance, i)
            self._format_instance_bdm(context, instance.uuid,
                                      i['rootDeviceName'], i)
            zone = availability_zones.get_instance_availability_zone(context,
                                                                     instance)
            i['placement'] = {'availabilityZone': zone}
            if instance.reservation_id not in reservations:
                r = {}
                r['reservationId'] = instance.reservation_id
                r['ownerId'] = instance.project_id
                self._format_group_set(instance, r)
                r['instancesSet'] = []
                reservations[instance.reservation_id] = r
            reservations[instance.reservation_id]['instancesSet'].append(i)
        return list(reservations.values())
def describe_addresses(self, context, public_ip=None, **kwargs):
if public_ip:
floatings = []
for address in public_ip:
floating = self.network_api.get_floating_ip_by_address(context,
address)
floatings.append(floating)
else:
floatings = self.network_api.get_floating_ips_by_project(context)
addresses = [self._format_address(context, f) for f in floatings]
return {'addressesSet': addresses}
    def _format_address(self, context, floating_ip):
        """Convert a floating ip record to the EC2 address format."""
        ec2_id = None
        if floating_ip['fixed_ip_id']:
            if utils.is_neutron():
                # Neutron floating ips carry the instance record directly.
                fixed_vm_uuid = floating_ip['instance']['uuid']
                if fixed_vm_uuid is not None:
                    ec2_id = ec2utils.id_to_ec2_inst_id(fixed_vm_uuid)
            else:
                # nova-network: resolve the instance through the fixed ip.
                fixed_id = floating_ip['fixed_ip_id']
                fixed = self.network_api.get_fixed_ip(context, fixed_id)
                if fixed['instance_uuid'] is not None:
                    ec2_id = ec2utils.id_to_ec2_inst_id(fixed['instance_uuid'])
        address = {'public_ip': floating_ip['address'],
                   'instance_id': ec2_id}
        if context.is_admin:
            # Admins also see which project owns the address.
            details = "%s (%s)" % (address['instance_id'],
                                   floating_ip['project_id'])
            address['instance_id'] = details
        return address
def allocate_address(self, context, **kwargs):
LOG.info(_LI("Allocate address"), context=context)
public_ip = self.network_api.allocate_floating_ip(context)
return {'publicIp': public_ip}
def release_address(self, context, public_ip, **kwargs):
LOG.info(_LI('Release address %s'), public_ip, context=context)
self.network_api.release_floating_ip(context, address=public_ip)
return {'return': "true"}
    def associate_address(self, context, instance_id, public_ip, **kwargs):
        """Associate a floating IP with an instance's first fixed IP.

        Raises NoMoreFixedIps when the instance has no fixed addresses.
        """
        LOG.info(_LI("Associate address %(public_ip)s to instance "
                     "%(instance_id)s"),
                 {'public_ip': public_ip, 'instance_id': instance_id},
                 context=context)
        instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, instance_id)
        instance = self.compute_api.get(context, instance_uuid,
                                        want_objects=True)
        cached_ipinfo = ec2utils.get_ip_info_for_instance(context, instance)
        fixed_ips = cached_ipinfo['fixed_ips'] + cached_ipinfo['fixed_ip6s']
        if not fixed_ips:
            msg = _('Unable to associate IP Address, no fixed_ips.')
            raise exception.NoMoreFixedIps(message=msg)
        # TODO(tr3buchet): this will associate the floating IP with the
        # first fixed_ip an instance has. This should be
        # changed to support specifying a particular fixed_ip if
        # multiple exist but this may not apply to ec2..
        if len(fixed_ips) > 1:
            LOG.warning(_LW('multiple fixed_ips exist, using the first: %s'),
                        fixed_ips[0])
        self.network_api.associate_floating_ip(context, instance,
                                               floating_address=public_ip,
                                               fixed_address=fixed_ips[0])
        return {'return': 'true'}
def disassociate_address(self, context, public_ip, **kwargs):
instance_id = self.network_api.get_instance_id_by_floating_address(
context, public_ip)
if instance_id:
instance = self.compute_api.get(context, instance_id,
want_objects=True)
LOG.info(_LI("Disassociate address %s"),
public_ip, context=context)
self.network_api.disassociate_floating_ip(context, instance,
address=public_ip)
else:
msg = _('Floating ip is not associated.')
raise exception.InvalidAssociation(message=msg)
return {'return': "true"}
def run_instances(self, context, **kwargs):
min_count = int(kwargs.get('min_count', 1))
max_count = int(kwargs.get('max_count', min_count))
try:
min_count = utils.validate_integer(
min_count, "min_count", min_value=1)
max_count = utils.validate_integer(
max_count, "max_count", min_value=1)
except exception.InvalidInput as e:
raise exception.InvalidInput(message=e.format_message())
if min_count > max_count:
msg = _('min_count must be <= max_count')
raise exception.InvalidInput(message=msg)
client_token = kwargs.get('client_token')
if client_token:
resv_id = self._resv_id_from_token(context, client_token)
if resv_id:
# since this client_token already corresponds to a reservation
# id, this returns a proper response without creating a new
# instance
return self._format_run_instances(context, resv_id)
if kwargs.get('kernel_id'):
kernel = self._get_image(context, kwargs['kernel_id'])
kwargs['kernel_id'] = ec2utils.id_to_glance_id(context,
kernel['id'])
if kwargs.get('ramdisk_id'):
ramdisk = self._get_image(context, kwargs['ramdisk_id'])
kwargs['ramdisk_id'] = ec2utils.id_to_glance_id(context,
ramdisk['id'])
for bdm in kwargs.get('block_device_mapping', []):
_parse_block_device_mapping(bdm)
image = self._get_image(context, kwargs['image_id'])
image_uuid = ec2utils.id_to_glance_id(context, image['id'])
if image:
image_state = self._get_image_state(image)
else:
raise exception.ImageNotFoundEC2(image_id=kwargs['image_id'])
if image_state != 'available':
msg = _('Image must be available')
raise exception.ImageNotActive(message=msg)
iisb = kwargs.get('instance_initiated_shutdown_behavior', 'stop')
shutdown_terminate = (iisb == 'terminate')
flavor = objects.Flavor.get_by_name(context,
kwargs.get('instance_type', None))
(instances, resv_id) = self.compute_api.create(context,
instance_type=flavor,
image_href=image_uuid,
max_count=int(kwargs.get('max_count', min_count)),
min_count=min_count,
kernel_id=kwargs.get('kernel_id'),
ramdisk_id=kwargs.get('ramdisk_id'),
key_name=kwargs.get('key_name'),
user_data=kwargs.get('user_data'),
security_group=kwargs.get('security_group'),
availability_zone=kwargs.get('placement', {}).get(
'availability_zone'),
block_device_mapping=kwargs.get('block_device_mapping', {}),
shutdown_terminate=shutdown_terminate)
instances = self._format_run_instances(context, resv_id)
if instances:
instance_ids = [i['instanceId'] for i in instances['instancesSet']]
self._add_client_token(context, client_token, instance_ids)
return instances
def _add_client_token(self, context, client_token, instance_ids):
"""Add client token to reservation ID mapping."""
if client_token:
for ec2_id in instance_ids:
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
instance = objects.Instance.get_by_uuid(context,
instance_uuid, expected_attrs=['system_metadata'])
instance.system_metadata.update(
{'EC2_client_token': client_token})
instance.save()
def _get_client_token(self, context, instance_uuid):
"""Get client token for a given instance."""
instance = objects.Instance.get_by_uuid(context,
instance_uuid, expected_attrs=['system_metadata'])
return instance.system_metadata.get('EC2_client_token')
def _remove_client_token(self, context, instance_ids):
"""Remove client token to reservation ID mapping."""
for ec2_id in instance_ids:
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
instance = objects.Instance.get_by_uuid(context,
instance_uuid, expected_attrs=['system_metadata'])
instance.system_metadata.pop('EC2_client_token', None)
instance.save()
    def _resv_id_from_token(self, context, client_token):
        """Get reservation ID from db."""
        resv_id = None
        # NOTE(review): search_filts presumably matches key and value of
        # the same metadata entry together — confirm against
        # compute_api.get_all_system_metadata before relying on it.
        sys_metas = self.compute_api.get_all_system_metadata(
            context, search_filts=[{'key': ['EC2_client_token']},
                                   {'value': [client_token]}])
        for sys_meta in sys_metas:
            # Double-check the value before trusting the filter result.
            if sys_meta and sys_meta.get('value') == client_token:
                instance = objects.Instance.get_by_uuid(
                    context, sys_meta['instance_id'], expected_attrs=None)
                resv_id = instance.get('reservation_id')
                break
        return resv_id
def _ec2_ids_to_instances(self, context, instance_id):
"""Get all instances first, to prevent partial executions."""
instances = []
extra = ['system_metadata', 'metadata', 'info_cache']
for ec2_id in instance_id:
validate_instance_id(ec2_id)
instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
instance = objects.Instance.get_by_uuid(
context, instance_uuid, expected_attrs=extra)
instances.append(instance)
return instances
def terminate_instances(self, context, instance_id, **kwargs):
"""Terminate each instance in instance_id, which is a list of ec2 ids.
instance_id is a kwarg so its name cannot be modified.
"""
previous_states = self._ec2_ids_to_instances(context, instance_id)
self._remove_client_token(context, instance_id)
LOG.debug("Going to start terminating instances")
for instance in previous_states:
self.compute_api.delete(context, instance)
return self._format_terminate_instances(context,
instance_id,
previous_states)
def reboot_instances(self, context, instance_id, **kwargs):
"""instance_id is a list of instance ids."""
instances = self._ec2_ids_to_instances(context, instance_id)
LOG.info(_LI("Reboot instance %r"), instance_id, context=context)
for instance in instances:
self.compute_api.reboot(context, instance, 'HARD')
return True
def stop_instances(self, context, instance_id, **kwargs):
"""Stop each instances in instance_id.
Here instance_id is a list of instance ids
"""
instances = self._ec2_ids_to_instances(context, instance_id)
LOG.debug("Going to stop instances")
for instance in instances:
extensions.check_compute_policy(context, 'stop', instance)
self.compute_api.stop(context, instance)
return self._format_stop_instances(context, instance_id,
instances)
def start_instances(self, context, instance_id, **kwargs):
"""Start each instances in instance_id.
Here instance_id is a list of instance ids
"""
instances = self._ec2_ids_to_instances(context, instance_id)
LOG.debug("Going to start instances")
for instance in instances:
extensions.check_compute_policy(context, 'start', instance)
self.compute_api.start(context, instance)
return self._format_start_instances(context, instance_id,
instances)
    def _get_image(self, context, ec2_id):
        """Look up an image by ec2 id, falling back to a name lookup.

        Raises ImageNotFound when neither resolves, or when the ec2 id's
        type prefix (ami/aki/ari) disagrees with the image's container
        format.  Note the name-lookup path returns early and skips the
        prefix check.
        """
        try:
            internal_id = ec2utils.ec2_id_to_id(ec2_id)
            image = self.image_service.show(context, internal_id)
        except (exception.InvalidEc2Id, exception.ImageNotFound):
            # Not a valid/known ec2 id: treat it as an image name.
            filters = {'name': ec2_id}
            images = self.image_service.detail(context, filters=filters)
            try:
                return images[0]
            except IndexError:
                raise exception.ImageNotFound(image_id=ec2_id)
        # The type prefix of the ec2 id must agree with the image type.
        image_type = ec2_id.split('-')[0]
        if ec2utils.image_type(image.get('container_format')) != image_type:
            raise exception.ImageNotFound(image_id=ec2_id)
        return image
    def _format_image(self, image):
        """Convert from format defined by GlanceImageService to S3 format."""
        i = {}
        image_type = ec2utils.image_type(image.get('container_format'))
        ec2_id = ec2utils.image_ec2_id(image.get('id'), image_type)
        name = image.get('name')
        i['imageId'] = ec2_id
        kernel_id = image['properties'].get('kernel_id')
        if kernel_id:
            i['kernelId'] = ec2utils.image_ec2_id(kernel_id, 'aki')
        ramdisk_id = image['properties'].get('ramdisk_id')
        if ramdisk_id:
            i['ramdiskId'] = ec2utils.image_ec2_id(ramdisk_id, 'ari')
        i['imageOwnerId'] = image.get('owner')
        img_loc = image['properties'].get('image_location')
        if img_loc:
            i['imageLocation'] = img_loc
        else:
            # NOTE(review): img_loc is falsy on this branch, so this renders
            # as e.g. "None (<name>)" — presumably a deliberate placeholder;
            # confirm before changing.
            i['imageLocation'] = "%s (%s)" % (img_loc, name)
        i['name'] = name
        if not name and img_loc:
            # This should only occur for images registered with ec2 api
            # prior to that api populating the glance name
            i['name'] = img_loc
        i['imageState'] = self._get_image_state(image)
        i['description'] = image.get('description')
        display_mapping = {'aki': 'kernel',
                           'ari': 'ramdisk',
                           'ami': 'machine'}
        i['imageType'] = display_mapping.get(image_type)
        # Double negation coerces is_public (possibly missing) to a bool.
        i['isPublic'] = not not image.get('is_public')
        i['architecture'] = image['properties'].get('architecture')
        properties = image['properties']
        root_device_name = block_device.properties_root_device_name(properties)
        root_device_type = 'instance-store'
        # The image is EBS-backed if any mapping places a snapshot or
        # volume at the root device.
        for bdm in properties.get('block_device_mapping', []):
            if (block_device.strip_dev(bdm.get('device_name')) ==
                block_device.strip_dev(root_device_name) and
                ('snapshot_id' in bdm or 'volume_id' in bdm) and
                    not bdm.get('no_device')):
                root_device_type = 'ebs'
        i['rootDeviceName'] = (root_device_name or
                               block_device.DEFAULT_ROOT_DEV_NAME)
        i['rootDeviceType'] = root_device_type
        _format_mappings(properties, i)
        return i
def describe_images(self, context, image_id=None, **kwargs):
# NOTE: image_id is a list!
if image_id:
images = []
for ec2_id in image_id:
try:
image = self._get_image(context, ec2_id)
except exception.NotFound:
raise exception.ImageNotFound(image_id=ec2_id)
images.append(image)
else:
images = self.image_service.detail(context)
images = [self._format_image(i) for i in images]
return {'imagesSet': images}
def deregister_image(self, context, image_id, **kwargs):
LOG.info(_LI("De-registering image %s"), image_id, context=context)
image = self._get_image(context, image_id)
internal_id = image['id']
self.image_service.delete(context, internal_id)
return True
def _register_image(self, context, metadata):
image = self.image_service.create(context, metadata)
image_type = ec2utils.image_type(image.get('container_format'))
image_id = ec2utils.image_ec2_id(image['id'], image_type)
return image_id
    def register_image(self, context, image_location=None, **kwargs):
        """Register an image from a stored location (EC2 RegisterImage).

        Either image_location or a name must be supplied; each falls back
        to the other.  Raises MissingParameter when neither is given.
        """
        if image_location is None and kwargs.get('name'):
            image_location = kwargs['name']
        if image_location is None:
            msg = _('imageLocation is required')
            raise exception.MissingParameter(reason=msg)
        metadata = {'properties': {'image_location': image_location}}
        if kwargs.get('name'):
            metadata['name'] = kwargs['name']
        else:
            metadata['name'] = image_location
        if 'root_device_name' in kwargs:
            metadata['properties']['root_device_name'] = kwargs.get(
                'root_device_name')
        mappings = [_parse_block_device_mapping(bdm) for bdm in
                    kwargs.get('block_device_mapping', [])]
        if mappings:
            metadata['properties']['block_device_mapping'] = mappings
        image_id = self._register_image(context, metadata)
        LOG.info(_LI('Registered image %(image_location)s with id '
                     '%(image_id)s'),
                 {'image_location': image_location, 'image_id': image_id},
                 context=context)
        return {'imageId': image_id}
    def describe_image_attribute(self, context, image_id, attribute, **kwargs):
        """Return a single attribute of an image (EC2 API).

        Raises InvalidAttribute for unsupported attribute names and
        ImageNotFound when the image cannot be resolved.
        """
        def _block_device_mapping_attribute(image, result):
            _format_mappings(image['properties'], result)
        def _launch_permission_attribute(image, result):
            result['launchPermission'] = []
            # Only the 'all' group (public image) is modelled.
            if image['is_public']:
                result['launchPermission'].append({'group': 'all'})
        def _root_device_name_attribute(image, result):
            _prop_root_dev_name = block_device.properties_root_device_name
            result['rootDeviceName'] = _prop_root_dev_name(image['properties'])
            if result['rootDeviceName'] is None:
                result['rootDeviceName'] = block_device.DEFAULT_ROOT_DEV_NAME
        def _kernel_attribute(image, result):
            kernel_id = image['properties'].get('kernel_id')
            if kernel_id:
                result['kernel'] = {
                    'value': ec2utils.image_ec2_id(kernel_id, 'aki')
                }
        def _ramdisk_attribute(image, result):
            ramdisk_id = image['properties'].get('ramdisk_id')
            if ramdisk_id:
                result['ramdisk'] = {
                    'value': ec2utils.image_ec2_id(ramdisk_id, 'ari')
                }
        supported_attributes = {
            'blockDeviceMapping': _block_device_mapping_attribute,
            'launchPermission': _launch_permission_attribute,
            'rootDeviceName': _root_device_name_attribute,
            'kernel': _kernel_attribute,
            'ramdisk': _ramdisk_attribute,
        }
        fn = supported_attributes.get(attribute)
        if fn is None:
            raise exception.InvalidAttribute(attr=attribute)
        try:
            image = self._get_image(context, image_id)
        except exception.NotFound:
            raise exception.ImageNotFound(image_id=image_id)
        result = {'imageId': image_id}
        fn(image, result)
        return result
def modify_image_attribute(self, context, image_id, attribute,
operation_type, **kwargs):
# TODO(devcamcar): Support users and groups other than 'all'.
if attribute != 'launchPermission':
raise exception.InvalidAttribute(attr=attribute)
if 'user_group' not in kwargs:
msg = _('user or group not specified')
raise exception.MissingParameter(reason=msg)
if len(kwargs['user_group']) != 1 and kwargs['user_group'][0] != 'all':
msg = _('only group "all" is supported')
raise exception.InvalidParameterValue(message=msg)
if operation_type not in ['add', 'remove']:
msg = _('operation_type must be add or remove')
raise exception.InvalidParameterValue(message=msg)
LOG.info(_LI("Updating image %s publicity"), image_id, context=context)
try:
image = self._get_image(context, image_id)
except exception.NotFound:
raise exception.ImageNotFound(image_id=image_id)
internal_id = image['id']
del(image['id'])
image['is_public'] = (operation_type == 'add')
try:
return self.image_service.update(context, internal_id, image)
except exception.ImageNotAuthorized:
msg = _('Not allowed to modify attributes for image %s') % image_id
raise exception.Forbidden(message=msg)
def update_image(self, context, image_id, **kwargs):
internal_id = ec2utils.ec2_id_to_id(image_id)
result = self.image_service.update(context, internal_id, dict(kwargs))
return result
# TODO(yamahata): race condition
# At the moment there is no way to prevent others from
# manipulating instances/volumes/snapshots.
# As other code doesn't take it into consideration, here we don't
# care of it for now. Ostrich algorithm
# TODO(mriedem): Consider auto-locking the instance when stopping it and
# doing the snapshot, then unlock it when that is done. Locking the
# instance in the database would prevent other APIs from changing the state
# of the instance during this operation for non-admin users.
    def create_image(self, context, instance_id, **kwargs):
        """Create an image (snapshot) from a volume-backed instance.

        Unless ``no_reboot`` is passed, a running instance is stopped
        first (and restarted afterwards) so the snapshot is consistent.

        :param context: request context
        :param instance_id: EC2-style instance id
        :returns: dict with key 'imageId' holding the EC2 id of the image
        :raises: InvalidParameterValue if the instance is not
                 volume-backed, InstanceNotRunning if it is in a
                 transitional state, InternalError on stop timeout
        """
        # NOTE(yamahata): name/description are ignored by register_image(),
        # do so here
        no_reboot = kwargs.get('no_reboot', False)
        name = kwargs.get('name')
        validate_instance_id(instance_id)
        ec2_instance_id = instance_id
        instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_instance_id)
        instance = self.compute_api.get(context, instance_uuid,
                                        want_objects=True)
        # CreateImage only supported for the analogue of EBS-backed instances
        if not self.compute_api.is_volume_backed_instance(context, instance):
            msg = _("Invalid value '%(ec2_instance_id)s' for instanceId. "
                    "Instance does not have a volume attached at root "
                    "(%(root)s)") % {'root': instance.root_device_name,
                                     'ec2_instance_id': ec2_instance_id}
            raise exception.InvalidParameterValue(err=msg)
        # stop the instance if necessary
        restart_instance = False
        if not no_reboot:
            vm_state = instance.vm_state
            # if the instance is in subtle state, refuse to proceed.
            if vm_state not in (vm_states.ACTIVE, vm_states.STOPPED):
                raise exception.InstanceNotRunning(instance_id=ec2_instance_id)
            if vm_state == vm_states.ACTIVE:
                restart_instance = True
                # NOTE(mriedem): We do a call here so that we're sure the
                # stop request is complete before we begin polling the state.
                self.compute_api.stop(context, instance, do_cast=False)
            # wait instance for really stopped (and not transitioning tasks)
            start_time = time.time()
            while (vm_state != vm_states.STOPPED and
                   instance.task_state is not None):
                # Poll once per second until vm_state settles and no task
                # is in flight.
                time.sleep(1)
                instance.refresh()
                vm_state = instance.vm_state
                # NOTE(yamahata): timeout and error. 1 hour for now for safety.
                # Is it too short/long?
                # Or is there any better way?
                timeout = 1 * 60 * 60
                if time.time() > start_time + timeout:
                    err = (_("Couldn't stop instance %(instance)s within "
                             "1 hour. Current vm_state: %(vm_state)s, "
                             "current task_state: %(task_state)s") %
                           {'instance': instance_uuid,
                            'vm_state': vm_state,
                            'task_state': instance.task_state})
                    raise exception.InternalError(message=err)
        # meaningful image name
        name_map = dict(instance=instance_uuid, now=timeutils.isotime())
        name = name or _('image of %(instance)s at %(now)s') % name_map
        new_image = self.compute_api.snapshot_volume_backed(context,
                                                            instance,
                                                            name)
        ec2_id = ec2utils.glance_id_to_ec2_id(context, new_image['id'])
        # Bring a previously-running instance back up after the snapshot.
        if restart_instance:
            self.compute_api.start(context, instance)
        return {'imageId': ec2_id}
    def create_tags(self, context, **kwargs):
        """Add tags to a resource.

        Tags are stored as instance metadata, so only 'instance'
        resources are supported.  Returns True on success, error on
        failure.

        :param context: context under which the method is called
        :raises: MissingParameter if resource_id or tag is absent,
                 InvalidParameterValue on malformed input
        """
        resources = kwargs.get('resource_id', None)
        tags = kwargs.get('tag', None)
        if resources is None or tags is None:
            msg = _('resource_id and tag are required')
            raise exception.MissingParameter(reason=msg)
        if not isinstance(resources, (tuple, list, set)):
            msg = _('Expecting a list of resources')
            raise exception.InvalidParameterValue(message=msg)
        # Only instance resources can carry tags in this implementation.
        for r in resources:
            if ec2utils.resource_type_from_id(context, r) != 'instance':
                msg = _('Only instances implemented')
                raise exception.InvalidParameterValue(message=msg)
        if not isinstance(tags, (tuple, list, set)):
            msg = _('Expecting a list of tagSets')
            raise exception.InvalidParameterValue(message=msg)
        # Validate every tagSet up front so we fail before touching any
        # instance metadata.
        metadata = {}
        for tag in tags:
            if not isinstance(tag, dict):
                err = _('Expecting tagSet to be key/value pairs')
                raise exception.InvalidParameterValue(message=err)
            key = tag.get('key', None)
            val = tag.get('value', None)
            if key is None or val is None:
                err = _('Expecting both key and value to be set')
                raise exception.InvalidParameterValue(message=err)
            metadata[key] = val
        # Apply the same metadata dict to each target instance.
        for ec2_id in resources:
            instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
            instance = self.compute_api.get(context, instance_uuid,
                                            want_objects=True)
            self.compute_api.update_instance_metadata(context,
                                                      instance, metadata)
        return True
    def delete_tags(self, context, **kwargs):
        """Delete tags from a resource.

        Returns True on success, error on failure.

        :param context: context under which the method is called
        :raises: MissingParameter if resource_id or tag is absent,
                 InvalidParameterValue on malformed input
        """
        resources = kwargs.get('resource_id', None)
        tags = kwargs.get('tag', None)
        if resources is None or tags is None:
            msg = _('resource_id and tag are required')
            raise exception.MissingParameter(reason=msg)
        if not isinstance(resources, (tuple, list, set)):
            msg = _('Expecting a list of resources')
            raise exception.InvalidParameterValue(message=msg)
        # Only instance resources carry tags (stored as instance metadata).
        for r in resources:
            if ec2utils.resource_type_from_id(context, r) != 'instance':
                msg = _('Only instances implemented')
                raise exception.InvalidParameterValue(message=msg)
        if not isinstance(tags, (tuple, list, set)):
            msg = _('Expecting a list of tagSets')
            raise exception.InvalidParameterValue(message=msg)
        for ec2_id in resources:
            instance_uuid = ec2utils.ec2_inst_id_to_uuid(context, ec2_id)
            instance = self.compute_api.get(context, instance_uuid,
                                            want_objects=True)
            # Only the 'key' of each tagSet matters for deletion; any
            # 'value' supplied is ignored.
            for tag in tags:
                if not isinstance(tag, dict):
                    msg = _('Expecting tagSet to be key/value pairs')
                    raise exception.InvalidParameterValue(message=msg)
                key = tag.get('key', None)
                if key is None:
                    msg = _('Expecting key to be set')
                    raise exception.InvalidParameterValue(message=msg)
                self.compute_api.delete_instance_metadata(context,
                                                          instance, key)
        return True
    def describe_tags(self, context, **kwargs):
        """List tags, optionally narrowed by EC2-style filters.

        Returns a dict with a single key 'tagSet' on success, error on failure.

        Supported filter names: 'resource-id'/'resource_id' (EC2 instance
        ids), 'key', 'value' (EC2 wildcard patterns, translated to
        regexes) and 'resource-type'/'resource_type' (only 'instance').

        :param context: context under which the method is called
        """
        filters = kwargs.get('filter', None)
        search_filts = []
        if filters:
            # Translate each EC2 filter block into a search filter dict
            # understood by compute_api.get_all_instance_metadata().
            for filter_block in filters:
                key_name = filter_block.get('name', None)
                val = filter_block.get('value', None)
                if val:
                    # Normalise the filter value(s) into a sequence.
                    if isinstance(val, dict):
                        val = val.values()
                    if not isinstance(val, (tuple, list, set)):
                        val = (val,)
                if key_name:
                    search_block = {}
                    if key_name in ('resource_id', 'resource-id'):
                        search_block['resource_id'] = []
                        for res_id in val:
                            search_block['resource_id'].append(
                                ec2utils.ec2_inst_id_to_uuid(context, res_id))
                    elif key_name in ['key', 'value']:
                        # EC2 filter values may use * / ? wildcards;
                        # convert them to regexes for the metadata search.
                        search_block[key_name] = \
                            [ec2utils.regex_from_ec2_regex(v) for v in val]
                    elif key_name in ('resource_type', 'resource-type'):
                        # Only the 'instance' resource type is supported.
                        for res_type in val:
                            if res_type != 'instance':
                                raise exception.InvalidParameterValue(
                                    message=_('Only instances implemented'))
                            search_block[key_name] = 'instance'
                    if len(search_block.keys()) > 0:
                        search_filts.append(search_block)
        # Re-shape each metadata row into an EC2 tagSet entry.
        ts = []
        for tag in self.compute_api.get_all_instance_metadata(context,
                                                              search_filts):
            ts.append({
                'resource_id': ec2utils.id_to_ec2_inst_id(tag['instance_id']),
                'resource_type': 'instance',
                'key': tag['key'],
                'value': tag['value']
            })
        return {"tagSet": ts}
class EC2SecurityGroupExceptions(object):
    """Maps generic security-group failures onto EC2 API exceptions.

    Mixed into the security group API implementations below so their
    error callbacks raise EC2-style exception types.
    """

    @staticmethod
    def raise_invalid_property(msg):
        raise exception.InvalidParameterValue(message=msg)

    @staticmethod
    def raise_group_already_exists(msg):
        raise exception.SecurityGroupExists(message=msg)

    @staticmethod
    def raise_invalid_group(msg):
        raise exception.InvalidGroup(reason=msg)

    @staticmethod
    def raise_invalid_cidr(cidr, decoding_exception=None):
        # Re-raise the original decoding error when one is supplied so the
        # caller sees the root cause; otherwise report a generic bad CIDR.
        if decoding_exception:
            raise decoding_exception
        else:
            raise exception.InvalidParameterValue(message=_("Invalid CIDR"))

    @staticmethod
    def raise_over_quota(msg):
        raise exception.SecurityGroupLimitExceeded(msg)

    @staticmethod
    def raise_not_found(msg):
        # NOTE(review): intentionally a no-op -- not-found conditions are
        # swallowed here, presumably so EC2 calls stay best-effort; confirm
        # against the callers before changing.
        pass
class CloudSecurityGroupNovaAPI(EC2SecurityGroupExceptions,
                                compute_api.SecurityGroupAPI):
    """Nova-backed security group API that raises EC2-style exceptions."""
    pass
class CloudSecurityGroupNeutronAPI(EC2SecurityGroupExceptions,
                                   neutron_driver.SecurityGroupAPI):
    """Neutron-backed security group API that raises EC2-style exceptions."""
    pass
def get_cloud_security_group_api():
    """Return the security group API implementation selected by config.

    Prefers the nova backend when ``security_group_api`` is 'nova',
    falls back to neutron when neutron security groups are enabled,
    and raises NotImplementedError otherwise.
    """
    backend = cfg.CONF.security_group_api.lower()
    if backend == 'nova':
        return CloudSecurityGroupNovaAPI()
    if openstack_driver.is_neutron_security_groups():
        return CloudSecurityGroupNeutronAPI()
    raise NotImplementedError()
| apache-2.0 |
benschulz/servo | tests/wpt/web-platform-tests/tools/html5lib/html5lib/tests/test_encoding.py | 445 | 2228 | from __future__ import absolute_import, division, unicode_literals
import os
import unittest
try:
unittest.TestCase.assertEqual
except AttributeError:
unittest.TestCase.assertEqual = unittest.TestCase.assertEquals
from .support import get_data_files, TestData, test_dir, errorMessage
from html5lib import HTMLParser, inputstream
class Html5EncodingTestCase(unittest.TestCase):
    """Tests for inputstream.codecName() encoding-label normalisation."""

    def test_codec_name_a(self):
        self.assertEqual(inputstream.codecName("utf-8"), "utf-8")

    def test_codec_name_b(self):
        # 'utf8' (no dash) is an alias of utf-8.
        self.assertEqual(inputstream.codecName("utf8"), "utf-8")

    def test_codec_name_c(self):
        # Surrounding whitespace is stripped before matching.
        self.assertEqual(inputstream.codecName(" utf8 "), "utf-8")

    def test_codec_name_d(self):
        # ISO-8859-1 labels normalise to windows-1252, matching browser
        # behaviour.
        self.assertEqual(inputstream.codecName("ISO_8859--1"), "windows-1252")
def runParserEncodingTest(data, encoding):
    """Assert that a full parse of *data* detects *encoding*."""
    p = HTMLParser()
    assert p.documentEncoding is None
    # Chardet is disabled so only the spec-defined sniffing is exercised.
    p.parse(data, useChardet=False)
    encoding = encoding.lower().decode("ascii")
    assert encoding == p.documentEncoding, errorMessage(data, encoding, p.documentEncoding)
def runPreScanEncodingTest(data, encoding):
    """Assert that the byte pre-scan alone (no parse) detects *encoding*."""
    stream = inputstream.HTMLBinaryInputStream(data, chardet=False)
    encoding = encoding.lower().decode("ascii")
    # Very crude way to ignore irrelevant tests: the pre-scan only looks
    # at the first numBytesMeta bytes, so longer fixtures don't apply.
    if len(data) > stream.numBytesMeta:
        return
    assert encoding == stream.charEncoding[0], errorMessage(data, encoding, stream.charEncoding[0])
def test_encoding():
    """Nose-style generator: yield parser and pre-scan checks per fixture.

    Each entry in the 'encoding' data files produces two sub-tests, one
    for full-parse detection and one for the byte pre-scan.
    """
    for filename in get_data_files("encoding"):
        tests = TestData(filename, b"data", encoding=None)
        for idx, test in enumerate(tests):
            yield (runParserEncodingTest, test[b'data'], test[b'encoding'])
            yield (runPreScanEncodingTest, test[b'data'], test[b'encoding'])
# Only define the chardet-based test when a detector library ('charade'
# preferred, 'chardet' as fallback) is importable; otherwise just note
# the skip.
try:
    try:
        import charade # flake8: noqa
    except ImportError:
        import chardet # flake8: noqa
except ImportError:
    print("charade/chardet not found, skipping chardet tests")
else:
    def test_chardet():
        """A Big5 sample with no declared encoding must be sniffed as big5."""
        with open(os.path.join(test_dir, "encoding" , "chardet", "test_big5.txt"), "rb") as fp:
            encoding = inputstream.HTMLInputStream(fp.read()).charEncoding
        assert encoding[0].lower() == "big5"
| mpl-2.0 |
toshywoshy/ansible | lib/ansible/plugins/lookup/random_choice.py | 157 | 1573 | # (c) 2013, Michael DeHaan <michael.dehaan@gmail.com>
# (c) 2017 Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: random_choice
author: Michael DeHaan <michael.dehaan@gmail.com>
version_added: "1.1"
short_description: return random element from list
description:
- The 'random_choice' feature can be used to pick something at random. While it's not a load balancer (there are modules for those),
it can somewhat be used as a poor man's load balancer in a MacGyver like situation.
- At a more basic level, they can be used to add chaos and excitement to otherwise predictable automation environments.
"""
EXAMPLES = """
- name: Magic 8 ball for MUDs
debug:
msg: "{{ item }}"
with_random_choice:
- "go through the door"
- "drink from the goblet"
- "press the red button"
- "do nothing"
"""
RETURN = """
_raw:
description:
- random item
"""
import random
from ansible.errors import AnsibleError
from ansible.module_utils._text import to_native
from ansible.plugins.lookup import LookupBase
class LookupModule(LookupBase):
    """Return a single randomly chosen element from the given terms."""

    def run(self, terms, inject=None, **kwargs):
        # An empty/falsy term list is passed straight back unchanged.
        if not terms:
            return terms
        try:
            return [random.choice(terms)]
        except Exception as e:
            raise AnsibleError("Unable to choose random term: %s" % to_native(e))
| gpl-3.0 |
RandallDW/Aruba_plugin | plugins/org.python.pydev.jython/Lib/distutils/unixccompiler.py | 90 | 12314 | """distutils.unixccompiler
Contains the UnixCCompiler class, a subclass of CCompiler that handles
the "typical" Unix-style command-line C compiler:
* macros defined with -Dname[=value]
* macros undefined with -Uname
* include search directories specified with -Idir
* libraries specified with -lllib
* library search directories specified with -Ldir
* compile handled by 'cc' (or similar) executable with -c option:
compiles .c to .o
* link static library handled by 'ar' command (possibly with 'ranlib')
* link shared library handled by 'cc -shared'
"""
__revision__ = "$Id$"
import os, sys, re
from types import StringType, NoneType
from distutils import sysconfig
from distutils.dep_util import newer
from distutils.ccompiler import \
CCompiler, gen_preprocess_options, gen_lib_options
from distutils.errors import \
DistutilsExecError, CompileError, LibError, LinkError
from distutils import log
if sys.platform == 'darwin':
import _osx_support
# XXX Things not currently handled:
# * optimization/debug/warning flags; we just use whatever's in Python's
# Makefile and live with it. Is this adequate? If not, we might
# have to have a bunch of subclasses GNUCCompiler, SGICCompiler,
# SunCCompiler, and I suspect down that road lies madness.
# * even if we don't know a warning flag from an optimization flag,
# we need some way for outsiders to feed preprocessor/compiler/linker
# flags in to us -- eg. a sysadmin might want to mandate certain flags
# via a site config file, or a user might want to set something for
# compiling this module distribution only via the setup.py command
# line, whatever. As long as these options come from something on the
# current system, they can be as system-dependent as they like, and we
# should just happily stuff them into the preprocessor/compiler/linker
# options and carry on.
class UnixCCompiler(CCompiler):
    """CCompiler implementation for the typical Unix-style C compiler.

    NOTE: this module is Python 2 code (old-style raise/except syntax).
    """

    compiler_type = 'unix'

    # These are used by CCompiler in two places: the constructor sets
    # instance attributes 'preprocessor', 'compiler', etc. from them, and
    # 'set_executable()' allows any of these to be set. The defaults here
    # are pretty generic; they will probably have to be set by an outsider
    # (eg. using information discovered by the sysconfig about building
    # Python extensions).
    executables = {'preprocessor' : None,
                   'compiler' : ["cc"],
                   'compiler_so' : ["cc"],
                   'compiler_cxx' : ["cc"],
                   'linker_so' : ["cc", "-shared"],
                   'linker_exe' : ["cc"],
                   'archiver' : ["ar", "-cr"],
                   'ranlib' : None,
                   }

    if sys.platform[:6] == "darwin":
        executables['ranlib'] = ["ranlib"]

    # Needed for the filename generation methods provided by the base
    # class, CCompiler. NB. whoever instantiates/uses a particular
    # UnixCCompiler instance should set 'shared_lib_ext' -- we set a
    # reasonable common default here, but it's not necessarily used on all
    # Unices!
    src_extensions = [".c",".C",".cc",".cxx",".cpp",".m"]
    obj_extension = ".o"
    static_lib_extension = ".a"
    shared_lib_extension = ".so"
    dylib_lib_extension = ".dylib"
    static_lib_format = shared_lib_format = dylib_lib_format = "lib%s%s"
    if sys.platform == "cygwin":
        exe_extension = ".exe"

    def preprocess(self, source,
                   output_file=None, macros=None, include_dirs=None,
                   extra_preargs=None, extra_postargs=None):
        """Run the preprocessor on 'source', writing to 'output_file'."""
        ignore, macros, include_dirs = \
            self._fix_compile_args(None, macros, include_dirs)
        pp_opts = gen_preprocess_options(macros, include_dirs)
        pp_args = self.preprocessor + pp_opts
        if output_file:
            pp_args.extend(['-o', output_file])
        if extra_preargs:
            pp_args[:0] = extra_preargs
        if extra_postargs:
            pp_args.extend(extra_postargs)
        pp_args.append(source)
        # We need to preprocess: either we're being forced to, or we're
        # generating output to stdout, or there's a target output file and
        # the source file is newer than the target (or the target doesn't
        # exist).
        if self.force or output_file is None or newer(source, output_file):
            if output_file:
                self.mkpath(os.path.dirname(output_file))
            try:
                self.spawn(pp_args)
            except DistutilsExecError, msg:
                raise CompileError, msg

    def _compile(self, obj, src, ext, cc_args, extra_postargs, pp_opts):
        """Compile one source file 'src' to object file 'obj'."""
        compiler_so = self.compiler_so
        if sys.platform == 'darwin':
            # Strip/adjust flags (e.g. -arch, -isysroot) for the local SDK.
            compiler_so = _osx_support.compiler_fixup(compiler_so,
                                                      cc_args + extra_postargs)
        try:
            self.spawn(compiler_so + cc_args + [src, '-o', obj] +
                       extra_postargs)
        except DistutilsExecError, msg:
            raise CompileError, msg

    def create_static_lib(self, objects, output_libname,
                          output_dir=None, debug=0, target_lang=None):
        """Archive 'objects' into a static library with 'ar' (+ ranlib)."""
        objects, output_dir = self._fix_object_args(objects, output_dir)
        output_filename = \
            self.library_filename(output_libname, output_dir=output_dir)
        if self._need_link(objects, output_filename):
            self.mkpath(os.path.dirname(output_filename))
            self.spawn(self.archiver +
                       [output_filename] +
                       objects + self.objects)
            # Not many Unices required ranlib anymore -- SunOS 4.x is, I
            # think the only major Unix that does. Maybe we need some
            # platform intelligence here to skip ranlib if it's not
            # needed -- or maybe Python's configure script took care of
            # it for us, hence the check for leading colon.
            if self.ranlib:
                try:
                    self.spawn(self.ranlib + [output_filename])
                except DistutilsExecError, msg:
                    raise LibError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    def link(self, target_desc, objects,
             output_filename, output_dir=None, libraries=None,
             library_dirs=None, runtime_library_dirs=None,
             export_symbols=None, debug=0, extra_preargs=None,
             extra_postargs=None, build_temp=None, target_lang=None):
        """Link 'objects' into an executable or shared object."""
        objects, output_dir = self._fix_object_args(objects, output_dir)
        libraries, library_dirs, runtime_library_dirs = \
            self._fix_lib_args(libraries, library_dirs, runtime_library_dirs)
        lib_opts = gen_lib_options(self, library_dirs, runtime_library_dirs,
                                   libraries)
        if type(output_dir) not in (StringType, NoneType):
            raise TypeError, "'output_dir' must be a string or None"
        if output_dir is not None:
            output_filename = os.path.join(output_dir, output_filename)
        if self._need_link(objects, output_filename):
            ld_args = (objects + self.objects +
                       lib_opts + ['-o', output_filename])
            if debug:
                ld_args[:0] = ['-g']
            if extra_preargs:
                ld_args[:0] = extra_preargs
            if extra_postargs:
                ld_args.extend(extra_postargs)
            self.mkpath(os.path.dirname(output_filename))
            try:
                if target_desc == CCompiler.EXECUTABLE:
                    linker = self.linker_exe[:]
                else:
                    linker = self.linker_so[:]
                if target_lang == "c++" and self.compiler_cxx:
                    # skip over environment variable settings if /usr/bin/env
                    # is used to set up the linker's environment.
                    # This is needed on OSX. Note: this assumes that the
                    # normal and C++ compiler have the same environment
                    # settings.
                    i = 0
                    if os.path.basename(linker[0]) == "env":
                        i = 1
                        while '=' in linker[i]:
                            i = i + 1
                    linker[i] = self.compiler_cxx[i]
                if sys.platform == 'darwin':
                    linker = _osx_support.compiler_fixup(linker, ld_args)
                self.spawn(linker + ld_args)
            except DistutilsExecError, msg:
                raise LinkError, msg
        else:
            log.debug("skipping %s (up-to-date)", output_filename)

    # -- Miscellaneous methods -----------------------------------------
    # These are all used by the 'gen_lib_options() function, in
    # ccompiler.py.

    def library_dir_option(self, dir):
        return "-L" + dir

    def _is_gcc(self, compiler_name):
        return "gcc" in compiler_name or "g++" in compiler_name

    def runtime_library_dir_option(self, dir):
        # XXX Hackish, at the very least. See Python bug #445902:
        # http://sourceforge.net/tracker/index.php
        # ?func=detail&aid=445902&group_id=5470&atid=105470
        # Linkers on different platforms need different options to
        # specify that directories need to be added to the list of
        # directories searched for dependencies when a dynamic library
        # is sought. GCC has to be told to pass the -R option through
        # to the linker, whereas other compilers just know this.
        # Other compilers may need something slightly different. At
        # this time, there's no way to determine this information from
        # the configuration data stored in the Python installation, so
        # we use this hack.
        compiler = os.path.basename(sysconfig.get_config_var("CC"))
        if sys.platform[:6] == "darwin":
            # MacOSX's linker doesn't understand the -R flag at all
            return "-L" + dir
        elif sys.platform[:5] == "hp-ux":
            if self._is_gcc(compiler):
                return ["-Wl,+s", "-L" + dir]
            return ["+s", "-L" + dir]
        elif sys.platform[:7] == "irix646" or sys.platform[:6] == "osf1V5":
            return ["-rpath", dir]
        elif self._is_gcc(compiler):
            return "-Wl,-R" + dir
        else:
            return "-R" + dir

    def library_option(self, lib):
        return "-l" + lib

    def find_library_file(self, dirs, lib, debug=0):
        """Search 'dirs' for library 'lib'; return the path or None."""
        shared_f = self.library_filename(lib, lib_type='shared')
        dylib_f = self.library_filename(lib, lib_type='dylib')
        static_f = self.library_filename(lib, lib_type='static')
        if sys.platform == 'darwin':
            # On OSX users can specify an alternate SDK using
            # '-isysroot', calculate the SDK root if it is specified
            # (and use it further on)
            cflags = sysconfig.get_config_var('CFLAGS')
            m = re.search(r'-isysroot\s+(\S+)', cflags)
            if m is None:
                sysroot = '/'
            else:
                sysroot = m.group(1)
        for dir in dirs:
            shared = os.path.join(dir, shared_f)
            dylib = os.path.join(dir, dylib_f)
            static = os.path.join(dir, static_f)
            if sys.platform == 'darwin' and (
                dir.startswith('/System/') or (
                dir.startswith('/usr/') and not dir.startswith('/usr/local/'))):
                # System directories live under the SDK root on OSX.
                shared = os.path.join(sysroot, dir[1:], shared_f)
                dylib = os.path.join(sysroot, dir[1:], dylib_f)
                static = os.path.join(sysroot, dir[1:], static_f)
            # We're second-guessing the linker here, with not much hard
            # data to go on: GCC seems to prefer the shared library, so I'm
            # assuming that *all* Unix C compilers do. And of course I'm
            # ignoring even GCC's "-static" option. So sue me.
            if os.path.exists(dylib):
                return dylib
            elif os.path.exists(shared):
                return shared
            elif os.path.exists(static):
                return static
        # Oops, didn't find it in *any* of 'dirs'
        return None
| epl-1.0 |
stefwalter/sssd | src/config/SSSDConfig/ipachangeconf.py | 7 | 19190 | #
# ipachangeconf - configuration file manipulation classes and functions
# partially based on authconfig code
# Copyright (c) 1999-2007 Red Hat, Inc.
# Author: Simo Sorce <ssorce@redhat.com>
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 only
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import fcntl
import os
import string
import time
import shutil
import re
def openLocked(filename, perms, create = True):
    """Open *filename* read-write and take an exclusive fcntl lock.

    Returns a file object wrapping the locked descriptor.  On any OSError
    the partially opened descriptor is closed and IOError is raised.
    (Python 2 code: old-style except-clause tuple unpacking.)
    """
    fd = -1
    flags = os.O_RDWR
    if create:
        flags = flags | os.O_CREAT
    try:
        fd = os.open(filename, flags, perms)
        # Blocks until an exclusive lock on the whole file is held.
        fcntl.lockf(fd, fcntl.LOCK_EX)
    except OSError, (errno, strerr):
        if fd != -1:
            try:
                os.close(fd)
            except OSError:
                pass
        raise IOError(errno, strerr)
    return os.fdopen(fd, "r+")
#TODO: add subsection as a concept
# (ex. REALM.NAME = { foo = x bar = y } )
#TODO: put section delimiters as separating element of the list
# so that we can process multiple sections in one go
#TODO: add a comment all but provided options as a section option
class IPAChangeConf:
    """Parse, merge and rewrite ini-style configuration files.

    Configuration is represented as a list of option dicts of the form
    {'name': ..., 'type': 'section'|'subsection'|'option'|'comment'|'empty',
    'value': ...}; entries fed to the merge methods additionally carry an
    'action' key ('set', 'comment' or 'remove').
    (Python 2 code: old-style raise statements and octal literals.)
    """

    def __init__(self, name):
        self.progname = name
        self.indent = ("","","")
        self.assign = (" = ","=")
        # dassign/dcomment/deol are the *default* (first) variants used
        # when writing; the full tuples are accepted when parsing.
        self.dassign = self.assign[0]
        self.comment = ("#",)
        self.dcomment = self.comment[0]
        self.eol = ("\n",)
        self.deol = self.eol[0]
        self.sectnamdel = ("[","]")
        self.subsectdel = ("{","}")
        self.backup_suffix = ".ipabkp"

    def setProgName(self, name):
        self.progname = name

    def setIndent(self, indent):
        if type(indent) is tuple:
            self.indent = indent
        elif type(indent) is str:
            self.indent = (indent, )
        else:
            raise ValueError, 'Indent must be a list of strings'

    def setOptionAssignment(self, assign):
        if type(assign) is tuple:
            self.assign = assign
        else:
            self.assign = (assign, )
        self.dassign = self.assign[0]

    def setCommentPrefix(self, comment):
        if type(comment) is tuple:
            self.comment = comment
        else:
            self.comment = (comment, )
        self.dcomment = self.comment[0]

    def setEndLine(self, eol):
        if type(eol) is tuple:
            self.eol = eol
        else:
            self.eol = (eol, )
        self.deol = self.eol[0]

    def setSectionNameDelimiters(self, delims):
        self.sectnamdel = delims

    def setSubSectionDelimiters(self, delims):
        self.subsectdel = delims

    def matchComment(self, line):
        """Return the comment text if *line* is a comment, else False."""
        for v in self.comment:
            if line.lstrip().startswith(v):
                return line.lstrip()[len(v):]
        return False

    def matchEmpty(self, line):
        if line.strip() == "":
            return True
        return False

    def matchSection(self, line):
        """Return the section name if *line* is a [section] header."""
        # Collapse all whitespace so '[ foo ]' matches too.
        cl = "".join(line.strip().split())
        if len(self.sectnamdel) != 2:
            return False
        if not cl.startswith(self.sectnamdel[0]):
            return False
        if not cl.endswith(self.sectnamdel[1]):
            return False
        return cl[len(self.sectnamdel[0]):-len(self.sectnamdel[1])]

    def matchSubSection(self, line):
        """Return the subsection name if *line* opens one (name = {)."""
        if self.matchComment(line):
            return False
        parts = line.split(self.dassign, 1)
        if len(parts) < 2:
            return False
        if parts[1].strip() == self.subsectdel[0]:
            return parts[0].strip()
        return False

    def matchSubSectionEnd(self, line):
        if self.matchComment(line):
            return False
        if line.strip() == self.subsectdel[1]:
            return True
        return False

    def getSectionLine(self, section):
        if len(self.sectnamdel) != 2:
            return section
        return self.sectnamdel[0]+section+self.sectnamdel[1]+self.deol

    def dump(self, options, level=0):
        """Serialise an options tree back to configuration file text."""
        output = ""
        # Clamp the indent level to the deepest configured indent string.
        if level >= len(self.indent):
            level = len(self.indent)-1
        for o in options:
            if o['type'] == "section":
                output += self.sectnamdel[0]+o['name']+self.sectnamdel[1]+self.deol
                output += self.dump(o['value'], level+1)
                continue
            if o['type'] == "subsection":
                output += self.indent[level]+o['name']+self.dassign+self.subsectdel[0]+self.deol
                output += self.dump(o['value'], level+1)
                output += self.indent[level]+self.subsectdel[1]+self.deol
                continue
            if o['type'] == "option":
                output += self.indent[level]+o['name']+self.dassign+o['value']+self.deol
                continue
            if o['type'] == "comment":
                output += self.dcomment+o['value']+self.deol
                continue
            if o['type'] == "empty":
                output += self.deol
                continue
            raise SyntaxError, 'Unknown type: ['+o['type']+']'
        return output

    def parseLine(self, line):
        """Parse one line into an option/comment/empty dict."""
        if self.matchEmpty(line):
            return {'name':'empty', 'type':'empty'}
        value = self.matchComment(line)
        if value:
            return {'name':'comment', 'type':'comment', 'value':value.rstrip()}
        parts = line.split(self.dassign, 1)
        if len(parts) < 2:
            raise SyntaxError, 'Syntax Error: Unknown line format'
        return {'name':parts[0].strip(), 'type':'option', 'value':parts[1].rstrip()}

    def findOpts(self, opts, type, name, exclude_sections=False):
        """Find the first option of (type, name); return (index, opt).

        With exclude_sections, stop at the first (sub)section boundary
        and return (index, None) -- used when option lookups must not
        cross into a different scope.
        """
        num = 0
        for o in opts:
            if o['type'] == type and o['name'] == name:
                return (num, o)
            if exclude_sections and (o['type'] == "section" or o['type'] == "subsection"):
                return (num, None)
            num += 1
        return (num, None)

    def commentOpts(self, inopts, level = 0):
        """Return a copy of *inopts* with every entry turned into comments."""
        opts = []
        if level >= len(self.indent):
            level = len(self.indent)-1
        for o in inopts:
            if o['type'] == 'section':
                no = self.commentOpts(o['value'], level+1)
                val = self.dcomment+self.sectnamdel[0]+o['name']+self.sectnamdel[1]
                opts.append({'name':'comment', 'type':'comment', 'value':val})
                for n in no:
                    opts.append(n)
                continue
            if o['type'] == 'subsection':
                no = self.commentOpts(o['value'], level+1)
                val = self.indent[level]+o['name']+self.dassign+self.subsectdel[0]
                opts.append({'name':'comment', 'type':'comment', 'value':val})
                for n in no:
                    opts.append(n)
                val = self.indent[level]+self.subsectdel[1]
                opts.append({'name':'comment', 'type':'comment', 'value':val})
                continue
            if o['type'] == 'option':
                val = self.indent[level]+o['name']+self.dassign+o['value']
                opts.append({'name':'comment', 'type':'comment', 'value':val})
                continue
            if o['type'] == 'comment':
                opts.append(o)
                continue
            if o['type'] == 'empty':
                opts.append({'name':'comment', 'type':'comment', 'value':''})
                continue
            raise SyntaxError, 'Unknown type: ['+o['type']+']'
        return opts

    def mergeOld(self, oldopts, newopts):
        """First merge pass: apply newopts actions onto the old tree."""
        opts = []
        for o in oldopts:
            if o['type'] == "section" or o['type'] == "subsection":
                (num, no) = self.findOpts(newopts, o['type'], o['name'])
                if not no:
                    opts.append(o)
                    continue
                if no['action'] == "set":
                    mo = self.mergeOld(o['value'], no['value'])
                    opts.append({'name':o['name'], 'type':o['type'], 'value':mo})
                    continue
                if no['action'] == "comment":
                    co = self.commentOpts(o['value'])
                    for c in co:
                        opts.append(c)
                    continue
                if no['action'] == "remove":
                    continue
                raise SyntaxError, 'Unknown action: ['+no['action']+']'
            if o['type'] == "comment" or o['type'] == "empty":
                opts.append(o)
                continue
            if o['type'] == "option":
                (num, no) = self.findOpts(newopts, 'option', o['name'], True)
                if not no:
                    opts.append(o)
                    continue
                if no['action'] == 'comment' or no['action'] == 'remove':
                    # A non-None value in the action restricts it to
                    # options that currently have that exact value.
                    if no['value'] != None and o['value'] != no['value']:
                        opts.append(o)
                        continue
                    if no['action'] == 'comment':
                        opts.append({'name':'comment', 'type':'comment',
                                     'value':self.dcomment+o['name']+self.dassign+o['value']})
                    continue
                if no['action'] == 'set':
                    opts.append(no)
                    continue
                raise SyntaxError, 'Unknown action: ['+o['action']+']'
            raise SyntaxError, 'Unknown type: ['+o['type']+']'
        return opts

    def mergeNew(self, opts, newopts):
        """Second merge pass: insert entries that only exist in newopts.

        Mutates *opts* in place.
        """
        cline = 0
        for no in newopts:
            if no['type'] == "section" or no['type'] == "subsection":
                (num, o) = self.findOpts(opts, no['type'], no['name'])
                if not o:
                    if no['action'] == 'set':
                        opts.append(no)
                    continue
                if no['action'] == "set":
                    self.mergeNew(o['value'], no['value'])
                cline = num+1
                continue
            if no['type'] == "option":
                (num, o) = self.findOpts(opts, no['type'], no['name'], True)
                if not o:
                    if no['action'] == 'set':
                        opts.append(no)
                    continue
                cline = num+1
                continue
            if no['type'] == "comment" or no['type'] == "empty":
                opts.insert(cline, no)
                cline += 1
                continue
            raise SyntaxError, 'Unknown type: ['+no['type']+']'

    def merge(self, oldopts, newopts):
        """Merge *newopts* actions into *oldopts* and return the result."""
        #Use a two pass strategy
        #First we create a new opts tree from oldopts removing/commenting
        # the options as indicated by the contents of newopts
        #Second we fill in the new opts tree with options as indicated
        # in the newopts tree (this is because entire (sub)sections may
        # exist in the newopts that do not exist in oldopts)
        opts = self.mergeOld(oldopts, newopts)
        self.mergeNew(opts, newopts)
        return opts

    #TODO: Make parse() recursive?
    def parse(self, f):
        """Parse an open file object into an options tree."""
        opts = []
        sectopts = []
        section = None
        subsectopts = []
        subsection = None
        curopts = opts
        fatheropts = opts
        # Read in the old file.
        for line in f:
            # It's a section start.
            value = self.matchSection(line)
            if value:
                # Flush the previous section before starting a new one.
                if section is not None:
                    opts.append({'name':section, 'type':'section', 'value':sectopts})
                sectopts = []
                curopts = sectopts
                fatheropts = sectopts
                section = value
                continue
            # It's a subsection start.
            value = self.matchSubSection(line)
            if value:
                if subsection is not None:
                    raise SyntaxError, 'nested subsections are not supported yet'
                subsectopts = []
                curopts = subsectopts
                subsection = value
                continue
            value = self.matchSubSectionEnd(line)
            if value:
                if subsection is None:
                    raise SyntaxError, 'Unmatched end subsection terminator found'
                fatheropts.append({'name':subsection, 'type':'subsection', 'value':subsectopts})
                subsection = None
                curopts = fatheropts
                continue
            # Copy anything else as is.
            curopts.append(self.parseLine(line))
        #Add last section if any
        # NOTE(review): 'is not 0' relies on CPython small-int interning;
        # it behaves like '!= 0' here but should read 'len(sectopts) != 0'.
        if len(sectopts) is not 0:
            opts.append({'name':section, 'type':'section', 'value':sectopts})
        return opts

    # Write settings to configuration file
    # file is a path
    # options is a set of dictionaries in the form:
    # [{'name': 'foo', 'value': 'bar', 'action': 'set/comment'}]
    # section is a section name like 'global'
    def changeConf(self, file, newopts):
        """Merge *newopts* into an existing file (backing it up first)."""
        autosection = False
        savedsection = None
        done = False
        output = ""
        f = None
        try:
            #Do not catch an unexisting file error, we want to fail in that case
            shutil.copy2(file, file+self.backup_suffix)
            f = openLocked(file, 0644)
            oldopts = self.parse(f)
            options = self.merge(oldopts, newopts)
            output = self.dump(options)
            # Write it out and close it.
            f.seek(0)
            f.truncate(0)
            f.write(output)
        finally:
            try:
                if f:
                    f.close()
            except IOError:
                pass
        return True

    # Write settings to new file, backup old
    # file is a path
    # options is a set of dictionaries in the form:
    # [{'name': 'foo', 'value': 'bar', 'action': 'set/comment'}]
    # section is a section name like 'global'
    def newConf(self, file, options):
        """Write *options* to *file* from scratch, backing up any old copy."""
        autosection = False
        savedsection = None
        done = False
        output = ""
        f = None
        try:
            try:
                shutil.copy2(file, file+self.backup_suffix)
            except IOError, err:
                if err.errno == 2:
                    # The orign file did not exist
                    pass
            f = openLocked(file, 0644)
            # Truncate any existing content before writing.
            f.seek(0)
            f.truncate(0)
            output = self.dump(options)
            f.write(output)
        finally:
            try:
                if f:
                    f.close()
            except IOError:
                pass
        return True
# A SSSD-specific subclass of IPAChangeConf
class SSSDChangeConf(IPAChangeConf):
    """IPAChangeConf subclass tuned for sssd.conf ('=' assignment, '#'/';' comments).

    Options are represented as dicts: {'name', 'type', 'value'} with type one
    of 'empty', 'comment', 'option', 'section'.
    """

    # Matches "key = value" lines; key may not start with ':', '=' or space.
    OPTCRE = re.compile(
        r'(?P<option>[^:=\s][^:=]*)'  # very permissive!
        r'\s*=\s*'                    # any number of space/tab,
                                      # followed by separator
                                      # followed by any # space/tab
        r'(?P<value>.*)$'             # everything up to eol
    )

    def __init__(self):
        IPAChangeConf.__init__(self, "SSSD")
        self.comment = ("#",";")
        self.backup_suffix = ".bak"
        self.opts = []

    def parseLine(self, line):
        """
        Overrides IPAChangeConf parseLine so that lines are split
        using any separator in self.assign, not just the default one.
        Raises SyntaxError (Python 2 raise syntax) on unrecognized lines.
        """
        if self.matchEmpty(line):
            return {'name':'empty', 'type':'empty'}
        value = self.matchComment(line)
        if value:
            return {'name':'comment', 'type':'comment', 'value':value.rstrip()}
        mo = self.OPTCRE.match(line)
        if not mo:
            raise SyntaxError, 'Syntax Error: Unknown line format'
        try:
            name, value = mo.group('option', 'value')
        except IndexError:
            raise SyntaxError, 'Syntax Error: Unknown line format'
        return {'name':name.strip(), 'type':'option', 'value':value.strip()}

    def readfp(self, fd):
        # Append the parsed option tree from an open file object.
        self.opts.extend(self.parse(fd))

    def read(self, filename):
        # Convenience wrapper around readfp() for a path.
        fd = open(filename, 'r')
        self.readfp(fd)
        fd.close()

    def get(self, section, name):
        # Returns the option value, or None if not found.
        index, item = self.get_option_index(section, name)
        if item:
            return item['value']

    def set(self, section, name, value):
        # Merge a single option assignment into the in-memory tree.
        modkw = { 'type'  : 'section',
                  'name'  : section,
                  'value' : [{
                      'type'  : 'option',
                      'name'  : name,
                      'value' : value,
                      'action': 'set',
                  }],
                  'action': 'set',
                }
        self.opts = self.merge(self.opts, [ modkw ])

    def add_section(self, name, optkw, index=0):
        # Insert a new section (followed by a blank line) at position index.
        optkw.append({'type':'empty', 'value':'empty'})
        addkw = { 'type'  : 'section',
                  'name'  : name,
                  'value' : optkw,
                }
        self.opts.insert(index, addkw)

    def delete_section(self, name):
        self.delete_option('section', name)

    def sections(self):
        return [ o for o in self.opts if o['type'] == 'section' ]

    def has_section(self, section):
        return len([ o for o in self.opts if o['type'] == 'section' if o['name'] == section ]) > 0

    def options(self, section):
        # Returns the section's option list, or None if the section is absent.
        for opt in self.opts:
            if opt['type'] == 'section' and opt['name'] == section:
                return opt['value']

    def delete_option(self, type, name, exclude_sections=False):
        # NOTE(review): exclude_sections is accepted but not forwarded here.
        return self.delete_option_subtree(self.opts, type, name)

    def delete_option_subtree(self, subtree, type, name, exclude_sections=False):
        # Remove the first matching node in subtree; returns its index
        # (or the not-found index from findOpts if nothing matched).
        index, item = self.findOpts(subtree, type, name, exclude_sections)
        if item:
            del subtree[index]
        return index

    def has_option(self, section, name):
        index, item = self.get_option_index(section, name)
        if index != -1 and item != None:
            return True
        return False

    def strip_comments_empty(self, optlist):
        # Filter out comment/empty nodes, preserving order.
        retlist = []
        for opt in optlist:
            if opt['type'] in ('comment', 'empty'):
                continue
            retlist.append(opt)
        return retlist

    def get_option_index(self, parent_name, name, type='option'):
        # Look up `name` inside section `parent_name`, or at top level when
        # parent_name is falsy. Returns (index, item) or (-1, None).
        subtree = None
        if parent_name:
            pindex, pdata = self.findOpts(self.opts, 'section', parent_name)
            if not pdata:
                return (-1, None)
            subtree = pdata['value']
        else:
            subtree = self.opts
        return self.findOpts(subtree, type, name)
| gpl-3.0 |
SamuelDauzon/Improllow-up | users/views.py | 1 | 4768 | # coding:utf-8
import json
import datetime
import csv
from django.shortcuts import render
from django.contrib.auth import authenticate, login, logout
from django.http import HttpResponseRedirect, HttpResponse, JsonResponse
from django.core.urlresolvers import reverse
from django.shortcuts import get_object_or_404
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from django.db.models import Q, Count, Sum
from django.core import serializers
from tasks.models import Task
from .models import UserProfile
from .forms import FormConnection, TimeRangeForm
def connection(request):
    """Authenticate a user from the login form.

    On success, redirect to the URL name given in ``?next=`` (if any) or to
    the user list; otherwise re-render the form.
    """
    form = FormConnection()
    if request.POST:
        form = FormConnection(request.POST)
        if form.is_valid():
            user = authenticate(
                username=form.cleaned_data["username"],
                password=form.cleaned_data["password"],
            )
            if user:
                login(request, user)
                next_name = request.GET.get('next')
                if next_name is not None:
                    target = reverse(next_name)
                else:
                    target = reverse('users:list')
                return HttpResponseRedirect(target)
    return render(request, 'users/connection.html', {'form': form})
def logout_user(request):
    """End the current session and redirect to the login page."""
    logout(request)
    login_url = reverse('login')
    return HttpResponseRedirect(login_url)
def detail(request, pk):
    """Profile page: paginated related tasks plus the ten next tasks to do."""
    userprofile = get_object_or_404(UserProfile, pk=pk)

    # Tasks either assigned to or created by this user.
    related = Q(userprofile=userprofile) | Q(user_add=userprofile)
    task_list = Task.objects.filter(related)

    paginator = Paginator(task_list, 10)
    page = request.GET.get('page')
    try:
        task_list_page = paginator.page(page)
    except PageNotAnInteger:
        # Missing/garbage page parameter: show the first page.
        task_list_page = paginator.page(1)
    except EmptyPage:
        # Page number past the end: show the last page.
        task_list_page = paginator.page(paginator.num_pages)

    task_to_do = Task.objects.filter(
        userprofile=userprofile,
        execution_date__isnull=True,
    ).order_by('-execution_date')[:10]

    context = {
        'userprofile': userprofile,
        'task_list_page': task_list_page,
        'task_to_do': task_to_do,
        'form': TimeRangeForm(),
    }
    return render(request, 'users/detail.html', context)
def repartition_task_base(userprofile, start, end):
    """Executed tasks of ``userprofile`` with a positive duration.

    When both ``start`` and ``end`` are given (ISO "YYYY-MM-DD" strings) the
    queryset is restricted to that inclusive date range; if either is missing
    no range filter is applied.

    Raises ValueError if a provided date string does not match the format.
    """
    data_list = Task.objects.filter(
        userprofile=userprofile,
        duration__gt=0,
        execution_date__isnull=False
    )
    if start and end:
        # Parse (and thereby validate) the dates, then actually use the
        # parsed values in the filter — previously they were computed but
        # the raw strings were passed to the ORM instead.
        start_date = datetime.datetime.strptime(start, "%Y-%m-%d")
        end_date = datetime.datetime.strptime(end, "%Y-%m-%d")
        data_list = data_list.filter(
            execution_date__gte=start_date,
            execution_date__lte=end_date,
        )
    return data_list
def repartition_project(request, pk, start=None, end=None):
    """Per-project total durations for a user, returned as JSON."""
    userprofile = get_object_or_404(UserProfile, pk=pk)
    data_list = repartition_task_base(userprofile, start, end)
    # Group by project name and sum durations; the empty order_by() clears
    # any default ordering so the GROUP BY is exactly project__name.
    data_list = data_list.values('project__name').annotate(duration_sum=Sum('duration')).order_by()
    data_list = list(data_list)
    # NOTE(review): json.dumps() inside JsonResponse double-encodes — the
    # response body is a JSON *string*, so clients must decode twice.
    # JsonResponse(data_list, safe=False) would be correct, but verify the
    # JavaScript consumer before changing the wire format.
    return JsonResponse(json.dumps(data_list), safe=False)
def export_csv(request, pk, start=None, end=None):
    """Export a user's executed tasks as a ';'-separated CSV attachment.

    ``start``/``end`` are optional ISO "YYYY-MM-DD" strings limiting the
    export window (inclusive). Previously they were parsed unconditionally,
    so calling the view without dates crashed on ``strptime(None, ...)``;
    now the range filter is only applied when both are provided.
    """
    userprofile = get_object_or_404(UserProfile, pk=pk)
    data_list = Task.objects.filter(
        userprofile=userprofile,
        duration__gt=0,
    ).order_by('execution_date')
    if start and end:
        # Validate the date format, then restrict the window.
        start_date = datetime.datetime.strptime(start, "%Y-%m-%d")
        end_date = datetime.datetime.strptime(end, "%Y-%m-%d")
        data_list = data_list.filter(
            execution_date__gte=start_date,
            execution_date__lte=end_date,
        )
    response = HttpResponse(content_type='text/csv')
    response['Content-Disposition'] = 'attachment; filename="'+str(userprofile)+'.csv"'
    writer = csv.writer(response, delimiter=';')
    field_names = ['Date', 'Projet', 'Tâche', 'Durée', 'Type']
    writer.writerow(field_names)
    for obj in data_list:
        row_list = [
            str(obj.execution_date),
            str(obj.project),
            obj.name,
            str(obj.duration),
            str(obj.task_type),
        ]
        # Render stringified missing values ('None') as empty cells.
        writer.writerow(['' if cell == 'None' else cell for cell in row_list])
    return response
def repartition_temps(request, pk, start=None, end=None):
    """Per-task-type total durations for a user, returned as JSON."""
    userprofile = get_object_or_404(UserProfile, pk=pk)
    data_list = repartition_task_base(userprofile, start, end)
    # Group by task type and sum durations (empty order_by() clears ordering).
    data_list = data_list.values('task_type__name').annotate(duration_sum=Sum('duration')).order_by()
    data_list = list(data_list)
    # NOTE(review): same double-encoding as repartition_project —
    # json.dumps() inside JsonResponse wraps JSON in a JSON string.
    return JsonResponse(json.dumps(data_list), safe=False)
| mit |
drjeep/django | django/contrib/gis/db/backends/oracle/schema.py | 608 | 4050 | from django.contrib.gis.db.models.fields import GeometryField
from django.db.backends.oracle.schema import DatabaseSchemaEditor
from django.db.backends.utils import truncate_name
class OracleGISSchemaEditor(DatabaseSchemaEditor):
    """Schema editor that maintains Oracle Spatial metadata and indexes.

    Oracle requires a row in USER_SDO_GEOM_METADATA (and usually a spatial
    index) for every geometry column; this editor queues that SQL while
    columns are defined and runs it after the table DDL.
    """

    sql_add_geometry_metadata = ("""
        INSERT INTO USER_SDO_GEOM_METADATA
            ("TABLE_NAME", "COLUMN_NAME", "DIMINFO", "SRID")
        VALUES (
            %(table)s,
            %(column)s,
            MDSYS.SDO_DIM_ARRAY(
                MDSYS.SDO_DIM_ELEMENT('LONG', %(dim0)s, %(dim2)s, %(tolerance)s),
                MDSYS.SDO_DIM_ELEMENT('LAT', %(dim1)s, %(dim3)s, %(tolerance)s)
            ),
            %(srid)s
        )""")

    sql_add_spatial_index = 'CREATE INDEX %(index)s ON %(table)s(%(column)s) INDEXTYPE IS MDSYS.SPATIAL_INDEX'
    sql_drop_spatial_index = 'DROP INDEX %(index)s'
    sql_clear_geometry_table_metadata = 'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s'
    sql_clear_geometry_field_metadata = (
        'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s '
        'AND COLUMN_NAME = %(column)s'
    )

    def __init__(self, *args, **kwargs):
        super(OracleGISSchemaEditor, self).__init__(*args, **kwargs)
        # Deferred metadata/index statements, flushed by run_geometry_sql().
        self.geometry_sql = []

    def geo_quote_name(self, name):
        return self.connection.ops.geo_quote_name(name)

    def column_sql(self, model, field, include_default=False):
        # Let the base class build the column DDL; for geometry fields also
        # queue the metadata insert (and spatial index) to run afterwards.
        column_sql = super(OracleGISSchemaEditor, self).column_sql(model, field, include_default)
        if isinstance(field, GeometryField):
            db_table = model._meta.db_table
            self.geometry_sql.append(
                self.sql_add_geometry_metadata % {
                    'table': self.geo_quote_name(db_table),
                    'column': self.geo_quote_name(field.column),
                    'dim0': field._extent[0],
                    'dim1': field._extent[1],
                    'dim2': field._extent[2],
                    'dim3': field._extent[3],
                    'tolerance': field._tolerance,
                    'srid': field.srid,
                }
            )
            if field.spatial_index:
                self.geometry_sql.append(
                    self.sql_add_spatial_index % {
                        'index': self.quote_name(self._create_spatial_index_name(model, field)),
                        'table': self.quote_name(db_table),
                        'column': self.quote_name(field.column),
                    }
                )
        return column_sql

    def create_model(self, model):
        super(OracleGISSchemaEditor, self).create_model(model)
        self.run_geometry_sql()

    def delete_model(self, model):
        super(OracleGISSchemaEditor, self).delete_model(model)
        # Remove all spatial metadata rows for the dropped table.
        self.execute(self.sql_clear_geometry_table_metadata % {
            'table': self.geo_quote_name(model._meta.db_table),
        })

    def add_field(self, model, field):
        super(OracleGISSchemaEditor, self).add_field(model, field)
        self.run_geometry_sql()

    def remove_field(self, model, field):
        # Metadata and index must be cleared before the column is dropped.
        if isinstance(field, GeometryField):
            self.execute(self.sql_clear_geometry_field_metadata % {
                'table': self.geo_quote_name(model._meta.db_table),
                'column': self.geo_quote_name(field.column),
            })
            if field.spatial_index:
                self.execute(self.sql_drop_spatial_index % {
                    'index': self.quote_name(self._create_spatial_index_name(model, field)),
                })
        super(OracleGISSchemaEditor, self).remove_field(model, field)

    def run_geometry_sql(self):
        # Flush and clear the queued metadata/index statements.
        for sql in self.geometry_sql:
            self.execute(sql)
        self.geometry_sql = []

    def _create_spatial_index_name(self, model, field):
        # Oracle doesn't allow object names > 30 characters. Use this scheme
        # instead of self._create_index_name() for backwards compatibility.
        return truncate_name('%s_%s_id' % (model._meta.db_table, field.column), 30)
| bsd-3-clause |
Fougere87/unsec | test_art.py | 1 | 2473 | import os
import glob
from unsec import Email
from unsec import Tools
from unsec import Collection
from unsec import Clustering
from sklearn import cluster
import matplotlib.pyplot as plt
import matplotlib as mpl
from mpl_toolkits.mplot3d import Axes3D
from sklearn import decomposition
# Exploratory script: TF-IDF-vectorize the text files in dataset_test/,
# cluster them with k-means (k=2), project to 3D with PCA and plot.
# Tools.vectorize("data/bioinfo_2014-01")
# coll = Collection.Email_Collection("data/bioinfo_2014-01/*")

# Load every *.txt article as a list of its lines.
articles = glob.glob("dataset_test/*txt")
collec = []
art_list = []
for art in articles :
    # NOTE(review): file handle is never closed — consider a with-block.
    art_file = open(art, "r")
    art_list.append(art)
    collec.append(art_file.readlines())

# for e in Tools.words_in_collection(coll.all_cleaned_bodies) :
#     print(e)
#     input("plop")
# print(len(Tools.words_in_collection(coll.all_cleaned_body)))
# print(Tools.term_freq(coll[0]))
# print(Tools.invert_doc_freq(coll.all_cleaned_subjects))
# matrix_sub = Tools.vectorize_tf_idf(coll.all_cleaned_subjects) # create data matrix
# matrix_bod = Tools.vectorize_tf_idf(coll.all_cleaned_bodies)

# Clean each article; note str(e) stringifies the *list* of lines,
# so the cleaner receives the repr of the list — TODO confirm intended.
coll2 =[]
for e in collec :
    print(str(e))
    coll2.append(Tools.clean(str(e)))

matrix_art = Tools.vectorize_tf_idf(coll2)

# matrixTest = [[4,2,3], [5,3,2], [12,42,54], [4,1,2], [91,87,7], [41,21,31], [51,13,67], [12,2,4], [4,1,2], [31,31,14]]

# Two-cluster k-means over the TF-IDF matrix.
k_means = cluster.KMeans(n_clusters=2) #create k-mean objet with n clusters as param
print("K-mean fitting...")
k_means.fit(matrix_art)
print(k_means.labels_)
print("list des documents : ",Clustering.get_clustered_docs(k_means.labels_, art_list))
# fit_transform refits and returns distances to cluster centers.
mat_dist_sub = k_means.fit_transform(matrix_art)
print(mat_dist_sub)
print(k_means.score(matrix_art))

# k_means.fit(matrix_bod)
# print(k_means.labels_)

# Reduce to 3 principal components for the 3D scatter plot.
pca = decomposition.PCA(n_components=3)
reduced_mat_bod = pca.fit(matrix_art).transform(matrix_art)
print(pca.components_)

Tools.matrix_to_csv(matrix_art, Tools.words_in_collection(coll2), "tfidf_art.csv")
# Tools.vectorize_to_csv(coll, "data.csv")
# clusters = Clustering.get_clustered_docs(k_means.labels_,coll.all_cleaned_subjects)
# [print(e) for e in clusters]
# clusters_files = Clustering.get_clustered_docs(k_means.labels_,coll.files_list)
# [print(e) for e in clusters_files]

fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
x_data = [e[0] for e in reduced_mat_bod]
y_data = [e[1] for e in reduced_mat_bod]
z_data = [e[2] for e in reduced_mat_bod]
ax.scatter(x_data, y_data, z_data, depthshade=True)
plt.show()

# Clustering.kmeans(matrix, 3)
# print(Tools.vectorize_tf_idf(coll)[1])
#print(e.clean_body())
| unlicense |
bguillot/OpenUpgrade | addons/event/res_partner.py | 43 | 1265 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class res_partner(osv.osv):
    """Extend res.partner with an event-speaker flag."""
    _inherit = 'res.partner'
    _columns = {
        # True when the contact can be assigned as a speaker on events.
        'speaker': fields.boolean('Speaker', help="Check this box if this contact is a speaker."),
    }
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
fyffyt/scikit-learn | sklearn/linear_model/base.py | 66 | 16933 | """
Generalized Linear models.
"""
# Author: Alexandre Gramfort <alexandre.gramfort@inria.fr>
# Fabian Pedregosa <fabian.pedregosa@inria.fr>
# Olivier Grisel <olivier.grisel@ensta.org>
# Vincent Michel <vincent.michel@inria.fr>
# Peter Prettenhofer <peter.prettenhofer@gmail.com>
# Mathieu Blondel <mathieu@mblondel.org>
# Lars Buitinck <L.J.Buitinck@uva.nl>
#
# License: BSD 3 clause
from __future__ import division
from abc import ABCMeta, abstractmethod
import numbers
import warnings
import numpy as np
import scipy.sparse as sp
from scipy import linalg
from scipy import sparse
from ..externals import six
from ..externals.joblib import Parallel, delayed
from ..base import BaseEstimator, ClassifierMixin, RegressorMixin
from ..utils import as_float_array, check_array, check_X_y, deprecated
from ..utils import check_random_state, column_or_1d
from ..utils.extmath import safe_sparse_dot
from ..utils.sparsefuncs import mean_variance_axis, inplace_column_scale
from ..utils.fixes import sparse_lsqr
from ..utils.validation import NotFittedError, check_is_fitted
from ..utils.seq_dataset import ArrayDataset, CSRDataset
###
### TODO: intercept for all models
### We should define a common function to center data instead of
### repeating the same code inside each fit method.
### TODO: bayesian_ridge_regression and bayesian_regression_ard
### should be squashed into its respective objects.
SPARSE_INTERCEPT_DECAY = 0.01
# For sparse data intercept updates are scaled by this decay factor to avoid
# intercept oscillation (see make_dataset, which returns it for sparse X).
def make_dataset(X, y, sample_weight, random_state=None):
    """Build the sequential ``Dataset`` wrapper for dense or sparse input.

    Also returns the ``intercept_decay`` factor, which is smaller for
    sparse data to damp intercept oscillation.
    """
    rng = check_random_state(random_state)
    # SequentialDataset requires a strictly positive seed (never 0).
    seed = rng.randint(1, np.iinfo(np.int32).max)

    is_sparse = sp.issparse(X)
    if is_sparse:
        dataset = CSRDataset(X.data, X.indptr, X.indices, y, sample_weight,
                             seed=seed)
    else:
        dataset = ArrayDataset(X, y, sample_weight, seed=seed)

    return dataset, (SPARSE_INTERCEPT_DECAY if is_sparse else 1.0)
def sparse_center_data(X, y, fit_intercept, normalize=False):
    """
    Compute information needed to center data to have mean zero along
    axis 0. Be aware that X will not be centered since it would break
    the sparsity, but will be normalized if asked so.

    Returns (X, y, X_mean, y_mean, X_std); only y is actually centered.
    """
    if fit_intercept:
        # we might require not to change the csr matrix sometimes
        # store a copy if normalize is True.
        # Change dtype to float64 since mean_variance_axis accepts
        # it that way.
        if sp.isspmatrix(X) and X.getformat() == 'csr':
            X = sp.csr_matrix(X, copy=normalize, dtype=np.float64)
        else:
            X = sp.csc_matrix(X, copy=normalize, dtype=np.float64)

        X_mean, X_var = mean_variance_axis(X, axis=0)
        if normalize:
            # transform variance to std in-place
            # XXX: currently scaled to variance=n_samples to match center_data
            X_var *= X.shape[0]
            # np.sqrt with the same array as out computes the std in place.
            X_std = np.sqrt(X_var, X_var)
            del X_var
            # Avoid division by zero for constant (zero-variance) features.
            X_std[X_std == 0] = 1
            inplace_column_scale(X, 1. / X_std)
        else:
            X_std = np.ones(X.shape[1])
        y_mean = y.mean(axis=0)
        y = y - y_mean
    else:
        # No intercept: report neutral statistics, leave X and y untouched.
        X_mean = np.zeros(X.shape[1])
        X_std = np.ones(X.shape[1])
        y_mean = 0. if y.ndim == 1 else np.zeros(y.shape[1], dtype=X.dtype)

    return X, y, X_mean, y_mean, X_std
def center_data(X, y, fit_intercept, normalize=False, copy=True,
                sample_weight=None):
    """
    Centers data to have mean zero along axis 0. This is here because
    nearly all linear models will want their data to be centered.

    If sample_weight is not None, then the weighted mean of X and y
    is zero, and not the mean itself.

    Returns (X, y, X_mean, y_mean, X_std). Dense X is modified in place
    unless ``copy`` forces a copy via as_float_array.
    """
    X = as_float_array(X, copy)
    if fit_intercept:
        # A scalar sample_weight weights all samples equally — same mean
        # as unweighted, so drop it.
        if isinstance(sample_weight, numbers.Number):
            sample_weight = None
        if sp.issparse(X):
            # Sparse X is not centered (would destroy sparsity).
            X_mean = np.zeros(X.shape[1])
            X_std = np.ones(X.shape[1])
        else:
            X_mean = np.average(X, axis=0, weights=sample_weight)
            X -= X_mean
            if normalize:
                # XXX: currently scaled to variance=n_samples
                X_std = np.sqrt(np.sum(X ** 2, axis=0))
                # Guard constant columns against division by zero.
                X_std[X_std == 0] = 1
                X /= X_std
            else:
                X_std = np.ones(X.shape[1])
        y_mean = np.average(y, axis=0, weights=sample_weight)
        y = y - y_mean
    else:
        X_mean = np.zeros(X.shape[1])
        X_std = np.ones(X.shape[1])
        y_mean = 0. if y.ndim == 1 else np.zeros(y.shape[1], dtype=X.dtype)
    return X, y, X_mean, y_mean, X_std
def _rescale_data(X, y, sample_weight):
    """Scale X and y by sqrt(sample_weight) so that weighted least squares
    reduces to ordinary least squares on the rescaled data."""
    n_samples = X.shape[0]
    # Broadcast a scalar weight to a vector, then take the square root.
    weights = np.sqrt(sample_weight * np.ones(n_samples))
    # Left-multiplication by this diagonal matrix scales each row.
    scaling = sparse.dia_matrix((weights, 0), shape=(n_samples, n_samples))
    return safe_sparse_dot(scaling, X), safe_sparse_dot(scaling, y)
class LinearModel(six.with_metaclass(ABCMeta, BaseEstimator)):
    """Base class for Linear Models.

    Subclasses implement fit(); prediction is the linear form
    X @ coef_.T + intercept_.
    """

    @abstractmethod
    def fit(self, X, y):
        """Fit model."""

    @deprecated(" and will be removed in 0.19.")
    def decision_function(self, X):
        """Decision function of the linear model.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
            Samples.

        Returns
        -------
        C : array, shape = (n_samples,)
            Returns predicted values.
        """
        return self._decision_function(X)

    def _decision_function(self, X):
        # Requires a fitted model (coef_ set by fit()).
        check_is_fitted(self, "coef_")

        X = check_array(X, accept_sparse=['csr', 'csc', 'coo'])
        return safe_sparse_dot(X, self.coef_.T,
                               dense_output=True) + self.intercept_

    def predict(self, X):
        """Predict using the linear model

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
            Samples.

        Returns
        -------
        C : array, shape = (n_samples,)
            Returns predicted values.
        """
        return self._decision_function(X)

    # Shared centering helper, overridable by subclasses.
    _center_data = staticmethod(center_data)

    def _set_intercept(self, X_mean, y_mean, X_std):
        """Set the intercept_ from the centering statistics.

        Rescales coef_ back to the original feature scale and derives the
        intercept so predictions on uncentered data are correct.
        """
        if self.fit_intercept:
            self.coef_ = self.coef_ / X_std
            self.intercept_ = y_mean - np.dot(X_mean, self.coef_.T)
        else:
            self.intercept_ = 0.
# XXX Should this derive from LinearModel? It should be a mixin, not an ABC.
# Maybe the n_features checking can be moved to LinearModel.
class LinearClassifierMixin(ClassifierMixin):
    """Mixin for linear classifiers.

    Handles prediction for sparse and dense X.
    """

    def decision_function(self, X):
        """Predict confidence scores for samples.

        The confidence score for a sample is the signed distance of that
        sample to the hyperplane.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = (n_samples, n_features)
            Samples.

        Returns
        -------
        array, shape=(n_samples,) if n_classes == 2 else (n_samples, n_classes)
            Confidence scores per (sample, class) combination. In the binary
            case, confidence score for self.classes_[1] where >0 means this
            class would be predicted.
        """
        if not hasattr(self, 'coef_') or self.coef_ is None:
            raise NotFittedError("This %(name)s instance is not fitted "
                                 "yet" % {'name': type(self).__name__})

        X = check_array(X, accept_sparse='csr')

        n_features = self.coef_.shape[1]
        if X.shape[1] != n_features:
            raise ValueError("X has %d features per sample; expecting %d"
                             % (X.shape[1], n_features))

        scores = safe_sparse_dot(X, self.coef_.T,
                                 dense_output=True) + self.intercept_
        # Binary case: return a 1-D score array instead of (n_samples, 1).
        return scores.ravel() if scores.shape[1] == 1 else scores

    def predict(self, X):
        """Predict class labels for samples in X.

        Parameters
        ----------
        X : {array-like, sparse matrix}, shape = [n_samples, n_features]
            Samples.

        Returns
        -------
        C : array, shape = [n_samples]
            Predicted class label per sample.
        """
        scores = self.decision_function(X)
        if len(scores.shape) == 1:
            # Binary: positive score -> classes_[1].
            indices = (scores > 0).astype(np.int)
        else:
            # Multiclass: highest score wins.
            indices = scores.argmax(axis=1)
        return self.classes_[indices]

    def _predict_proba_lr(self, X):
        """Probability estimation for OvR logistic regression.

        Positive class probabilities are computed as
        1. / (1. + np.exp(-self.decision_function(X)));
        multiclass is handled by normalizing that over all classes.
        """
        # In-place sigmoid: prob = 1 / (1 + exp(-scores)).
        prob = self.decision_function(X)
        prob *= -1
        np.exp(prob, prob)
        prob += 1
        np.reciprocal(prob, prob)
        if prob.ndim == 1:
            return np.vstack([1 - prob, prob]).T
        else:
            # OvR normalization, like LibLinear's predict_probability
            prob /= prob.sum(axis=1).reshape((prob.shape[0], -1))
            return prob
class SparseCoefMixin(object):
    """Mixin for converting coef_ to and from CSR format.

    L1-regularizing estimators should inherit this.
    """

    def densify(self):
        """Convert coefficient matrix to dense array format.

        Converts the ``coef_`` member (back) to a numpy.ndarray. This is the
        default format of ``coef_`` and is required for fitting, so calling
        this method is only required on models that have previously been
        sparsified; otherwise, it is a no-op.

        Returns
        -------
        self: estimator
        """
        msg = "Estimator, %(name)s, must be fitted before densifying."
        check_is_fitted(self, "coef_", msg=msg)
        # No-op when coef_ is already dense.
        if sp.issparse(self.coef_):
            self.coef_ = self.coef_.toarray()
        return self

    def sparsify(self):
        """Convert coefficient matrix to sparse format.

        Converts the ``coef_`` member to a scipy.sparse matrix, which for
        L1-regularized models can be much more memory- and storage-efficient
        than the usual numpy.ndarray representation. The ``intercept_``
        member is not converted.

        Notes
        -----
        For non-sparse models, i.e. when there are not many zeros in
        ``coef_``, this may actually *increase* memory usage, so use this
        method with care. A rule of thumb is that the number of zero
        elements, which can be computed with ``(coef_ == 0).sum()``, must be
        more than 50% for this to provide significant benefits.

        After calling this method, further fitting with the partial_fit
        method (if any) will not work until you call densify.

        Returns
        -------
        self: estimator
        """
        msg = "Estimator, %(name)s, must be fitted before sparsifying."
        check_is_fitted(self, "coef_", msg=msg)
        self.coef_ = sp.csr_matrix(self.coef_)
        return self
class LinearRegression(LinearModel, RegressorMixin):
    """
    Ordinary least squares Linear Regression.

    Parameters
    ----------
    fit_intercept : boolean, optional
        whether to calculate the intercept for this model. If set
        to false, no intercept will be used in calculations
        (e.g. data is expected to be already centered).

    normalize : boolean, optional, default False
        If True, the regressors X will be normalized before regression.

    copy_X : boolean, optional, default True
        If True, X will be copied; else, it may be overwritten.

    n_jobs : int, optional, default 1
        The number of jobs to use for the computation.
        If -1 all CPUs are used. This will only provide speedup for
        n_targets > 1 and sufficient large problems.

    Attributes
    ----------
    coef_ : array, shape (n_features, ) or (n_targets, n_features)
        Estimated coefficients for the linear regression problem.
        If multiple targets are passed during the fit (y 2D), this
        is a 2D array of shape (n_targets, n_features), while if only
        one target is passed, this is a 1D array of length n_features.

    intercept_ : array
        Independent term in the linear model.

    Notes
    -----
    From the implementation point of view, this is just plain Ordinary
    Least Squares (scipy.linalg.lstsq) wrapped as a predictor object.
    """

    def __init__(self, fit_intercept=True, normalize=False, copy_X=True,
                 n_jobs=1):
        self.fit_intercept = fit_intercept
        self.normalize = normalize
        self.copy_X = copy_X
        self.n_jobs = n_jobs

    def fit(self, X, y, sample_weight=None):
        """
        Fit linear model.

        Parameters
        ----------
        X : numpy array or sparse matrix of shape [n_samples,n_features]
            Training data

        y : numpy array of shape [n_samples, n_targets]
            Target values

        sample_weight : numpy array of shape [n_samples]
            Individual weights for each sample

        Returns
        -------
        self : returns an instance of self.
        """
        n_jobs_ = self.n_jobs
        X, y = check_X_y(X, y, accept_sparse=['csr', 'csc', 'coo'],
                         y_numeric=True, multi_output=True)

        # Flatten a column-vector sample_weight to 1-D (with a warning).
        if ((sample_weight is not None) and np.atleast_1d(sample_weight).ndim > 1):
            sample_weight = column_or_1d(sample_weight, warn=True)

        X, y, X_mean, y_mean, X_std = self._center_data(
            X, y, self.fit_intercept, self.normalize, self.copy_X,
            sample_weight=sample_weight)

        if sample_weight is not None:
            # Sample weight can be implemented via a simple rescaling.
            X, y = _rescale_data(X, y, sample_weight)

        if sp.issparse(X):
            if y.ndim < 2:
                out = sparse_lsqr(X, y)
                self.coef_ = out[0]
                self.residues_ = out[3]
            else:
                # sparse_lstsq cannot handle y with shape (M, K)
                outs = Parallel(n_jobs=n_jobs_)(
                    delayed(sparse_lsqr)(X, y[:, j].ravel())
                    for j in range(y.shape[1]))
                # NOTE(review): np.vstack is given a generator here; newer
                # NumPy releases deprecate/reject non-sequence input —
                # consider wrapping in a list if upgrading NumPy.
                self.coef_ = np.vstack(out[0] for out in outs)
                self.residues_ = np.vstack(out[3] for out in outs)
        else:
            self.coef_, self.residues_, self.rank_, self.singular_ = \
                linalg.lstsq(X, y)
            self.coef_ = self.coef_.T

        if y.ndim == 1:
            self.coef_ = np.ravel(self.coef_)
        # Undo centering/normalization so the model works on raw inputs.
        self._set_intercept(X_mean, y_mean, X_std)
        return self
def _pre_fit(X, y, Xy, precompute, normalize, fit_intercept, copy,
             Xy_precompute_order=None):
    """Aux function used at beginning of fit in linear models.

    Centers/normalizes the data and resolves the ``precompute`` argument
    into an explicit Gram matrix (or False). Returns
    (X, y, X_mean, y_mean, X_std, precompute, Xy).

    ``Xy_precompute_order='F'`` requests Fortran-ordered Gram/Xy arrays.
    """
    n_samples, n_features = X.shape

    if sparse.isspmatrix(X):
        # Gram precomputation is only supported for dense X.
        precompute = False
        X, y, X_mean, y_mean, X_std = sparse_center_data(
            X, y, fit_intercept, normalize)
    else:
        # copy was done in fit if necessary
        X, y, X_mean, y_mean, X_std = center_data(
            X, y, fit_intercept, normalize, copy=copy)
    if hasattr(precompute, '__array__') and (
            fit_intercept and not np.allclose(X_mean, np.zeros(n_features))
            or normalize and not np.allclose(X_std, np.ones(n_features))):
        # A user-supplied Gram matrix no longer matches the transformed X.
        warnings.warn("Gram matrix was provided but X was centered"
                      " to fit intercept, "
                      "or X was normalized : recomputing Gram matrix.",
                      UserWarning)
        # recompute Gram
        precompute = 'auto'
        Xy = None

    # precompute if n_samples > n_features
    if precompute == 'auto':
        precompute = (n_samples > n_features)

    if precompute is True:
        # Compute the Gram matrix once; previously the dot product was
        # evaluated a second time just to obtain the 'F'-ordered variant.
        # Gram is symmetric, so transposing only flips the memory order.
        precompute = np.dot(X.T, X)
        if Xy_precompute_order == 'F':
            precompute = precompute.T

    if not hasattr(precompute, '__array__'):
        Xy = None  # cannot use Xy if precompute is not Gram

    if hasattr(precompute, '__array__') and Xy is None:
        if Xy_precompute_order == 'F':
            # np.dot(y.T, X).T yields a Fortran-ordered X.T @ y.
            Xy = np.dot(y.T, X).T
        else:
            Xy = np.dot(X.T, y)

    return X, y, X_mean, y_mean, X_std, precompute, Xy
| bsd-3-clause |
jldaniel/Athena | AthenaOpt/islands.py | 1 | 6334 | __author__ = 'jldaniel'
from algorithm_base import AlgorithmBase
from common import Individual
from converger import Converger
from history import History
class Islands(AlgorithmBase):
    """
    The Islands optimization algorithm: several NSGA-2 sub-populations
    evolve independently and exchange random migrants every `epoch`
    generations. (Python 2 code — uses xrange.)
    """

    def __init__(self):
        """
        Initialize the Islands algorithm.
        @return: None
        """
        super(Islands, self).__init__()

    def initialize(self):
        """
        Randomly initialize the island populations

        @return: The initialized islands
        @rtype: list(list(Individual))
        """
        self._logger.info('Initializing the islands')
        islands = []

        # TODO (JLD): Use _global_popsize where appropriate
        self._global_popsize = self._islands * self._pop_size

        # Randomly initialize each sub-population within the design bounds.
        for k in xrange(self._islands):
            pop = []
            for i in xrange(self._pop_size):
                ind = Individual()
                ind.id = i
                x_set = [None]*self._ndim
                for j in xrange(self._ndim):
                    x_set[j] = self._rnd.uniform(self._lower_bound[j], self._upper_bound[j])
                ind.x = x_set
                pop.append(ind)
            islands.append(pop)

        return islands

    def run(self):
        """
        Run the optimization algorithm

        @return: The history from the run (as a 1-tuple)
        @rtype: tuple(History)
        """
        # Initialize and evaluate the starting islands.
        islands = self.initialize()
        for island in islands:
            self.evaluate_population(island)

        # Perform selection to get the crowding distances.
        parent_islands = []
        for island in islands:
            parent_pop = self.selection(island, self._pop_size)
            parent_islands.append(parent_pop)

        # Get the global pareto front across all islands.
        pareto_front = self.nondominated_sort(self.coalesce_populations(parent_islands),
                                              len(parent_islands)*self._pop_size, first_front_only=True)

        self._converger = Converger(pareto_front[0], len(pareto_front))

        # Initialize the archive.
        self._history.add_population(parent_islands)

        gen = 0
        converged = 0
        # TODO (JLD): Make epoch a configuration property
        epoch = 5
        # TODO (JLD): Make number_of_migrants a configuration property
        number_of_migrants = 3

        # Stop after the generation budget or 10 consecutive converged gens.
        while (gen < self._generations) and converged < 10:
            self._logger.info('Starting generation ' + repr(gen+1) + ' of ' + repr(self._generations))

            # Migration every `epoch` generations.
            if (gen % epoch) == 0:
                self._logger.info('Epoch occurred, migrating...')
                self.migration(parent_islands, number_of_migrants)

            # Build a child population per island via tournament selection.
            child_islands = []
            for parent_pop in parent_islands:
                child_pop = self.tournament_select(parent_pop, self._pop_size)
                child_islands.append(child_pop)

            # Apply crossover (with probability p_cross) and mutation.
            for child_pop in child_islands:
                for ind1, ind2 in zip(child_pop[::2], child_pop[1::2]):
                    if self._rnd.random() <= self._p_cross:
                        self.mate(ind1, ind2)
                    self.mutate(ind1)
                    self.mutate(ind2)

            # Evaluate the child population.
            for child_pop in child_islands:
                self.evaluate_population(child_pop)

            # Offset child IDs so they do not collide with parent IDs.
            for child_pop in child_islands:
                for ind in child_pop:
                    ind.id += self._pop_size

            # NSGA-2 selection over parents+children picks the next
            # generation. BUGFIX: the result must be stored back into
            # parent_islands — rebinding the loop variable (as the previous
            # code did) discarded the selection entirely, so the islands
            # never actually evolved.
            for idx, (parent_pop, child_pop) in enumerate(zip(parent_islands, child_islands)):
                parent_islands[idx] = self.selection(parent_pop + child_pop, self._pop_size)

            pareto_front = self.nondominated_sort(self.coalesce_populations(parent_islands),
                                                  len(parent_islands)*self._pop_size, first_front_only=True)

            for parent_pop in parent_islands:
                self._history.add_population(parent_pop)

            convergence_metrics = self._converger.check_convergence(pareto_front[0], self._pop_size*self._islands)
            self._logger.info('Frontier Goodness: ' + repr(convergence_metrics.fg))
            self._logger.info('Expansion Metric: ' + repr(convergence_metrics.expansion))
            self._logger.info('Density Metric: ' + repr(convergence_metrics.dm))

            # Count consecutive generations within the convergence tolerance.
            if max([convergence_metrics.fg, convergence_metrics.expansion, convergence_metrics.dm]) <= self._conv_tol:
                converged += 1
                self._logger.info('Converged Generations: ' + repr(converged))
            else:
                converged = 0

            gen += 1

        # Final solution: the surviving individuals across all islands.
        solution = []
        for parent_pop in parent_islands:
            for ind in parent_pop:
                solution.append(ind)

        # TODO (JLD): Add convergence history, and solution to outputs
        return self._history,

    # TODO (JLD): Add in different migration schemes
    def migration(self, islands, number_of_migrants):
        """
        Perform the migration operation in place on the sub populations

        @param islands: The sub populations to include in the migration
        @type islands: list(list(Individual))
        @param number_of_migrants: The number of migrants
        @type number_of_migrants: int
        @return: None
        """
        # Generate a migration pool by removing random individuals
        # from every island.
        migrant_pool = []
        for island in islands:
            for i in xrange(number_of_migrants):
                random_index = self._rnd.randrange(0, len(island))
                migrant = island.pop(random_index)
                migrant_pool.append(migrant)

        # Redistribute the pooled migrants randomly across the islands.
        for island in islands:
            for i in xrange(number_of_migrants):
                random_index = self._rnd.randrange(0, len(migrant_pool))
                immigrant = migrant_pool.pop(random_index)
                island.append(immigrant)
| mit |
kevintaw/django | tests/annotations/tests.py | 96 | 18622 | from __future__ import unicode_literals
import datetime
from decimal import Decimal
from django.core.exceptions import FieldDoesNotExist, FieldError
from django.db.models import (
F, BooleanField, CharField, Count, DateTimeField, ExpressionWrapper, Func,
IntegerField, Sum, Value,
)
from django.test import TestCase
from django.utils import six
from .models import (
Author, Book, Company, DepartmentStore, Employee, Publisher, Store, Ticket,
)
def cxOracle_513_py3_bug(func):
    """
    Mark *func* as an expected failure on buggy cx_Oracle builds.

    cx_Oracle versions up to and including 5.1.3 mishandle strings under
    Python 3 (they treat Python 3 strings as Python 2 byte strings rather
    than unicode), which makes some tests in this module fail there.
    See https://code.djangoproject.com/ticket/23843, in particular comment 6,
    which points to https://bitbucket.org/anthony_tuininga/cx_oracle/issue/6/
    """
    from unittest import expectedFailure
    from django.db import connection

    # Only Oracle + Python 3 + driver <= 5.1.3 is affected.
    affected = (
        connection.vendor == 'oracle'
        and six.PY3
        and connection.Database.version <= '5.1.3'
    )
    return expectedFailure(func) if affected else func
class NonAggregateAnnotationTestCase(TestCase):
    """
    Tests for QuerySet.annotate() with non-aggregate expressions (F(),
    Value(), ExpressionWrapper(), Func()) and their interaction with
    filtering, values(), defer(), ordering, update() and multi-table
    inheritance.
    """
    @classmethod
    def setUpTestData(cls):
        # Shared read-only fixture: 9 authors, 5 publishers, 6 books and
        # 3 stores, wired together through FK and M2M relations.
        cls.a1 = Author.objects.create(name='Adrian Holovaty', age=34)
        cls.a2 = Author.objects.create(name='Jacob Kaplan-Moss', age=35)
        cls.a3 = Author.objects.create(name='Brad Dayley', age=45)
        cls.a4 = Author.objects.create(name='James Bennett', age=29)
        cls.a5 = Author.objects.create(name='Jeffrey Forcier', age=37)
        cls.a6 = Author.objects.create(name='Paul Bissex', age=29)
        cls.a7 = Author.objects.create(name='Wesley J. Chun', age=25)
        cls.a8 = Author.objects.create(name='Peter Norvig', age=57)
        cls.a9 = Author.objects.create(name='Stuart Russell', age=46)
        cls.a1.friends.add(cls.a2, cls.a4)
        cls.a2.friends.add(cls.a1, cls.a7)
        cls.a4.friends.add(cls.a1)
        cls.a5.friends.add(cls.a6, cls.a7)
        cls.a6.friends.add(cls.a5, cls.a7)
        cls.a7.friends.add(cls.a2, cls.a5, cls.a6)
        cls.a8.friends.add(cls.a9)
        cls.a9.friends.add(cls.a8)
        cls.p1 = Publisher.objects.create(name='Apress', num_awards=3)
        cls.p2 = Publisher.objects.create(name='Sams', num_awards=1)
        cls.p3 = Publisher.objects.create(name='Prentice Hall', num_awards=7)
        cls.p4 = Publisher.objects.create(name='Morgan Kaufmann', num_awards=9)
        cls.p5 = Publisher.objects.create(name="Jonno's House of Books", num_awards=0)
        cls.b1 = Book.objects.create(
            isbn='159059725', name='The Definitive Guide to Django: Web Development Done Right',
            pages=447, rating=4.5, price=Decimal('30.00'), contact=cls.a1, publisher=cls.p1,
            pubdate=datetime.date(2007, 12, 6)
        )
        cls.b2 = Book.objects.create(
            isbn='067232959', name='Sams Teach Yourself Django in 24 Hours',
            pages=528, rating=3.0, price=Decimal('23.09'), contact=cls.a3, publisher=cls.p2,
            pubdate=datetime.date(2008, 3, 3)
        )
        cls.b3 = Book.objects.create(
            isbn='159059996', name='Practical Django Projects',
            pages=300, rating=4.0, price=Decimal('29.69'), contact=cls.a4, publisher=cls.p1,
            pubdate=datetime.date(2008, 6, 23)
        )
        cls.b4 = Book.objects.create(
            isbn='013235613', name='Python Web Development with Django',
            pages=350, rating=4.0, price=Decimal('29.69'), contact=cls.a5, publisher=cls.p3,
            pubdate=datetime.date(2008, 11, 3)
        )
        cls.b5 = Book.objects.create(
            isbn='013790395', name='Artificial Intelligence: A Modern Approach',
            pages=1132, rating=4.0, price=Decimal('82.80'), contact=cls.a8, publisher=cls.p3,
            pubdate=datetime.date(1995, 1, 15)
        )
        cls.b6 = Book.objects.create(
            isbn='155860191', name='Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
            pages=946, rating=5.0, price=Decimal('75.00'), contact=cls.a8, publisher=cls.p4,
            pubdate=datetime.date(1991, 10, 15)
        )
        cls.b1.authors.add(cls.a1, cls.a2)
        cls.b2.authors.add(cls.a3)
        cls.b3.authors.add(cls.a4)
        cls.b4.authors.add(cls.a5, cls.a6, cls.a7)
        cls.b5.authors.add(cls.a8, cls.a9)
        cls.b6.authors.add(cls.a8)
        s1 = Store.objects.create(
            name='Amazon.com',
            original_opening=datetime.datetime(1994, 4, 23, 9, 17, 42),
            friday_night_closing=datetime.time(23, 59, 59)
        )
        s2 = Store.objects.create(
            name='Books.com',
            original_opening=datetime.datetime(2001, 3, 15, 11, 23, 37),
            friday_night_closing=datetime.time(23, 59, 59)
        )
        s3 = Store.objects.create(
            name="Mamma and Pappa's Books",
            original_opening=datetime.datetime(1945, 4, 25, 16, 24, 14),
            friday_night_closing=datetime.time(21, 30)
        )
        s1.books.add(cls.b1, cls.b2, cls.b3, cls.b4, cls.b5, cls.b6)
        s2.books.add(cls.b1, cls.b3, cls.b5, cls.b6)
        s3.books.add(cls.b3, cls.b4, cls.b6)
    def test_basic_annotation(self):
        # A constant Value() annotation is attached to every row.
        books = Book.objects.annotate(
            is_book=Value(1, output_field=IntegerField()))
        for book in books:
            self.assertEqual(book.is_book, 1)
    def test_basic_f_annotation(self):
        # An F() annotation mirrors a field of the same model.
        books = Book.objects.annotate(another_rating=F('rating'))
        for book in books:
            self.assertEqual(book.another_rating, book.rating)
    def test_joined_annotation(self):
        # F() can traverse a foreign key (publisher__num_awards).
        books = Book.objects.select_related('publisher').annotate(
            num_awards=F('publisher__num_awards'))
        for book in books:
            self.assertEqual(book.num_awards, book.publisher.num_awards)
    def test_mixed_type_annotation_date_interval(self):
        # datetime + timedelta requires an explicit output_field.
        active = datetime.datetime(2015, 3, 20, 14, 0, 0)
        duration = datetime.timedelta(hours=1)
        expires = datetime.datetime(2015, 3, 20, 14, 0, 0) + duration
        Ticket.objects.create(active_at=active, duration=duration)
        t = Ticket.objects.annotate(
            expires=ExpressionWrapper(F('active_at') + F('duration'), output_field=DateTimeField())
        ).first()
        self.assertEqual(t.expires, expires)
    def test_mixed_type_annotation_numbers(self):
        # int + float coerced to IntegerField truncates like int().
        test = self.b1
        b = Book.objects.annotate(
            combined=ExpressionWrapper(F('pages') + F('rating'), output_field=IntegerField())
        ).get(isbn=test.isbn)
        combined = int(test.pages + test.rating)
        self.assertEqual(b.combined, combined)
    def test_annotate_with_aggregation(self):
        # Non-aggregate and aggregate annotations can coexist in one call.
        books = Book.objects.annotate(
            is_book=Value(1, output_field=IntegerField()),
            rating_count=Count('rating'))
        for book in books:
            self.assertEqual(book.is_book, 1)
            self.assertEqual(book.rating_count, 1)
    def test_aggregate_over_annotation(self):
        agg = Author.objects.annotate(other_age=F('age')).aggregate(otherage_sum=Sum('other_age'))
        other_agg = Author.objects.aggregate(age_sum=Sum('age'))
        self.assertEqual(agg['otherage_sum'], other_agg['age_sum'])
    def test_filter_annotation(self):
        books = Book.objects.annotate(
            is_book=Value(1, output_field=IntegerField())
        ).filter(is_book=1)
        for book in books:
            self.assertEqual(book.is_book, 1)
    def test_filter_annotation_with_f(self):
        books = Book.objects.annotate(
            other_rating=F('rating')
        ).filter(other_rating=3.5)
        for book in books:
            self.assertEqual(book.other_rating, 3.5)
    def test_filter_annotation_with_double_f(self):
        books = Book.objects.annotate(
            other_rating=F('rating')
        ).filter(other_rating=F('rating'))
        for book in books:
            self.assertEqual(book.other_rating, book.rating)
    def test_filter_agg_with_double_f(self):
        books = Book.objects.annotate(
            sum_rating=Sum('rating')
        ).filter(sum_rating=F('sum_rating'))
        for book in books:
            self.assertEqual(book.sum_rating, book.rating)
    def test_filter_wrong_annotation(self):
        # Referencing an undefined annotation in filter() must raise.
        with six.assertRaisesRegex(self, FieldError, "Cannot resolve keyword .*"):
            list(Book.objects.annotate(
                sum_rating=Sum('rating')
            ).filter(sum_rating=F('nope')))
    def test_combined_annotation_commutative(self):
        book1 = Book.objects.annotate(adjusted_rating=F('rating') + 2).get(pk=self.b1.pk)
        book2 = Book.objects.annotate(adjusted_rating=2 + F('rating')).get(pk=self.b1.pk)
        self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
        # None is commutative too: both orders should yield NULL.
        book1 = Book.objects.annotate(adjusted_rating=F('rating') + None).get(pk=self.b1.pk)
        book2 = Book.objects.annotate(adjusted_rating=None + F('rating')).get(pk=self.b1.pk)
        self.assertEqual(book1.adjusted_rating, book2.adjusted_rating)
    def test_update_with_annotation(self):
        # update() can consume an annotation defined on the same queryset.
        book_preupdate = Book.objects.get(pk=self.b2.pk)
        Book.objects.annotate(other_rating=F('rating') - 1).update(rating=F('other_rating'))
        book_postupdate = Book.objects.get(pk=self.b2.pk)
        self.assertEqual(book_preupdate.rating - 1, book_postupdate.rating)
    def test_annotation_with_m2m(self):
        # An M2M-spanning F() multiplies rows: one per related author.
        books = Book.objects.annotate(author_age=F('authors__age')).filter(pk=self.b1.pk).order_by('author_age')
        self.assertEqual(books[0].author_age, 34)
        self.assertEqual(books[1].author_age, 35)
    def test_annotation_reverse_m2m(self):
        books = Book.objects.annotate(
            store_name=F('store__name')).filter(
            name='Practical Django Projects').order_by(
            'store_name')
        self.assertQuerysetEqual(
            books, [
                'Amazon.com',
                'Books.com',
                'Mamma and Pappa\'s Books'
            ],
            lambda b: b.store_name
        )
    def test_values_annotation(self):
        """
        Annotations can reference fields in a values clause,
        and contribute to an existing values clause.
        """
        # annotate references a field in values()
        qs = Book.objects.values('rating').annotate(other_rating=F('rating') - 1)
        book = qs.get(pk=self.b1.pk)
        self.assertEqual(book['rating'] - 1, book['other_rating'])
        # filter refs the annotated value
        book = qs.get(other_rating=4)
        self.assertEqual(book['other_rating'], 4)
        # can annotate an existing values with a new field
        book = qs.annotate(other_isbn=F('isbn')).get(other_rating=4)
        self.assertEqual(book['other_rating'], 4)
        self.assertEqual(book['other_isbn'], '155860191')
    def test_defer_annotation(self):
        """
        Deferred attributes can be referenced by an annotation,
        but they are not themselves deferred, and cannot be deferred.
        """
        qs = Book.objects.defer('rating').annotate(other_rating=F('rating') - 1)
        # Two queries: the initial fetch plus the deferred 'rating' load.
        with self.assertNumQueries(2):
            book = qs.get(other_rating=4)
            self.assertEqual(book.rating, 5)
            self.assertEqual(book.other_rating, 4)
        with six.assertRaisesRegex(self, FieldDoesNotExist, "\w has no field named u?'other_rating'"):
            book = qs.defer('other_rating').get(other_rating=4)
    def test_mti_annotations(self):
        """
        Fields on an inherited model can be referenced by an
        annotated field.
        """
        d = DepartmentStore.objects.create(
            name='Angus & Robinson',
            original_opening=datetime.date(2014, 3, 8),
            friday_night_closing=datetime.time(21, 00, 00),
            chain='Westfield'
        )
        books = Book.objects.filter(rating__gt=4)
        for b in books:
            d.books.add(b)
        qs = DepartmentStore.objects.annotate(
            other_name=F('name'),
            other_chain=F('chain'),
            is_open=Value(True, BooleanField()),
            book_isbn=F('books__isbn')
        ).order_by('book_isbn').filter(chain='Westfield')
        self.assertQuerysetEqual(
            qs, [
                ('Angus & Robinson', 'Westfield', True, '155860191'),
                ('Angus & Robinson', 'Westfield', True, '159059725')
            ],
            lambda d: (d.other_name, d.other_chain, d.is_open, d.book_isbn)
        )
    def test_null_annotation(self):
        """
        Test that annotating None onto a model round-trips
        """
        book = Book.objects.annotate(no_value=Value(None, output_field=IntegerField())).first()
        self.assertIsNone(book.no_value)
    def test_order_by_annotation(self):
        authors = Author.objects.annotate(other_age=F('age')).order_by('other_age')
        self.assertQuerysetEqual(
            authors, [
                25, 29, 29, 34, 35, 37, 45, 46, 57,
            ],
            lambda a: a.other_age
        )
    def test_order_by_aggregate(self):
        authors = Author.objects.values('age').annotate(age_count=Count('age')).order_by('age_count', 'age')
        self.assertQuerysetEqual(
            authors, [
                (25, 1), (34, 1), (35, 1), (37, 1), (45, 1), (46, 1), (57, 1), (29, 2),
            ],
            lambda a: (a['age'], a['age_count'])
        )
    def test_annotate_exists(self):
        authors = Author.objects.annotate(c=Count('id')).filter(c__gt=1)
        self.assertFalse(authors.exists())
    def test_column_field_ordering(self):
        """
        Test that columns are aligned in the correct order for
        resolve_columns. This test will fail on mysql if column
        ordering is out. Column fields should be aligned as:
        1. extra_select
        2. model_fields
        3. annotation_fields
        4. model_related_fields
        """
        store = Store.objects.first()
        Employee.objects.create(id=1, first_name='Max', manager=True, last_name='Paine',
                                store=store, age=23, salary=Decimal(50000.00))
        Employee.objects.create(id=2, first_name='Buffy', manager=False, last_name='Summers',
                                store=store, age=18, salary=Decimal(40000.00))
        qs = Employee.objects.extra(
            select={'random_value': '42'}
        ).select_related('store').annotate(
            annotated_value=Value(17, output_field=IntegerField())
        )
        rows = [
            (1, 'Max', True, 42, 'Paine', 23, Decimal(50000.00), store.name, 17),
            (2, 'Buffy', False, 42, 'Summers', 18, Decimal(40000.00), store.name, 17)
        ]
        self.assertQuerysetEqual(
            qs.order_by('id'), rows,
            lambda e: (
                e.id, e.first_name, e.manager, e.random_value, e.last_name, e.age,
                e.salary, e.store.name, e.annotated_value))
    def test_column_field_ordering_with_deferred(self):
        store = Store.objects.first()
        Employee.objects.create(id=1, first_name='Max', manager=True, last_name='Paine',
                                store=store, age=23, salary=Decimal(50000.00))
        Employee.objects.create(id=2, first_name='Buffy', manager=False, last_name='Summers',
                                store=store, age=18, salary=Decimal(40000.00))
        qs = Employee.objects.extra(
            select={'random_value': '42'}
        ).select_related('store').annotate(
            annotated_value=Value(17, output_field=IntegerField())
        )
        rows = [
            (1, 'Max', True, 42, 'Paine', 23, Decimal(50000.00), store.name, 17),
            (2, 'Buffy', False, 42, 'Summers', 18, Decimal(40000.00), store.name, 17)
        ]
        # and we respect deferred columns!
        self.assertQuerysetEqual(
            qs.defer('age').order_by('id'), rows,
            lambda e: (
                e.id, e.first_name, e.manager, e.random_value, e.last_name, e.age,
                e.salary, e.store.name, e.annotated_value))
    @cxOracle_513_py3_bug
    def test_custom_functions(self):
        # Func() with an arbitrary SQL function (COALESCE over 4 args).
        Company(name='Apple', motto=None, ticker_name='APPL', description='Beautiful Devices').save()
        Company(name='Django Software Foundation', motto=None, ticker_name=None, description=None).save()
        Company(name='Google', motto='Do No Evil', ticker_name='GOOG', description='Internet Company').save()
        Company(name='Yahoo', motto=None, ticker_name=None, description='Internet Company').save()
        qs = Company.objects.annotate(
            tagline=Func(
                F('motto'),
                F('ticker_name'),
                F('description'),
                Value('No Tag'),
                function='COALESCE')
        ).order_by('name')
        self.assertQuerysetEqual(
            qs, [
                ('Apple', 'APPL'),
                ('Django Software Foundation', 'No Tag'),
                ('Google', 'Do No Evil'),
                ('Yahoo', 'Internet Company')
            ],
            lambda c: (c.name, c.tagline)
        )
    @cxOracle_513_py3_bug
    def test_custom_functions_can_ref_other_functions(self):
        Company(name='Apple', motto=None, ticker_name='APPL', description='Beautiful Devices').save()
        Company(name='Django Software Foundation', motto=None, ticker_name=None, description=None).save()
        Company(name='Google', motto='Do No Evil', ticker_name='GOOG', description='Internet Company').save()
        Company(name='Yahoo', motto=None, ticker_name=None, description='Internet Company').save()
        class Lower(Func):
            function = 'LOWER'
        qs = Company.objects.annotate(
            tagline=Func(
                F('motto'),
                F('ticker_name'),
                F('description'),
                Value('No Tag'),
                function='COALESCE')
        ).annotate(
            tagline_lower=Lower(F('tagline'), output_field=CharField())
        ).order_by('name')
        # LOWER function supported by:
        # oracle, postgres, mysql, sqlite, sqlserver
        self.assertQuerysetEqual(
            qs, [
                ('Apple', 'APPL'.lower()),
                ('Django Software Foundation', 'No Tag'.lower()),
                ('Google', 'Do No Evil'.lower()),
                ('Yahoo', 'Internet Company'.lower())
            ],
            lambda c: (c.name, c.tagline_lower)
        )
| bsd-3-clause |
CenterForOpenScience/osf.io | api/preprints/urls.py | 6 | 1988 | from django.conf.urls import url
from . import views
# URL namespace used when reversing these routes (e.g. reverse('osf:...')).
app_name = 'osf'
# Routes for the preprints API. Patterns are tried top-to-bottom; every
# sub-resource route is keyed by a \w+ preprint_id. Each route's name is
# taken from the view class's own view_name attribute so the URL names
# stay in sync with the views module.
urlpatterns = [
    url(r'^$', views.PreprintList.as_view(), name=views.PreprintList.view_name),
    url(r'^(?P<preprint_id>\w+)/$', views.PreprintDetail.as_view(), name=views.PreprintDetail.view_name),
    url(r'^(?P<preprint_id>\w+)/bibliographic_contributors/$', views.PreprintBibliographicContributorsList.as_view(), name=views.PreprintBibliographicContributorsList.view_name),
    url(r'^(?P<preprint_id>\w+)/citation/$', views.PreprintCitationDetail.as_view(), name=views.PreprintCitationDetail.view_name),
    url(r'^(?P<preprint_id>\w+)/citation/(?P<style_id>[-\w]+)/$', views.PreprintCitationStyleDetail.as_view(), name=views.PreprintCitationStyleDetail.view_name),
    url(r'^(?P<preprint_id>\w+)/contributors/$', views.PreprintContributorsList.as_view(), name=views.PreprintContributorsList.view_name),
    url(r'^(?P<preprint_id>\w+)/contributors/(?P<user_id>\w+)/$', views.PreprintContributorDetail.as_view(), name=views.PreprintContributorDetail.view_name),
    url(r'^(?P<preprint_id>\w+)/files/$', views.PreprintStorageProvidersList.as_view(), name=views.PreprintStorageProvidersList.view_name),
    url(r'^(?P<preprint_id>\w+)/files/osfstorage/$', views.PreprintFilesList.as_view(), name=views.PreprintFilesList.view_name),
    url(r'^(?P<preprint_id>\w+)/identifiers/$', views.PreprintIdentifierList.as_view(), name=views.PreprintIdentifierList.view_name),
    url(r'^(?P<preprint_id>\w+)/relationships/node/$', views.PreprintNodeRelationship.as_view(), name=views.PreprintNodeRelationship.view_name),
    url(r'^(?P<preprint_id>\w+)/review_actions/$', views.PreprintActionList.as_view(), name=views.PreprintActionList.view_name),
    url(r'^(?P<preprint_id>\w+)/requests/$', views.PreprintRequestListCreate.as_view(), name=views.PreprintRequestListCreate.view_name),
    url(r'^(?P<preprint_id>\w+)/subjects/$', views.PreprintSubjectsList.as_view(), name=views.PreprintSubjectsList.view_name),
]
| apache-2.0 |
ruibarreira/linuxtrail | usr/lib/python2.7/dist-packages/Crypto/SelfTest/Signature/test_pkcs1_15.py | 3 | 9473 | # -*- coding: utf-8 -*-
#
# SelfTest/Signature/test_pkcs1_15.py: Self-test for PKCS#1 v1.5 signatures
#
# ===================================================================
# The contents of this file are dedicated to the public domain. To
# the extent that dedication to the public domain is not available,
# everyone is granted a worldwide, perpetual, royalty-free,
# non-exclusive license to exercise all rights associated with the
# contents of this file for any purpose whatsoever.
# No rights are reserved.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
# ===================================================================
__revision__ = "$Id$"
import unittest
from Crypto.PublicKey import RSA
from Crypto.SelfTest.st_common import list_test_cases, a2b_hex, b2a_hex
from Crypto.Hash import *
from Crypto import Random
from Crypto.Signature import PKCS1_v1_5 as PKCS
from Crypto.Util.py3compat import *
def isStr(s):
    """Duck-type check: return 1 if *s* can be concatenated to a text
    string (i.e. behaves like ``str``), otherwise 0."""
    try:
        '' + s
    except TypeError:
        # Not string-like (ints, bytes under Python 3, None, ...)
        return 0
    return 1
def rws(t):
    """Remove white spaces, tabs, and new lines from a string"""
    return t.replace('\n', '').replace('\t', '').replace(' ', '')
def t2b(t):
    """Convert a text string with bytes in hex form to a byte string"""
    # Strip all layout whitespace first, then decode the hex pairs.
    compact = b(rws(t))
    if len(compact) % 2 != 0:
        raise ValueError("Even number of characters expected")
    return a2b_hex(compact)
class PKCS1_15_Tests(unittest.TestCase):
        """Known-answer and round-trip tests for PKCS#1 v1.5 signatures."""
        # List of tuples with test data for PKCS#1 v1.5.
        # Each tuple is made up by:
        #       Item #0: dictionary with RSA key component, or key to import
        #       Item #1: data to hash and sign
        #       Item #2: signature of the data #1, done with the key #0, after
        #                hashing it with #3
        #       Item #3: hash object generator
        _testData = (
                #
                # Taken from ftp://ftp.rsa.com/pub/pkcs/ascii/examples.asc
                # "Some Examples of the PKCS Standards", 1999
                #
                (
                # Private key, from 2.1
                {
                'n':'''0a 66 79 1d c6 98 81 68 de 7a b7 74 19 bb 7f b0 c0 01 c6
                27 10 27 00 75 14 29 42 e1 9a 8d 8c 51 d0 53 b3 e3 78 2a 1d
                e5 dc 5a f4 eb e9 94 68 17 01 14 a1 df e6 7c dc 9a 9a f5 5d
                65 56 20 bb ab''',
                'e':'''01 00
                01''',
                'd':'''01 23 c5 b6 1b a3 6e db 1d 36 79 90 41 99 a8 9e a8 0c 09
                b9 12 2e 14 00 c0 9a dc f7 78 46 76 d0 1d 23 35 6a 7d 44 d6
                bd 8b d5 0e 94 bf c7 23 fa 87 d8 86 2b 75 17 76 91 c1 1d 75
                76 92 df 88 81'''
                },
                # Data to sign, from 3.1
                '''30 81 a4 02 01 00 30 42 31 0b 30 09 06
                03 55 04 06 13 02 55 53 31 1d 30 1b 06 03 55 04 0a 13 14
                45 78 61 6d 70 6c 65 20 4f 72 67 61 6e 69 7a 61 74 69 6f
                6e 31 14 30 12 06 03 55 04 03 13 0b 54 65 73 74 20 55 73
                65 72 20 31 30 5b 30 0d 06 09 2a 86 48 86 f7 0d 01 01 01
                05 00 03 4a 00 30 47 02 40
                0a 66 79 1d c6 98 81 68 de 7a b7 74 19 bb 7f b0
                c0 01 c6 27 10 27 00 75 14 29 42 e1 9a 8d 8c 51
                d0 53 b3 e3 78 2a 1d e5 dc 5a f4 eb e9 94 68 17
                01 14 a1 df e6 7c dc 9a 9a f5 5d 65 56 20 bb ab
                02 03 01 00 01''',
                # Signature, from 3.2 (at the very end)
                '''06 db 36 cb 18 d3 47 5b 9c 01 db 3c 78 95 28 08
                02 79 bb ae ff 2b 7d 55 8e d6 61 59 87 c8 51 86
                3f 8a 6c 2c ff bc 89 c3 f7 5a 18 d9 6b 12 7c 71
                7d 54 d0 d8 04 8d a8 a0 54 46 26 d1 7a 2a 8f be''',
                MD2
                ),
                #
                # RSA keypair generated with openssl
                #
                (
                """-----BEGIN RSA PRIVATE KEY-----
                MIIBOwIBAAJBAL8eJ5AKoIsjURpcEoGubZMxLD7+kT+TLr7UkvEtFrRhDDKMtuII
                q19FrL4pUIMymPMSLBn3hJLe30Dw48GQM4UCAwEAAQJACUSDEp8RTe32ftq8IwG8
                Wojl5mAd1wFiIOrZ/Uv8b963WJOJiuQcVN29vxU5+My9GPZ7RA3hrDBEAoHUDPrI
                OQIhAPIPLz4dphiD9imAkivY31Rc5AfHJiQRA7XixTcjEkojAiEAyh/pJHks/Mlr
                +rdPNEpotBjfV4M4BkgGAA/ipcmaAjcCIQCHvhwwKVBLzzTscT2HeUdEeBMoiXXK
                JACAr3sJQJGxIQIgarRp+m1WSKV1MciwMaTOnbU7wxFs9DP1pva76lYBzgUCIQC9
                n0CnZCJ6IZYqSt0H5N7+Q+2Ro64nuwV/OSQfM6sBwQ==
                -----END RSA PRIVATE KEY-----""",
                "This is a test\x0a",
                #
                # PKCS#1 signature computed with openssl
                #
                '''4a700a16432a291a3194646952687d5316458b8b86fb0a25aa30e0dcecdb
                442676759ac63d56ec1499c3ae4c0013c2053cabd5b5804848994541ac16
                fa243a4d''',
                SHA
                ),
                #
                # Test vector from http://www.di-mgt.com.au/rsa_alg.html#signpkcs1
                #
                (
                {
                'n':'''E08973398DD8F5F5E88776397F4EB005BB5383DE0FB7ABDC7DC775290D052E6D
                12DFA68626D4D26FAA5829FC97ECFA82510F3080BEB1509E4644F12CBBD832CF
                C6686F07D9B060ACBEEE34096A13F5F7050593DF5EBA3556D961FF197FC981E6
                F86CEA874070EFAC6D2C749F2DFA553AB9997702A648528C4EF357385774575F''',
                'e':'''010001''',
                'd':'''00A403C327477634346CA686B57949014B2E8AD2C862B2C7D748096A8B91F736
                F275D6E8CD15906027314735644D95CD6763CEB49F56AC2F376E1CEE0EBF282D
                F439906F34D86E085BD5656AD841F313D72D395EFE33CBFF29E4030B3D05A28F
                B7F18EA27637B07957D32F2BDE8706227D04665EC91BAF8B1AC3EC9144AB7F21'''
                },
                "abc",
                '''60AD5A78FB4A4030EC542C8974CD15F55384E836554CEDD9A322D5F4135C6267
                A9D20970C54E6651070B0144D43844C899320DD8FA7819F7EBC6A7715287332E
                C8675C136183B3F8A1F81EF969418267130A756FDBB2C71D9A667446E34E0EAD
                9CF31BFB66F816F319D0B7E430A5F2891553986E003720261C7E9022C0D9F11F''',
                SHA
                )
        )
        def testSign1(self):
                """Signing each vector's message reproduces the known signature."""
                for i in range(len(self._testData)):
                        row = self._testData[i]
                        # Build the key
                        if isStr(row[0]):
                                key = RSA.importKey(row[0])
                        else:
                                comps = [ long(rws(row[0][x]),16) for x in ('n','e','d') ]
                                key = RSA.construct(comps)
                        h = row[3].new()
                        # Data to sign can either be in hex form or not
                        try:
                                h.update(t2b(row[1]))
                        except:
                                h.update(b(row[1]))
                        # The real test
                        signer = PKCS.new(key)
                        self.assertTrue(signer.can_sign())
                        s = signer.sign(h)
                        self.assertEqual(s, t2b(row[2]))
        def testVerify1(self):
                """Each known signature verifies against the public half of its key."""
                for i in range(len(self._testData)):
                        row = self._testData[i]
                        # Build the key (public part only; verifiers cannot sign)
                        if isStr(row[0]):
                                key = RSA.importKey(row[0]).publickey()
                        else:
                                comps = [ long(rws(row[0][x]),16) for x in ('n','e') ]
                                key = RSA.construct(comps)
                        h = row[3].new()
                        # Data to sign can either be in hex form or not
                        try:
                                h.update(t2b(row[1]))
                        except:
                                h.update(b(row[1]))
                        # The real test
                        verifier = PKCS.new(key)
                        self.assertFalse(verifier.can_sign())
                        result = verifier.verify(h, t2b(row[2]))
                        self.assertTrue(result)
        def testSignVerify(self):
                """Round-trip: a fresh 1024-bit key signs and verifies under every hash."""
                rng = Random.new().read
                key = RSA.generate(1024, rng)
                for hashmod in (MD2,MD5,SHA,SHA224,SHA256,SHA384,SHA512,RIPEMD):
                        h = hashmod.new()
                        h.update(b('blah blah blah'))
                        signer = PKCS.new(key)
                        s = signer.sign(h)
                        result = signer.verify(h, s)
                        self.assertTrue(result)
def get_tests(config={}):
    """Return the list of test cases in this module (*config* is unused)."""
    return list(list_test_cases(PKCS1_15_Tests))
if __name__ == '__main__':
    # unittest resolves defaultTest by name; it expects a zero-argument
    # callable returning a TestSuite.
    def suite():
        return unittest.TestSuite(get_tests())
    unittest.main(defaultTest='suite')
# vim:set ts=4 sw=4 sts=4 expandtab:
| gpl-3.0 |
hrishioa/Aviato | flask/Lib/site-packages/requests/packages/chardet/gb2312freq.py | 3132 | 36011 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
# GB2312 most frequently used character table
#
# Char to FreqOrder table , from hz6763
# 512 --> 0.79 -- 0.79
# 1024 --> 0.92 -- 0.13
# 2048 --> 0.98 -- 0.06
# 6768 --> 1.00 -- 0.02
#
# Ideal Distribution Ratio = 0.79135/(1-0.79135) = 3.79
# Random Distribution Ration = 512 / (3755 - 512) = 0.157
#
# Typical Distribution Ratio about 25% of Ideal one, still much higher that RDR
# Decision threshold for the GB2312 character-distribution analyser.
# Per the table in the header comment above, the ideal ratio of
# frequent-character hits is 3.79; the typical observed ratio is about
# 25% of that, and 0.9 is the cut-off actually used.
GB2312_TYPICAL_DISTRIBUTION_RATIO = 0.9
# Number of leading entries in GB2312CharToFreqOrder that carry meaningful
# frequency ranks; entries past this point are marked in the table itself
# as "of no interest for detection purpose".
GB2312_TABLE_SIZE = 3760
GB2312CharToFreqOrder = (
1671, 749,1443,2364,3924,3807,2330,3921,1704,3463,2691,1511,1515, 572,3191,2205,
2361, 224,2558, 479,1711, 963,3162, 440,4060,1905,2966,2947,3580,2647,3961,3842,
2204, 869,4207, 970,2678,5626,2944,2956,1479,4048, 514,3595, 588,1346,2820,3409,
249,4088,1746,1873,2047,1774, 581,1813, 358,1174,3590,1014,1561,4844,2245, 670,
1636,3112, 889,1286, 953, 556,2327,3060,1290,3141, 613, 185,3477,1367, 850,3820,
1715,2428,2642,2303,2732,3041,2562,2648,3566,3946,1349, 388,3098,2091,1360,3585,
152,1687,1539, 738,1559, 59,1232,2925,2267,1388,1249,1741,1679,2960, 151,1566,
1125,1352,4271, 924,4296, 385,3166,4459, 310,1245,2850, 70,3285,2729,3534,3575,
2398,3298,3466,1960,2265, 217,3647, 864,1909,2084,4401,2773,1010,3269,5152, 853,
3051,3121,1244,4251,1895, 364,1499,1540,2313,1180,3655,2268, 562, 715,2417,3061,
544, 336,3768,2380,1752,4075, 950, 280,2425,4382, 183,2759,3272, 333,4297,2155,
1688,2356,1444,1039,4540, 736,1177,3349,2443,2368,2144,2225, 565, 196,1482,3406,
927,1335,4147, 692, 878,1311,1653,3911,3622,1378,4200,1840,2969,3149,2126,1816,
2534,1546,2393,2760, 737,2494, 13, 447, 245,2747, 38,2765,2129,2589,1079, 606,
360, 471,3755,2890, 404, 848, 699,1785,1236, 370,2221,1023,3746,2074,2026,2023,
2388,1581,2119, 812,1141,3091,2536,1519, 804,2053, 406,1596,1090, 784, 548,4414,
1806,2264,2936,1100, 343,4114,5096, 622,3358, 743,3668,1510,1626,5020,3567,2513,
3195,4115,5627,2489,2991, 24,2065,2697,1087,2719, 48,1634, 315, 68, 985,2052,
198,2239,1347,1107,1439, 597,2366,2172, 871,3307, 919,2487,2790,1867, 236,2570,
1413,3794, 906,3365,3381,1701,1982,1818,1524,2924,1205, 616,2586,2072,2004, 575,
253,3099, 32,1365,1182, 197,1714,2454,1201, 554,3388,3224,2748, 756,2587, 250,
2567,1507,1517,3529,1922,2761,2337,3416,1961,1677,2452,2238,3153, 615, 911,1506,
1474,2495,1265,1906,2749,3756,3280,2161, 898,2714,1759,3450,2243,2444, 563, 26,
3286,2266,3769,3344,2707,3677, 611,1402, 531,1028,2871,4548,1375, 261,2948, 835,
1190,4134, 353, 840,2684,1900,3082,1435,2109,1207,1674, 329,1872,2781,4055,2686,
2104, 608,3318,2423,2957,2768,1108,3739,3512,3271,3985,2203,1771,3520,1418,2054,
1681,1153, 225,1627,2929, 162,2050,2511,3687,1954, 124,1859,2431,1684,3032,2894,
585,4805,3969,2869,2704,2088,2032,2095,3656,2635,4362,2209, 256, 518,2042,2105,
3777,3657, 643,2298,1148,1779, 190, 989,3544, 414, 11,2135,2063,2979,1471, 403,
3678, 126, 770,1563, 671,2499,3216,2877, 600,1179, 307,2805,4937,1268,1297,2694,
252,4032,1448,1494,1331,1394, 127,2256, 222,1647,1035,1481,3056,1915,1048, 873,
3651, 210, 33,1608,2516, 200,1520, 415, 102, 0,3389,1287, 817, 91,3299,2940,
836,1814, 549,2197,1396,1669,2987,3582,2297,2848,4528,1070, 687, 20,1819, 121,
1552,1364,1461,1968,2617,3540,2824,2083, 177, 948,4938,2291, 110,4549,2066, 648,
3359,1755,2110,2114,4642,4845,1693,3937,3308,1257,1869,2123, 208,1804,3159,2992,
2531,2549,3361,2418,1350,2347,2800,2568,1291,2036,2680, 72, 842,1990, 212,1233,
1154,1586, 75,2027,3410,4900,1823,1337,2710,2676, 728,2810,1522,3026,4995, 157,
755,1050,4022, 710, 785,1936,2194,2085,1406,2777,2400, 150,1250,4049,1206, 807,
1910, 534, 529,3309,1721,1660, 274, 39,2827, 661,2670,1578, 925,3248,3815,1094,
4278,4901,4252, 41,1150,3747,2572,2227,4501,3658,4902,3813,3357,3617,2884,2258,
887, 538,4187,3199,1294,2439,3042,2329,2343,2497,1255, 107, 543,1527, 521,3478,
3568, 194,5062, 15, 961,3870,1241,1192,2664, 66,5215,3260,2111,1295,1127,2152,
3805,4135, 901,1164,1976, 398,1278, 530,1460, 748, 904,1054,1966,1426, 53,2909,
509, 523,2279,1534, 536,1019, 239,1685, 460,2353, 673,1065,2401,3600,4298,2272,
1272,2363, 284,1753,3679,4064,1695, 81, 815,2677,2757,2731,1386, 859, 500,4221,
2190,2566, 757,1006,2519,2068,1166,1455, 337,2654,3203,1863,1682,1914,3025,1252,
1409,1366, 847, 714,2834,2038,3209, 964,2970,1901, 885,2553,1078,1756,3049, 301,
1572,3326, 688,2130,1996,2429,1805,1648,2930,3421,2750,3652,3088, 262,1158,1254,
389,1641,1812, 526,1719, 923,2073,1073,1902, 468, 489,4625,1140, 857,2375,3070,
3319,2863, 380, 116,1328,2693,1161,2244, 273,1212,1884,2769,3011,1775,1142, 461,
3066,1200,2147,2212, 790, 702,2695,4222,1601,1058, 434,2338,5153,3640, 67,2360,
4099,2502, 618,3472,1329, 416,1132, 830,2782,1807,2653,3211,3510,1662, 192,2124,
296,3979,1739,1611,3684, 23, 118, 324, 446,1239,1225, 293,2520,3814,3795,2535,
3116, 17,1074, 467,2692,2201, 387,2922, 45,1326,3055,1645,3659,2817, 958, 243,
1903,2320,1339,2825,1784,3289, 356, 576, 865,2315,2381,3377,3916,1088,3122,1713,
1655, 935, 628,4689,1034,1327, 441, 800, 720, 894,1979,2183,1528,5289,2702,1071,
4046,3572,2399,1571,3281, 79, 761,1103, 327, 134, 758,1899,1371,1615, 879, 442,
215,2605,2579, 173,2048,2485,1057,2975,3317,1097,2253,3801,4263,1403,1650,2946,
814,4968,3487,1548,2644,1567,1285, 2, 295,2636, 97, 946,3576, 832, 141,4257,
3273, 760,3821,3521,3156,2607, 949,1024,1733,1516,1803,1920,2125,2283,2665,3180,
1501,2064,3560,2171,1592, 803,3518,1416, 732,3897,4258,1363,1362,2458, 119,1427,
602,1525,2608,1605,1639,3175, 694,3064, 10, 465, 76,2000,4846,4208, 444,3781,
1619,3353,2206,1273,3796, 740,2483, 320,1723,2377,3660,2619,1359,1137,1762,1724,
2345,2842,1850,1862, 912, 821,1866, 612,2625,1735,2573,3369,1093, 844, 89, 937,
930,1424,3564,2413,2972,1004,3046,3019,2011, 711,3171,1452,4178, 428, 801,1943,
432, 445,2811, 206,4136,1472, 730, 349, 73, 397,2802,2547, 998,1637,1167, 789,
396,3217, 154,1218, 716,1120,1780,2819,4826,1931,3334,3762,2139,1215,2627, 552,
3664,3628,3232,1405,2383,3111,1356,2652,3577,3320,3101,1703, 640,1045,1370,1246,
4996, 371,1575,2436,1621,2210, 984,4033,1734,2638, 16,4529, 663,2755,3255,1451,
3917,2257,1253,1955,2234,1263,2951, 214,1229, 617, 485, 359,1831,1969, 473,2310,
750,2058, 165, 80,2864,2419, 361,4344,2416,2479,1134, 796,3726,1266,2943, 860,
2715, 938, 390,2734,1313,1384, 248, 202, 877,1064,2854, 522,3907, 279,1602, 297,
2357, 395,3740, 137,2075, 944,4089,2584,1267,3802, 62,1533,2285, 178, 176, 780,
2440, 201,3707, 590, 478,1560,4354,2117,1075, 30, 74,4643,4004,1635,1441,2745,
776,2596, 238,1077,1692,1912,2844, 605, 499,1742,3947, 241,3053, 980,1749, 936,
2640,4511,2582, 515,1543,2162,5322,2892,2993, 890,2148,1924, 665,1827,3581,1032,
968,3163, 339,1044,1896, 270, 583,1791,1720,4367,1194,3488,3669, 43,2523,1657,
163,2167, 290,1209,1622,3378, 550, 634,2508,2510, 695,2634,2384,2512,1476,1414,
220,1469,2341,2138,2852,3183,2900,4939,2865,3502,1211,3680, 854,3227,1299,2976,
3172, 186,2998,1459, 443,1067,3251,1495, 321,1932,3054, 909, 753,1410,1828, 436,
2441,1119,1587,3164,2186,1258, 227, 231,1425,1890,3200,3942, 247, 959, 725,5254,
2741, 577,2158,2079, 929, 120, 174, 838,2813, 591,1115, 417,2024, 40,3240,1536,
1037, 291,4151,2354, 632,1298,2406,2500,3535,1825,1846,3451, 205,1171, 345,4238,
18,1163, 811, 685,2208,1217, 425,1312,1508,1175,4308,2552,1033, 587,1381,3059,
2984,3482, 340,1316,4023,3972, 792,3176, 519, 777,4690, 918, 933,4130,2981,3741,
90,3360,2911,2200,5184,4550, 609,3079,2030, 272,3379,2736, 363,3881,1130,1447,
286, 779, 357,1169,3350,3137,1630,1220,2687,2391, 747,1277,3688,2618,2682,2601,
1156,3196,5290,4034,3102,1689,3596,3128, 874, 219,2783, 798, 508,1843,2461, 269,
1658,1776,1392,1913,2983,3287,2866,2159,2372, 829,4076, 46,4253,2873,1889,1894,
915,1834,1631,2181,2318, 298, 664,2818,3555,2735, 954,3228,3117, 527,3511,2173,
681,2712,3033,2247,2346,3467,1652, 155,2164,3382, 113,1994, 450, 899, 494, 994,
1237,2958,1875,2336,1926,3727, 545,1577,1550, 633,3473, 204,1305,3072,2410,1956,
2471, 707,2134, 841,2195,2196,2663,3843,1026,4940, 990,3252,4997, 368,1092, 437,
3212,3258,1933,1829, 675,2977,2893, 412, 943,3723,4644,3294,3283,2230,2373,5154,
2389,2241,2661,2323,1404,2524, 593, 787, 677,3008,1275,2059, 438,2709,2609,2240,
2269,2246,1446, 36,1568,1373,3892,1574,2301,1456,3962, 693,2276,5216,2035,1143,
2720,1919,1797,1811,2763,4137,2597,1830,1699,1488,1198,2090, 424,1694, 312,3634,
3390,4179,3335,2252,1214, 561,1059,3243,2295,2561, 975,5155,2321,2751,3772, 472,
1537,3282,3398,1047,2077,2348,2878,1323,3340,3076, 690,2906, 51, 369, 170,3541,
1060,2187,2688,3670,2541,1083,1683, 928,3918, 459, 109,4427, 599,3744,4286, 143,
2101,2730,2490, 82,1588,3036,2121, 281,1860, 477,4035,1238,2812,3020,2716,3312,
1530,2188,2055,1317, 843, 636,1808,1173,3495, 649, 181,1002, 147,3641,1159,2414,
3750,2289,2795, 813,3123,2610,1136,4368, 5,3391,4541,2174, 420, 429,1728, 754,
1228,2115,2219, 347,2223,2733, 735,1518,3003,2355,3134,1764,3948,3329,1888,2424,
1001,1234,1972,3321,3363,1672,1021,1450,1584, 226, 765, 655,2526,3404,3244,2302,
3665, 731, 594,2184, 319,1576, 621, 658,2656,4299,2099,3864,1279,2071,2598,2739,
795,3086,3699,3908,1707,2352,2402,1382,3136,2475,1465,4847,3496,3865,1085,3004,
2591,1084, 213,2287,1963,3565,2250, 822, 793,4574,3187,1772,1789,3050, 595,1484,
1959,2770,1080,2650, 456, 422,2996, 940,3322,4328,4345,3092,2742, 965,2784, 739,
4124, 952,1358,2498,2949,2565, 332,2698,2378, 660,2260,2473,4194,3856,2919, 535,
1260,2651,1208,1428,1300,1949,1303,2942, 433,2455,2450,1251,1946, 614,1269, 641,
1306,1810,2737,3078,2912, 564,2365,1419,1415,1497,4460,2367,2185,1379,3005,1307,
3218,2175,1897,3063, 682,1157,4040,4005,1712,1160,1941,1399, 394, 402,2952,1573,
1151,2986,2404, 862, 299,2033,1489,3006, 346, 171,2886,3401,1726,2932, 168,2533,
47,2507,1030,3735,1145,3370,1395,1318,1579,3609,4560,2857,4116,1457,2529,1965,
504,1036,2690,2988,2405, 745,5871, 849,2397,2056,3081, 863,2359,3857,2096, 99,
1397,1769,2300,4428,1643,3455,1978,1757,3718,1440, 35,4879,3742,1296,4228,2280,
160,5063,1599,2013, 166, 520,3479,1646,3345,3012, 490,1937,1545,1264,2182,2505,
1096,1188,1369,1436,2421,1667,2792,2460,1270,2122, 727,3167,2143, 806,1706,1012,
1800,3037, 960,2218,1882, 805, 139,2456,1139,1521, 851,1052,3093,3089, 342,2039,
744,5097,1468,1502,1585,2087, 223, 939, 326,2140,2577, 892,2481,1623,4077, 982,
3708, 135,2131, 87,2503,3114,2326,1106, 876,1616, 547,2997,2831,2093,3441,4530,
4314, 9,3256,4229,4148, 659,1462,1986,1710,2046,2913,2231,4090,4880,5255,3392,
3274,1368,3689,4645,1477, 705,3384,3635,1068,1529,2941,1458,3782,1509, 100,1656,
2548, 718,2339, 408,1590,2780,3548,1838,4117,3719,1345,3530, 717,3442,2778,3220,
2898,1892,4590,3614,3371,2043,1998,1224,3483, 891, 635, 584,2559,3355, 733,1766,
1729,1172,3789,1891,2307, 781,2982,2271,1957,1580,5773,2633,2005,4195,3097,1535,
3213,1189,1934,5693,3262, 586,3118,1324,1598, 517,1564,2217,1868,1893,4445,3728,
2703,3139,1526,1787,1992,3882,2875,1549,1199,1056,2224,1904,2711,5098,4287, 338,
1993,3129,3489,2689,1809,2815,1997, 957,1855,3898,2550,3275,3057,1105,1319, 627,
1505,1911,1883,3526, 698,3629,3456,1833,1431, 746, 77,1261,2017,2296,1977,1885,
125,1334,1600, 525,1798,1109,2222,1470,1945, 559,2236,1186,3443,2476,1929,1411,
2411,3135,1777,3372,2621,1841,1613,3229, 668,1430,1839,2643,2916, 195,1989,2671,
2358,1387, 629,3205,2293,5256,4439, 123,1310, 888,1879,4300,3021,3605,1003,1162,
3192,2910,2010, 140,2395,2859, 55,1082,2012,2901, 662, 419,2081,1438, 680,2774,
4654,3912,1620,1731,1625,5035,4065,2328, 512,1344, 802,5443,2163,2311,2537, 524,
3399, 98,1155,2103,1918,2606,3925,2816,1393,2465,1504,3773,2177,3963,1478,4346,
180,1113,4655,3461,2028,1698, 833,2696,1235,1322,1594,4408,3623,3013,3225,2040,
3022, 541,2881, 607,3632,2029,1665,1219, 639,1385,1686,1099,2803,3231,1938,3188,
2858, 427, 676,2772,1168,2025, 454,3253,2486,3556, 230,1950, 580, 791,1991,1280,
1086,1974,2034, 630, 257,3338,2788,4903,1017, 86,4790, 966,2789,1995,1696,1131,
259,3095,4188,1308, 179,1463,5257, 289,4107,1248, 42,3413,1725,2288, 896,1947,
774,4474,4254, 604,3430,4264, 392,2514,2588, 452, 237,1408,3018, 988,4531,1970,
3034,3310, 540,2370,1562,1288,2990, 502,4765,1147, 4,1853,2708, 207, 294,2814,
4078,2902,2509, 684, 34,3105,3532,2551, 644, 709,2801,2344, 573,1727,3573,3557,
2021,1081,3100,4315,2100,3681, 199,2263,1837,2385, 146,3484,1195,2776,3949, 997,
1939,3973,1008,1091,1202,1962,1847,1149,4209,5444,1076, 493, 117,5400,2521, 972,
1490,2934,1796,4542,2374,1512,2933,2657, 413,2888,1135,2762,2314,2156,1355,2369,
766,2007,2527,2170,3124,2491,2593,2632,4757,2437, 234,3125,3591,1898,1750,1376,
1942,3468,3138, 570,2127,2145,3276,4131, 962, 132,1445,4196, 19, 941,3624,3480,
3366,1973,1374,4461,3431,2629, 283,2415,2275, 808,2887,3620,2112,2563,1353,3610,
955,1089,3103,1053, 96, 88,4097, 823,3808,1583, 399, 292,4091,3313, 421,1128,
642,4006, 903,2539,1877,2082, 596, 29,4066,1790, 722,2157, 130, 995,1569, 769,
1485, 464, 513,2213, 288,1923,1101,2453,4316, 133, 486,2445, 50, 625, 487,2207,
57, 423, 481,2962, 159,3729,1558, 491, 303, 482, 501, 240,2837, 112,3648,2392,
1783, 362, 8,3433,3422, 610,2793,3277,1390,1284,1654, 21,3823, 734, 367, 623,
193, 287, 374,1009,1483, 816, 476, 313,2255,2340,1262,2150,2899,1146,2581, 782,
2116,1659,2018,1880, 255,3586,3314,1110,2867,2137,2564, 986,2767,5185,2006, 650,
158, 926, 762, 881,3157,2717,2362,3587, 306,3690,3245,1542,3077,2427,1691,2478,
2118,2985,3490,2438, 539,2305, 983, 129,1754, 355,4201,2386, 827,2923, 104,1773,
2838,2771, 411,2905,3919, 376, 767, 122,1114, 828,2422,1817,3506, 266,3460,1007,
1609,4998, 945,2612,4429,2274, 726,1247,1964,2914,2199,2070,4002,4108, 657,3323,
1422, 579, 455,2764,4737,1222,2895,1670, 824,1223,1487,2525, 558, 861,3080, 598,
2659,2515,1967, 752,2583,2376,2214,4180, 977, 704,2464,4999,2622,4109,1210,2961,
819,1541, 142,2284, 44, 418, 457,1126,3730,4347,4626,1644,1876,3671,1864, 302,
1063,5694, 624, 723,1984,3745,1314,1676,2488,1610,1449,3558,3569,2166,2098, 409,
1011,2325,3704,2306, 818,1732,1383,1824,1844,3757, 999,2705,3497,1216,1423,2683,
2426,2954,2501,2726,2229,1475,2554,5064,1971,1794,1666,2014,1343, 783, 724, 191,
2434,1354,2220,5065,1763,2752,2472,4152, 131, 175,2885,3434, 92,1466,4920,2616,
3871,3872,3866, 128,1551,1632, 669,1854,3682,4691,4125,1230, 188,2973,3290,1302,
1213, 560,3266, 917, 763,3909,3249,1760, 868,1958, 764,1782,2097, 145,2277,3774,
4462, 64,1491,3062, 971,2132,3606,2442, 221,1226,1617, 218, 323,1185,3207,3147,
571, 619,1473,1005,1744,2281, 449,1887,2396,3685, 275, 375,3816,1743,3844,3731,
845,1983,2350,4210,1377, 773, 967,3499,3052,3743,2725,4007,1697,1022,3943,1464,
3264,2855,2722,1952,1029,2839,2467, 84,4383,2215, 820,1391,2015,2448,3672, 377,
1948,2168, 797,2545,3536,2578,2645, 94,2874,1678, 405,1259,3071, 771, 546,1315,
470,1243,3083, 895,2468, 981, 969,2037, 846,4181, 653,1276,2928, 14,2594, 557,
3007,2474, 156, 902,1338,1740,2574, 537,2518, 973,2282,2216,2433,1928, 138,2903,
1293,2631,1612, 646,3457, 839,2935, 111, 496,2191,2847, 589,3186, 149,3994,2060,
4031,2641,4067,3145,1870, 37,3597,2136,1025,2051,3009,3383,3549,1121,1016,3261,
1301, 251,2446,2599,2153, 872,3246, 637, 334,3705, 831, 884, 921,3065,3140,4092,
2198,1944, 246,2964, 108,2045,1152,1921,2308,1031, 203,3173,4170,1907,3890, 810,
1401,2003,1690, 506, 647,1242,2828,1761,1649,3208,2249,1589,3709,2931,5156,1708,
498, 666,2613, 834,3817,1231, 184,2851,1124, 883,3197,2261,3710,1765,1553,2658,
1178,2639,2351, 93,1193, 942,2538,2141,4402, 235,1821, 870,1591,2192,1709,1871,
3341,1618,4126,2595,2334, 603, 651, 69, 701, 268,2662,3411,2555,1380,1606, 503,
448, 254,2371,2646, 574,1187,2309,1770, 322,2235,1292,1801, 305, 566,1133, 229,
2067,2057, 706, 167, 483,2002,2672,3295,1820,3561,3067, 316, 378,2746,3452,1112,
136,1981, 507,1651,2917,1117, 285,4591, 182,2580,3522,1304, 335,3303,1835,2504,
1795,1792,2248, 674,1018,2106,2449,1857,2292,2845, 976,3047,1781,2600,2727,1389,
1281, 52,3152, 153, 265,3950, 672,3485,3951,4463, 430,1183, 365, 278,2169, 27,
1407,1336,2304, 209,1340,1730,2202,1852,2403,2883, 979,1737,1062, 631,2829,2542,
3876,2592, 825,2086,2226,3048,3625, 352,1417,3724, 542, 991, 431,1351,3938,1861,
2294, 826,1361,2927,3142,3503,1738, 463,2462,2723, 582,1916,1595,2808, 400,3845,
3891,2868,3621,2254, 58,2492,1123, 910,2160,2614,1372,1603,1196,1072,3385,1700,
3267,1980, 696, 480,2430, 920, 799,1570,2920,1951,2041,4047,2540,1321,4223,2469,
3562,2228,1271,2602, 401,2833,3351,2575,5157, 907,2312,1256, 410, 263,3507,1582,
996, 678,1849,2316,1480, 908,3545,2237, 703,2322, 667,1826,2849,1531,2604,2999,
2407,3146,2151,2630,1786,3711, 469,3542, 497,3899,2409, 858, 837,4446,3393,1274,
786, 620,1845,2001,3311, 484, 308,3367,1204,1815,3691,2332,1532,2557,1842,2020,
2724,1927,2333,4440, 567, 22,1673,2728,4475,1987,1858,1144,1597, 101,1832,3601,
12, 974,3783,4391, 951,1412, 1,3720, 453,4608,4041, 528,1041,1027,3230,2628,
1129, 875,1051,3291,1203,2262,1069,2860,2799,2149,2615,3278, 144,1758,3040, 31,
475,1680, 366,2685,3184, 311,1642,4008,2466,5036,1593,1493,2809, 216,1420,1668,
233, 304,2128,3284, 232,1429,1768,1040,2008,3407,2740,2967,2543, 242,2133, 778,
1565,2022,2620, 505,2189,2756,1098,2273, 372,1614, 708, 553,2846,2094,2278, 169,
3626,2835,4161, 228,2674,3165, 809,1454,1309, 466,1705,1095, 900,3423, 880,2667,
3751,5258,2317,3109,2571,4317,2766,1503,1342, 866,4447,1118, 63,2076, 314,1881,
1348,1061, 172, 978,3515,1747, 532, 511,3970, 6, 601, 905,2699,3300,1751, 276,
1467,3725,2668, 65,4239,2544,2779,2556,1604, 578,2451,1802, 992,2331,2624,1320,
3446, 713,1513,1013, 103,2786,2447,1661, 886,1702, 916, 654,3574,2031,1556, 751,
2178,2821,2179,1498,1538,2176, 271, 914,2251,2080,1325, 638,1953,2937,3877,2432,
2754, 95,3265,1716, 260,1227,4083, 775, 106,1357,3254, 426,1607, 555,2480, 772,
1985, 244,2546, 474, 495,1046,2611,1851,2061, 71,2089,1675,2590, 742,3758,2843,
3222,1433, 267,2180,2576,2826,2233,2092,3913,2435, 956,1745,3075, 856,2113,1116,
451, 3,1988,2896,1398, 993,2463,1878,2049,1341,2718,2721,2870,2108, 712,2904,
4363,2753,2324, 277,2872,2349,2649, 384, 987, 435, 691,3000, 922, 164,3939, 652,
1500,1184,4153,2482,3373,2165,4848,2335,3775,3508,3154,2806,2830,1554,2102,1664,
2530,1434,2408, 893,1547,2623,3447,2832,2242,2532,3169,2856,3223,2078, 49,3770,
3469, 462, 318, 656,2259,3250,3069, 679,1629,2758, 344,1138,1104,3120,1836,1283,
3115,2154,1437,4448, 934, 759,1999, 794,2862,1038, 533,2560,1722,2342, 855,2626,
1197,1663,4476,3127, 85,4240,2528, 25,1111,1181,3673, 407,3470,4561,2679,2713,
768,1925,2841,3986,1544,1165, 932, 373,1240,2146,1930,2673, 721,4766, 354,4333,
391,2963, 187, 61,3364,1442,1102, 330,1940,1767, 341,3809,4118, 393,2496,2062,
2211, 105, 331, 300, 439, 913,1332, 626, 379,3304,1557, 328, 689,3952, 309,1555,
931, 317,2517,3027, 325, 569, 686,2107,3084, 60,1042,1333,2794, 264,3177,4014,
1628, 258,3712, 7,4464,1176,1043,1778, 683, 114,1975, 78,1492, 383,1886, 510,
386, 645,5291,2891,2069,3305,4138,3867,2939,2603,2493,1935,1066,1848,3588,1015,
1282,1289,4609, 697,1453,3044,2666,3611,1856,2412, 54, 719,1330, 568,3778,2459,
1748, 788, 492, 551,1191,1000, 488,3394,3763, 282,1799, 348,2016,1523,3155,2390,
1049, 382,2019,1788,1170, 729,2968,3523, 897,3926,2785,2938,3292, 350,2319,3238,
1718,1717,2655,3453,3143,4465, 161,2889,2980,2009,1421, 56,1908,1640,2387,2232,
1917,1874,2477,4921, 148, 83,3438, 592,4245,2882,1822,1055, 741, 115,1496,1624,
381,1638,4592,1020, 516,3214, 458, 947,4575,1432, 211,1514,2926,1865,2142, 189,
852,1221,1400,1486, 882,2299,4036, 351, 28,1122, 700,6479,6480,6481,6482,6483, # last 512
#Everything below is of no interest for detection purpose
5508,6484,3900,3414,3974,4441,4024,3537,4037,5628,5099,3633,6485,3148,6486,3636,
5509,3257,5510,5973,5445,5872,4941,4403,3174,4627,5873,6276,2286,4230,5446,5874,
5122,6102,6103,4162,5447,5123,5323,4849,6277,3980,3851,5066,4246,5774,5067,6278,
3001,2807,5695,3346,5775,5974,5158,5448,6487,5975,5976,5776,3598,6279,5696,4806,
4211,4154,6280,6488,6489,6490,6281,4212,5037,3374,4171,6491,4562,4807,4722,4827,
5977,6104,4532,4079,5159,5324,5160,4404,3858,5359,5875,3975,4288,4610,3486,4512,
5325,3893,5360,6282,6283,5560,2522,4231,5978,5186,5449,2569,3878,6284,5401,3578,
4415,6285,4656,5124,5979,2506,4247,4449,3219,3417,4334,4969,4329,6492,4576,4828,
4172,4416,4829,5402,6286,3927,3852,5361,4369,4830,4477,4867,5876,4173,6493,6105,
4657,6287,6106,5877,5450,6494,4155,4868,5451,3700,5629,4384,6288,6289,5878,3189,
4881,6107,6290,6495,4513,6496,4692,4515,4723,5100,3356,6497,6291,3810,4080,5561,
3570,4430,5980,6498,4355,5697,6499,4724,6108,6109,3764,4050,5038,5879,4093,3226,
6292,5068,5217,4693,3342,5630,3504,4831,4377,4466,4309,5698,4431,5777,6293,5778,
4272,3706,6110,5326,3752,4676,5327,4273,5403,4767,5631,6500,5699,5880,3475,5039,
6294,5562,5125,4348,4301,4482,4068,5126,4593,5700,3380,3462,5981,5563,3824,5404,
4970,5511,3825,4738,6295,6501,5452,4516,6111,5881,5564,6502,6296,5982,6503,4213,
4163,3454,6504,6112,4009,4450,6113,4658,6297,6114,3035,6505,6115,3995,4904,4739,
4563,4942,4110,5040,3661,3928,5362,3674,6506,5292,3612,4791,5565,4149,5983,5328,
5259,5021,4725,4577,4564,4517,4364,6298,5405,4578,5260,4594,4156,4157,5453,3592,
3491,6507,5127,5512,4709,4922,5984,5701,4726,4289,6508,4015,6116,5128,4628,3424,
4241,5779,6299,4905,6509,6510,5454,5702,5780,6300,4365,4923,3971,6511,5161,3270,
3158,5985,4100, 867,5129,5703,6117,5363,3695,3301,5513,4467,6118,6512,5455,4232,
4242,4629,6513,3959,4478,6514,5514,5329,5986,4850,5162,5566,3846,4694,6119,5456,
4869,5781,3779,6301,5704,5987,5515,4710,6302,5882,6120,4392,5364,5705,6515,6121,
6516,6517,3736,5988,5457,5989,4695,2457,5883,4551,5782,6303,6304,6305,5130,4971,
6122,5163,6123,4870,3263,5365,3150,4871,6518,6306,5783,5069,5706,3513,3498,4409,
5330,5632,5366,5458,5459,3991,5990,4502,3324,5991,5784,3696,4518,5633,4119,6519,
4630,5634,4417,5707,4832,5992,3418,6124,5993,5567,4768,5218,6520,4595,3458,5367,
6125,5635,6126,4202,6521,4740,4924,6307,3981,4069,4385,6308,3883,2675,4051,3834,
4302,4483,5568,5994,4972,4101,5368,6309,5164,5884,3922,6127,6522,6523,5261,5460,
5187,4164,5219,3538,5516,4111,3524,5995,6310,6311,5369,3181,3386,2484,5188,3464,
5569,3627,5708,6524,5406,5165,4677,4492,6312,4872,4851,5885,4468,5996,6313,5709,
5710,6128,2470,5886,6314,5293,4882,5785,3325,5461,5101,6129,5711,5786,6525,4906,
6526,6527,4418,5887,5712,4808,2907,3701,5713,5888,6528,3765,5636,5331,6529,6530,
3593,5889,3637,4943,3692,5714,5787,4925,6315,6130,5462,4405,6131,6132,6316,5262,
6531,6532,5715,3859,5716,5070,4696,5102,3929,5788,3987,4792,5997,6533,6534,3920,
4809,5000,5998,6535,2974,5370,6317,5189,5263,5717,3826,6536,3953,5001,4883,3190,
5463,5890,4973,5999,4741,6133,6134,3607,5570,6000,4711,3362,3630,4552,5041,6318,
6001,2950,2953,5637,4646,5371,4944,6002,2044,4120,3429,6319,6537,5103,4833,6538,
6539,4884,4647,3884,6003,6004,4758,3835,5220,5789,4565,5407,6540,6135,5294,4697,
4852,6320,6321,3206,4907,6541,6322,4945,6542,6136,6543,6323,6005,4631,3519,6544,
5891,6545,5464,3784,5221,6546,5571,4659,6547,6324,6137,5190,6548,3853,6549,4016,
4834,3954,6138,5332,3827,4017,3210,3546,4469,5408,5718,3505,4648,5790,5131,5638,
5791,5465,4727,4318,6325,6326,5792,4553,4010,4698,3439,4974,3638,4335,3085,6006,
5104,5042,5166,5892,5572,6327,4356,4519,5222,5573,5333,5793,5043,6550,5639,5071,
4503,6328,6139,6551,6140,3914,3901,5372,6007,5640,4728,4793,3976,3836,4885,6552,
4127,6553,4451,4102,5002,6554,3686,5105,6555,5191,5072,5295,4611,5794,5296,6556,
5893,5264,5894,4975,5466,5265,4699,4976,4370,4056,3492,5044,4886,6557,5795,4432,
4769,4357,5467,3940,4660,4290,6141,4484,4770,4661,3992,6329,4025,4662,5022,4632,
4835,4070,5297,4663,4596,5574,5132,5409,5895,6142,4504,5192,4664,5796,5896,3885,
5575,5797,5023,4810,5798,3732,5223,4712,5298,4084,5334,5468,6143,4052,4053,4336,
4977,4794,6558,5335,4908,5576,5224,4233,5024,4128,5469,5225,4873,6008,5045,4729,
4742,4633,3675,4597,6559,5897,5133,5577,5003,5641,5719,6330,6560,3017,2382,3854,
4406,4811,6331,4393,3964,4946,6561,2420,3722,6562,4926,4378,3247,1736,4442,6332,
5134,6333,5226,3996,2918,5470,4319,4003,4598,4743,4744,4485,3785,3902,5167,5004,
5373,4394,5898,6144,4874,1793,3997,6334,4085,4214,5106,5642,4909,5799,6009,4419,
4189,3330,5899,4165,4420,5299,5720,5227,3347,6145,4081,6335,2876,3930,6146,3293,
3786,3910,3998,5900,5300,5578,2840,6563,5901,5579,6147,3531,5374,6564,6565,5580,
4759,5375,6566,6148,3559,5643,6336,6010,5517,6337,6338,5721,5902,3873,6011,6339,
6567,5518,3868,3649,5722,6568,4771,4947,6569,6149,4812,6570,2853,5471,6340,6341,
5644,4795,6342,6012,5723,6343,5724,6013,4349,6344,3160,6150,5193,4599,4514,4493,
5168,4320,6345,4927,3666,4745,5169,5903,5005,4928,6346,5725,6014,4730,4203,5046,
4948,3395,5170,6015,4150,6016,5726,5519,6347,5047,3550,6151,6348,4197,4310,5904,
6571,5581,2965,6152,4978,3960,4291,5135,6572,5301,5727,4129,4026,5905,4853,5728,
5472,6153,6349,4533,2700,4505,5336,4678,3583,5073,2994,4486,3043,4554,5520,6350,
6017,5800,4487,6351,3931,4103,5376,6352,4011,4321,4311,4190,5136,6018,3988,3233,
4350,5906,5645,4198,6573,5107,3432,4191,3435,5582,6574,4139,5410,6353,5411,3944,
5583,5074,3198,6575,6354,4358,6576,5302,4600,5584,5194,5412,6577,6578,5585,5413,
5303,4248,5414,3879,4433,6579,4479,5025,4854,5415,6355,4760,4772,3683,2978,4700,
3797,4452,3965,3932,3721,4910,5801,6580,5195,3551,5907,3221,3471,3029,6019,3999,
5908,5909,5266,5267,3444,3023,3828,3170,4796,5646,4979,4259,6356,5647,5337,3694,
6357,5648,5338,4520,4322,5802,3031,3759,4071,6020,5586,4836,4386,5048,6581,3571,
4679,4174,4949,6154,4813,3787,3402,3822,3958,3215,3552,5268,4387,3933,4950,4359,
6021,5910,5075,3579,6358,4234,4566,5521,6359,3613,5049,6022,5911,3375,3702,3178,
4911,5339,4521,6582,6583,4395,3087,3811,5377,6023,6360,6155,4027,5171,5649,4421,
4249,2804,6584,2270,6585,4000,4235,3045,6156,5137,5729,4140,4312,3886,6361,4330,
6157,4215,6158,3500,3676,4929,4331,3713,4930,5912,4265,3776,3368,5587,4470,4855,
3038,4980,3631,6159,6160,4132,4680,6161,6362,3923,4379,5588,4255,6586,4121,6587,
6363,4649,6364,3288,4773,4774,6162,6024,6365,3543,6588,4274,3107,3737,5050,5803,
4797,4522,5589,5051,5730,3714,4887,5378,4001,4523,6163,5026,5522,4701,4175,2791,
3760,6589,5473,4224,4133,3847,4814,4815,4775,3259,5416,6590,2738,6164,6025,5304,
3733,5076,5650,4816,5590,6591,6165,6592,3934,5269,6593,3396,5340,6594,5804,3445,
3602,4042,4488,5731,5732,3525,5591,4601,5196,6166,6026,5172,3642,4612,3202,4506,
4798,6366,3818,5108,4303,5138,5139,4776,3332,4304,2915,3415,4434,5077,5109,4856,
2879,5305,4817,6595,5913,3104,3144,3903,4634,5341,3133,5110,5651,5805,6167,4057,
5592,2945,4371,5593,6596,3474,4182,6367,6597,6168,4507,4279,6598,2822,6599,4777,
4713,5594,3829,6169,3887,5417,6170,3653,5474,6368,4216,2971,5228,3790,4579,6369,
5733,6600,6601,4951,4746,4555,6602,5418,5475,6027,3400,4665,5806,6171,4799,6028,
5052,6172,3343,4800,4747,5006,6370,4556,4217,5476,4396,5229,5379,5477,3839,5914,
5652,5807,4714,3068,4635,5808,6173,5342,4192,5078,5419,5523,5734,6174,4557,6175,
4602,6371,6176,6603,5809,6372,5735,4260,3869,5111,5230,6029,5112,6177,3126,4681,
5524,5915,2706,3563,4748,3130,6178,4018,5525,6604,6605,5478,4012,4837,6606,4534,
4193,5810,4857,3615,5479,6030,4082,3697,3539,4086,5270,3662,4508,4931,5916,4912,
5811,5027,3888,6607,4397,3527,3302,3798,2775,2921,2637,3966,4122,4388,4028,4054,
1633,4858,5079,3024,5007,3982,3412,5736,6608,3426,3236,5595,3030,6179,3427,3336,
3279,3110,6373,3874,3039,5080,5917,5140,4489,3119,6374,5812,3405,4494,6031,4666,
4141,6180,4166,6032,5813,4981,6609,5081,4422,4982,4112,3915,5653,3296,3983,6375,
4266,4410,5654,6610,6181,3436,5082,6611,5380,6033,3819,5596,4535,5231,5306,5113,
6612,4952,5918,4275,3113,6613,6376,6182,6183,5814,3073,4731,4838,5008,3831,6614,
4888,3090,3848,4280,5526,5232,3014,5655,5009,5737,5420,5527,6615,5815,5343,5173,
5381,4818,6616,3151,4953,6617,5738,2796,3204,4360,2989,4281,5739,5174,5421,5197,
3132,5141,3849,5142,5528,5083,3799,3904,4839,5480,2880,4495,3448,6377,6184,5271,
5919,3771,3193,6034,6035,5920,5010,6036,5597,6037,6378,6038,3106,5422,6618,5423,
5424,4142,6619,4889,5084,4890,4313,5740,6620,3437,5175,5307,5816,4199,5198,5529,
5817,5199,5656,4913,5028,5344,3850,6185,2955,5272,5011,5818,4567,4580,5029,5921,
3616,5233,6621,6622,6186,4176,6039,6379,6380,3352,5200,5273,2908,5598,5234,3837,
5308,6623,6624,5819,4496,4323,5309,5201,6625,6626,4983,3194,3838,4167,5530,5922,
5274,6381,6382,3860,3861,5599,3333,4292,4509,6383,3553,5481,5820,5531,4778,6187,
3955,3956,4324,4389,4218,3945,4325,3397,2681,5923,4779,5085,4019,5482,4891,5382,
5383,6040,4682,3425,5275,4094,6627,5310,3015,5483,5657,4398,5924,3168,4819,6628,
5925,6629,5532,4932,4613,6041,6630,4636,6384,4780,4204,5658,4423,5821,3989,4683,
5822,6385,4954,6631,5345,6188,5425,5012,5384,3894,6386,4490,4104,6632,5741,5053,
6633,5823,5926,5659,5660,5927,6634,5235,5742,5824,4840,4933,4820,6387,4859,5928,
4955,6388,4143,3584,5825,5346,5013,6635,5661,6389,5014,5484,5743,4337,5176,5662,
6390,2836,6391,3268,6392,6636,6042,5236,6637,4158,6638,5744,5663,4471,5347,3663,
4123,5143,4293,3895,6639,6640,5311,5929,5826,3800,6189,6393,6190,5664,5348,3554,
3594,4749,4603,6641,5385,4801,6043,5827,4183,6642,5312,5426,4761,6394,5665,6191,
4715,2669,6643,6644,5533,3185,5427,5086,5930,5931,5386,6192,6044,6645,4781,4013,
5745,4282,4435,5534,4390,4267,6045,5746,4984,6046,2743,6193,3501,4087,5485,5932,
5428,4184,4095,5747,4061,5054,3058,3862,5933,5600,6646,5144,3618,6395,3131,5055,
5313,6396,4650,4956,3855,6194,3896,5202,4985,4029,4225,6195,6647,5828,5486,5829,
3589,3002,6648,6397,4782,5276,6649,6196,6650,4105,3803,4043,5237,5830,6398,4096,
3643,6399,3528,6651,4453,3315,4637,6652,3984,6197,5535,3182,3339,6653,3096,2660,
6400,6654,3449,5934,4250,4236,6047,6401,5831,6655,5487,3753,4062,5832,6198,6199,
6656,3766,6657,3403,4667,6048,6658,4338,2897,5833,3880,2797,3780,4326,6659,5748,
5015,6660,5387,4351,5601,4411,6661,3654,4424,5935,4339,4072,5277,4568,5536,6402,
6662,5238,6663,5349,5203,6200,5204,6201,5145,4536,5016,5056,4762,5834,4399,4957,
6202,6403,5666,5749,6664,4340,6665,5936,5177,5667,6666,6667,3459,4668,6404,6668,
6669,4543,6203,6670,4276,6405,4480,5537,6671,4614,5205,5668,6672,3348,2193,4763,
6406,6204,5937,5602,4177,5669,3419,6673,4020,6205,4443,4569,5388,3715,3639,6407,
6049,4058,6206,6674,5938,4544,6050,4185,4294,4841,4651,4615,5488,6207,6408,6051,
5178,3241,3509,5835,6208,4958,5836,4341,5489,5278,6209,2823,5538,5350,5206,5429,
6675,4638,4875,4073,3516,4684,4914,4860,5939,5603,5389,6052,5057,3237,5490,3791,
6676,6409,6677,4821,4915,4106,5351,5058,4243,5539,4244,5604,4842,4916,5239,3028,
3716,5837,5114,5605,5390,5940,5430,6210,4332,6678,5540,4732,3667,3840,6053,4305,
3408,5670,5541,6410,2744,5240,5750,6679,3234,5606,6680,5607,5671,3608,4283,4159,
4400,5352,4783,6681,6411,6682,4491,4802,6211,6412,5941,6413,6414,5542,5751,6683,
4669,3734,5942,6684,6415,5943,5059,3328,4670,4144,4268,6685,6686,6687,6688,4372,
3603,6689,5944,5491,4373,3440,6416,5543,4784,4822,5608,3792,4616,5838,5672,3514,
5391,6417,4892,6690,4639,6691,6054,5673,5839,6055,6692,6056,5392,6212,4038,5544,
5674,4497,6057,6693,5840,4284,5675,4021,4545,5609,6418,4454,6419,6213,4113,4472,
5314,3738,5087,5279,4074,5610,4959,4063,3179,4750,6058,6420,6214,3476,4498,4716,
5431,4960,4685,6215,5241,6694,6421,6216,6695,5841,5945,6422,3748,5946,5179,3905,
5752,5545,5947,4374,6217,4455,6423,4412,6218,4803,5353,6696,3832,5280,6219,4327,
4702,6220,6221,6059,4652,5432,6424,3749,4751,6425,5753,4986,5393,4917,5948,5030,
5754,4861,4733,6426,4703,6697,6222,4671,5949,4546,4961,5180,6223,5031,3316,5281,
6698,4862,4295,4934,5207,3644,6427,5842,5950,6428,6429,4570,5843,5282,6430,6224,
5088,3239,6060,6699,5844,5755,6061,6431,2701,5546,6432,5115,5676,4039,3993,3327,
4752,4425,5315,6433,3941,6434,5677,4617,4604,3074,4581,6225,5433,6435,6226,6062,
4823,5756,5116,6227,3717,5678,4717,5845,6436,5679,5846,6063,5847,6064,3977,3354,
6437,3863,5117,6228,5547,5394,4499,4524,6229,4605,6230,4306,4500,6700,5951,6065,
3693,5952,5089,4366,4918,6701,6231,5548,6232,6702,6438,4704,5434,6703,6704,5953,
4168,6705,5680,3420,6706,5242,4407,6066,3812,5757,5090,5954,4672,4525,3481,5681,
4618,5395,5354,5316,5955,6439,4962,6707,4526,6440,3465,4673,6067,6441,5682,6708,
5435,5492,5758,5683,4619,4571,4674,4804,4893,4686,5493,4753,6233,6068,4269,6442,
6234,5032,4705,5146,5243,5208,5848,6235,6443,4963,5033,4640,4226,6236,5849,3387,
6444,6445,4436,4437,5850,4843,5494,4785,4894,6709,4361,6710,5091,5956,3331,6237,
4987,5549,6069,6711,4342,3517,4473,5317,6070,6712,6071,4706,6446,5017,5355,6713,
6714,4988,5436,6447,4734,5759,6715,4735,4547,4456,4754,6448,5851,6449,6450,3547,
5852,5318,6451,6452,5092,4205,6716,6238,4620,4219,5611,6239,6072,4481,5760,5957,
5958,4059,6240,6453,4227,4537,6241,5761,4030,4186,5244,5209,3761,4457,4876,3337,
5495,5181,6242,5959,5319,5612,5684,5853,3493,5854,6073,4169,5613,5147,4895,6074,
5210,6717,5182,6718,3830,6243,2798,3841,6075,6244,5855,5614,3604,4606,5496,5685,
5118,5356,6719,6454,5960,5357,5961,6720,4145,3935,4621,5119,5962,4261,6721,6455,
4786,5963,4375,4582,6245,6246,6247,6076,5437,4877,5856,3376,4380,6248,4160,6722,
5148,6456,5211,6457,6723,4718,6458,6724,6249,5358,4044,3297,6459,6250,5857,5615,
5497,5245,6460,5498,6725,6251,6252,5550,3793,5499,2959,5396,6461,6462,4572,5093,
5500,5964,3806,4146,6463,4426,5762,5858,6077,6253,4755,3967,4220,5965,6254,4989,
5501,6464,4352,6726,6078,4764,2290,5246,3906,5438,5283,3767,4964,2861,5763,5094,
6255,6256,4622,5616,5859,5860,4707,6727,4285,4708,4824,5617,6257,5551,4787,5212,
4965,4935,4687,6465,6728,6466,5686,6079,3494,4413,2995,5247,5966,5618,6729,5967,
5764,5765,5687,5502,6730,6731,6080,5397,6467,4990,6258,6732,4538,5060,5619,6733,
4719,5688,5439,5018,5149,5284,5503,6734,6081,4607,6259,5120,3645,5861,4583,6260,
4584,4675,5620,4098,5440,6261,4863,2379,3306,4585,5552,5689,4586,5285,6735,4864,
6736,5286,6082,6737,4623,3010,4788,4381,4558,5621,4587,4896,3698,3161,5248,4353,
4045,6262,3754,5183,4588,6738,6263,6739,6740,5622,3936,6741,6468,6742,6264,5095,
6469,4991,5968,6743,4992,6744,6083,4897,6745,4256,5766,4307,3108,3968,4444,5287,
3889,4343,6084,4510,6085,4559,6086,4898,5969,6746,5623,5061,4919,5249,5250,5504,
5441,6265,5320,4878,3242,5862,5251,3428,6087,6747,4237,5624,5442,6266,5553,4539,
6748,2585,3533,5398,4262,6088,5150,4736,4438,6089,6267,5505,4966,6749,6268,6750,
6269,5288,5554,3650,6090,6091,4624,6092,5690,6751,5863,4270,5691,4277,5555,5864,
6752,5692,4720,4865,6470,5151,4688,4825,6753,3094,6754,6471,3235,4653,6755,5213,
5399,6756,3201,4589,5865,4967,6472,5866,6473,5019,3016,6757,5321,4756,3957,4573,
6093,4993,5767,4721,6474,6758,5625,6759,4458,6475,6270,6760,5556,4994,5214,5252,
6271,3875,5768,6094,5034,5506,4376,5769,6761,2120,6476,5253,5770,6762,5771,5970,
3990,5971,5557,5558,5772,6477,6095,2787,4641,5972,5121,6096,6097,6272,6763,3703,
5867,5507,6273,4206,6274,4789,6098,6764,3619,3646,3833,3804,2394,3788,4936,3978,
4866,4899,6099,6100,5559,6478,6765,3599,5868,6101,5869,5870,6275,6766,4527,6767)
# flake8: noqa
| gpl-2.0 |
zerobatu/edx-platform | common/djangoapps/student/tests/test_microsite.py | 47 | 3772 | """
Test for User Creation from Micro-Sites
"""
from django.test import TestCase
from student.models import UserSignupSource
import mock
import json
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
# Stand-in microsite configuration served by fake_microsite_get_value();
# mirrors the shape of a real microsite config dict, including the extra
# registration fields the microsite marks as required.
FAKE_MICROSITE = {
    "SITE_NAME": "openedx.localhost",
    "university": "fakeuniversity",
    "course_org_filter": "fakeorg",
    "REGISTRATION_EXTRA_FIELDS": {
        "address1": "required",
        "city": "required",
        "state": "required",
        "country": "required",
        "company": "required",
        "title": "required"
    },
    "extended_profile_fields": [
        "address1", "state", "company", "title"
    ]
}
def fake_site_name(name, default=None):
    """Return the fake microsite SITE_NAME; any other key yields *default*."""
    return 'openedx.localhost' if name == 'SITE_NAME' else default
def fake_microsite_get_value(name, default=None):
    """Look *name* up in the FAKE_MICROSITE config, falling back to *default*."""
    if name in FAKE_MICROSITE:
        return FAKE_MICROSITE[name]
    return default
class TestMicrosite(TestCase):
    """Test for Account Creation from a white labeled Micro-Sites"""
    def setUp(self):
        super(TestMicrosite, self).setUp()
        self.username = "test_user"
        self.url = reverse("create_account")
        # minimal valid registration payload
        self.params = {
            "username": self.username,
            "email": "test@example.org",
            "password": "testpass",
            "name": "Test User",
            "honor_code": "true",
            "terms_of_service": "true",
        }
        # The same payload plus the extra fields the fake microsite requires.
        # Built with copy-and-update rather than dict(a.items() + b.items()):
        # on Python 3 dict views do not support '+', so the original form
        # raised a TypeError there.
        self.extended_params = dict(self.params)
        self.extended_params.update({
            "address1": "foo",
            "city": "foo",
            "state": "foo",
            "country": "foo",
            "company": "foo",
            "title": "foo"
        })
    @mock.patch("microsite_configuration.microsite.get_value", fake_site_name)
    def test_user_signup_source(self):
        """
        test to create a user from the microsite and see that its record has
        been saved in the UserSignupSource table
        """
        response = self.client.post(self.url, self.params)
        self.assertEqual(response.status_code, 200)
        self.assertGreater(len(UserSignupSource.objects.filter(site='openedx.localhost')), 0)
    def test_user_signup_from_non_micro_site(self):
        """
        test to create a user from a non-microsite (get_value is not patched
        here).  The record should not be saved in the UserSignupSource table
        """
        response = self.client.post(self.url, self.params)
        self.assertEqual(response.status_code, 200)
        self.assertEqual(len(UserSignupSource.objects.filter(site='openedx.localhost')), 0)
    @mock.patch("microsite_configuration.microsite.get_value", fake_microsite_get_value)
    def test_user_signup_missing_enhanced_profile(self):
        """
        test to create a user from the microsite without providing any of the
        microsite-specific profile information; registration must be rejected
        """
        response = self.client.post(self.url, self.params)
        self.assertEqual(response.status_code, 400)
    @mock.patch("microsite_configuration.microsite.get_value", fake_microsite_get_value)
    def test_user_signup_including_enhanced_profile(self):
        """
        test to create a user from the microsite providing all of the
        microsite-specific profile information; it must land in profile.meta
        """
        response = self.client.post(self.url, self.extended_params)
        self.assertEqual(response.status_code, 200)
        user = User.objects.get(username=self.username)
        meta = json.loads(user.profile.meta)
        self.assertEqual(meta['address1'], 'foo')
        self.assertEqual(meta['state'], 'foo')
        self.assertEqual(meta['company'], 'foo')
        self.assertEqual(meta['title'], 'foo')
| agpl-3.0 |
Julian/Minion | minion/tests/test_integration.py | 1 | 1668 | from unittest import TestCase
import json
from hyperlink import URL
from minion.core import Application
from minion.http import Headers
from minion.renderers import JSON
from minion.request import Request, Response
from minion.routing import Router, SimpleMapper
class TestMinion(TestCase):
    """End-to-end check that a registered view is reachable via serve()."""
    def test_it_routes_simple_views(self):
        app = Application(router=Router(mapper=SimpleMapper()))
        expected = Response(b"Hello World!")

        @app.route(b"/show")
        def show(request):
            return Response(b"Hello World!")

        request = Request(url=URL(path=[u"show"]))
        self.assertEqual(app.serve(request, path=u"/show"), expected)
class RequestIntegrationTestMixin(object):
    """Mixin exercising request parsing end-to-end.

    NOTE(review): relies on a ``get(path, headers=...)`` method supplied by
    the concrete test case (not visible here) that performs the request and
    returns the JSON-rendered response body.
    """
    def setUp(self):
        self.minion = Application()
        @self.minion.route(b"/respond", renderer=JSON())
        def respond(request):
            # echo the parsed request URL back so the test can inspect it
            return {"url": request.url.to_text().encode("utf-8")}
    def get_request(self, *args, **kwargs):
        """Issue a request via ``self.get`` and rebuild a Request from the echo."""
        kwargs.setdefault("headers", Headers())
        response = json.loads(self.get(*args, **kwargs))
        response["url"] = URL.from_text(response["url"].decode("utf-8"))
        return Request(**response)
    def test_it_parses_the_url(self):
        request = self.get_request(
            b"/respond?foo=bar#baz",
            headers=Headers([(b"Host", [b"example.com"])]),
        )
        self.assertEqual(
            request.url, URL(
                scheme=u"http",
                host=u"example.com",
                path=[u"respond"],
                query=[(u"foo", u"bar")],
                fragment=u"",  # Fragments should be ignored by servers.
            ),
        )
| mit |
chares-zhang/you-get | src/you_get/extractors/youtube.py | 14 | 12808 | #!/usr/bin/env python
from ..common import *
from ..extractor import VideoExtractor
class YouTube(VideoExtractor):
    """you-get extractor for YouTube videos and playlists.

    NOTE: ``decipher`` and the ``get_*`` helpers below are defined without
    ``self``/``@staticmethod``; they are only ever invoked through the class
    object (``self.__class__.decipher(...)`` etc.), so no instance is bound.
    """
    name = "YouTube"
    # YouTube media encoding options, in descending quality order.
    # http://en.wikipedia.org/wiki/YouTube#Quality_and_codecs. Retrieved July 17, 2014.
    stream_types = [
        {'itag': '38', 'container': 'MP4', 'video_resolution': '3072p', 'video_encoding': 'H.264', 'video_profile': 'High', 'video_bitrate': '3.5-5', 'audio_encoding': 'AAC', 'audio_bitrate': '192'},
        #{'itag': '85', 'container': 'MP4', 'video_resolution': '1080p', 'video_encoding': 'H.264', 'video_profile': '3D', 'video_bitrate': '3-4', 'audio_encoding': 'AAC', 'audio_bitrate': '192'},
        {'itag': '46', 'container': 'WebM', 'video_resolution': '1080p', 'video_encoding': 'VP8', 'video_profile': '', 'video_bitrate': '', 'audio_encoding': 'Vorbis', 'audio_bitrate': '192'},
        {'itag': '37', 'container': 'MP4', 'video_resolution': '1080p', 'video_encoding': 'H.264', 'video_profile': 'High', 'video_bitrate': '3-4.3', 'audio_encoding': 'AAC', 'audio_bitrate': '192'},
        #{'itag': '102', 'container': 'WebM', 'video_resolution': '720p', 'video_encoding': 'VP8', 'video_profile': '3D', 'video_bitrate': '', 'audio_encoding': 'Vorbis', 'audio_bitrate': '192'},
        {'itag': '45', 'container': 'WebM', 'video_resolution': '720p', 'video_encoding': 'VP8', 'video_profile': '', 'video_bitrate': '2', 'audio_encoding': 'Vorbis', 'audio_bitrate': '192'},
        #{'itag': '84', 'container': 'MP4', 'video_resolution': '720p', 'video_encoding': 'H.264', 'video_profile': '3D', 'video_bitrate': '2-3', 'audio_encoding': 'AAC', 'audio_bitrate': '192'},
        {'itag': '22', 'container': 'MP4', 'video_resolution': '720p', 'video_encoding': 'H.264', 'video_profile': 'High', 'video_bitrate': '2-3', 'audio_encoding': 'AAC', 'audio_bitrate': '192'},
        {'itag': '120', 'container': 'FLV', 'video_resolution': '720p', 'video_encoding': 'H.264', 'video_profile': 'Main@L3.1', 'video_bitrate': '2', 'audio_encoding': 'AAC', 'audio_bitrate': '128'}, # Live streaming only
        {'itag': '44', 'container': 'WebM', 'video_resolution': '480p', 'video_encoding': 'VP8', 'video_profile': '', 'video_bitrate': '1', 'audio_encoding': 'Vorbis', 'audio_bitrate': '128'},
        {'itag': '35', 'container': 'FLV', 'video_resolution': '480p', 'video_encoding': 'H.264', 'video_profile': 'Main', 'video_bitrate': '0.8-1', 'audio_encoding': 'AAC', 'audio_bitrate': '128'},
        #{'itag': '101', 'container': 'WebM', 'video_resolution': '360p', 'video_encoding': 'VP8', 'video_profile': '3D', 'video_bitrate': '', 'audio_encoding': 'Vorbis', 'audio_bitrate': '192'},
        #{'itag': '100', 'container': 'WebM', 'video_resolution': '360p', 'video_encoding': 'VP8', 'video_profile': '3D', 'video_bitrate': '', 'audio_encoding': 'Vorbis', 'audio_bitrate': '128'},
        {'itag': '43', 'container': 'WebM', 'video_resolution': '360p', 'video_encoding': 'VP8', 'video_profile': '', 'video_bitrate': '0.5', 'audio_encoding': 'Vorbis', 'audio_bitrate': '128'},
        {'itag': '34', 'container': 'FLV', 'video_resolution': '360p', 'video_encoding': 'H.264', 'video_profile': 'Main', 'video_bitrate': '0.5', 'audio_encoding': 'AAC', 'audio_bitrate': '128'},
        #{'itag': '82', 'container': 'MP4', 'video_resolution': '360p', 'video_encoding': 'H.264', 'video_profile': '3D', 'video_bitrate': '0.5', 'audio_encoding': 'AAC', 'audio_bitrate': '96'},
        {'itag': '18', 'container': 'MP4', 'video_resolution': '270p/360p', 'video_encoding': 'H.264', 'video_profile': 'Baseline', 'video_bitrate': '0.5', 'audio_encoding': 'AAC', 'audio_bitrate': '96'},
        {'itag': '6', 'container': 'FLV', 'video_resolution': '270p', 'video_encoding': 'Sorenson H.263', 'video_profile': '', 'video_bitrate': '0.8', 'audio_encoding': 'MP3', 'audio_bitrate': '64'},
        #{'itag': '83', 'container': 'MP4', 'video_resolution': '240p', 'video_encoding': 'H.264', 'video_profile': '3D', 'video_bitrate': '0.5', 'audio_encoding': 'AAC', 'audio_bitrate': '96'},
        {'itag': '13', 'container': '3GP', 'video_resolution': '', 'video_encoding': 'MPEG-4 Visual', 'video_profile': '', 'video_bitrate': '0.5', 'audio_encoding': 'AAC', 'audio_bitrate': ''},
        {'itag': '5', 'container': 'FLV', 'video_resolution': '240p', 'video_encoding': 'Sorenson H.263', 'video_profile': '', 'video_bitrate': '0.25', 'audio_encoding': 'MP3', 'audio_bitrate': '64'},
        {'itag': '36', 'container': '3GP', 'video_resolution': '240p', 'video_encoding': 'MPEG-4 Visual', 'video_profile': 'Simple', 'video_bitrate': '0.175', 'audio_encoding': 'AAC', 'audio_bitrate': '36'},
        {'itag': '17', 'container': '3GP', 'video_resolution': '144p', 'video_encoding': 'MPEG-4 Visual', 'video_profile': 'Simple', 'video_bitrate': '0.05', 'audio_encoding': 'AAC', 'audio_bitrate': '24'},
    ]
    def decipher(js, s):
        """Decode the obfuscated stream signature *s*.

        Locates the scrambling routines in the html5 player JavaScript *js*,
        translates them into Python source and executes the result.
        NOTE(review): this exec-based translation is inherently fragile and
        breaks whenever YouTube restructures the player code.
        """
        def tr_js(code):
            # crude JavaScript -> Python source translation covering only the
            # small set of constructs the signature routines are known to use
            code = re.sub(r'function', r'def', code)
            code = re.sub(r'\$', '_dollar', code)
            code = re.sub(r'\{', r':\n\t', code)
            code = re.sub(r'\}', r'\n', code)
            code = re.sub(r'var\s+', r'', code)
            code = re.sub(r'(\w+).join\(""\)', r'"".join(\1)', code)
            code = re.sub(r'(\w+).length', r'len(\1)', code)
            code = re.sub(r'(\w+).slice\((\w+)\)', r'\1[\2:]', code)
            code = re.sub(r'(\w+).splice\((\w+),(\w+)\)', r'del \1[\2:\2+\3]', code)
            code = re.sub(r'(\w+).split\(""\)', r'list(\1)', code)
            return code
        # f1 is the name of the top-level signature-scrambling function
        f1 = match1(js, r'\w+\.sig\|\|([$\w]+)\(\w+\.\w+\)')
        f1def = match1(js, r'(function %s\(\w+\)\{[^\{]+\})' % re.escape(f1))
        f1def = re.sub(r'([$\w]+\.)([$\w]+\(\w+,\d+\))', r'\2', f1def)
        code = tr_js(f1def)
        # also translate every two-argument helper that f1 invokes
        f2s = set(re.findall(r'([$\w]+)\(\w+,\d+\)', f1def))
        for f2 in f2s:
            f2e = re.escape(f2)
            # helpers are object members; they appear with either one or two
            # formal parameters, so try the two-parameter form first
            f2def = re.search(r'[^$\w]%s:function\((\w+,\w+)\)(\{[^\{\}]+\})' % f2e, js)
            if f2def:
                f2def = 'function {}({}){}'.format(f2e, f2def.group(1), f2def.group(2))
            else:
                f2def = re.search(r'[^$\w]%s:function\((\w+)\)(\{[^\{\}]+\})' % f2e, js)
                f2def = 'function {}({},b){}'.format(f2e, f2def.group(1), f2def.group(2))
            f2 = re.sub(r'\$', '_dollar', f2)
            code = code + 'global %s\n' % f2 + tr_js(f2def)
        code = code + 'sig=%s(s)' % re.sub(r'\$', '_dollar', f1)
        exec(code, globals(), locals())
        return locals()['sig']
    def get_url_from_vid(vid):
        """Return the canonical short URL for a video ID."""
        return 'https://youtu.be/{}'.format(vid)
    def get_vid_from_url(url):
        """Extracts video ID from URL.
        """
        return match1(url, r'youtu\.be/([^/]+)') or \
               match1(url, r'youtube\.com/embed/([^/?]+)') or \
               match1(url, r'youtube\.com/v/([^/?]+)') or \
               parse_query_param(url, 'v') or \
               parse_query_param(parse_query_param(url, 'u'), 'v')
    def get_playlist_id_from_url(url):
        """Extracts playlist ID from URL.
        """
        return parse_query_param(url, 'list') or \
               parse_query_param(url, 'p')
    def download_playlist_by_url(self, url, **kwargs):
        """Download every indexed video of the playlist addressed by *url*."""
        self.url = url
        playlist_id = self.__class__.get_playlist_id_from_url(self.url)
        if playlist_id is None:
            log.wtf('[Failed] Unsupported URL pattern.')
        video_page = get_content('https://www.youtube.com/playlist?list=%s' % playlist_id)
        from html.parser import HTMLParser
        # scrape the watch links from the playlist page, keeping only those
        # carrying an 'index' parameter, in playlist order
        videos = sorted([HTMLParser().unescape(video)
                         for video in re.findall(r'<a href="(/watch\?[^"]+)"', video_page)
                         if parse_query_param(video, 'index')],
                        key=lambda video: parse_query_param(video, 'index'))
        self.title = re.search(r'<meta name="title" content="([^"]+)"', video_page).group(1)
        self.p_playlist()
        for video in videos:
            vid = parse_query_param(video, 'v')
            index = parse_query_param(video, 'index')
            # a fresh extractor instance per video keeps per-video state isolated
            self.__class__().download_by_url(self.__class__.get_url_from_vid(vid), index=index, **kwargs)
    def prepare(self, **kwargs):
        """Fetch video metadata and populate ``self.title`` / ``self.streams``.

        Falls back from the get_video_info endpoint to scraping the watch
        page when the video uses ciphered signatures or is restricted.
        """
        assert self.url or self.vid
        if not self.vid and self.url:
            self.vid = self.__class__.get_vid_from_url(self.url)
            if self.vid is None:
                # no video ID: treat the URL as a playlist instead
                self.download_playlist_by_url(self.url, **kwargs)
                exit(0)
        video_info = parse.parse_qs(get_content('https://www.youtube.com/get_video_info?video_id={}'.format(self.vid)))
        if 'status' not in video_info:
            log.wtf('[Failed] Unknown status.')
        elif video_info['status'] == ['ok']:
            if 'use_cipher_signature' not in video_info or video_info['use_cipher_signature'] == ['False']:
                # plain signatures: everything needed is in get_video_info
                self.title = parse.unquote_plus(video_info['title'][0])
                stream_list = video_info['url_encoded_fmt_stream_map'][0].split(',')
            else:
                # Parse video page instead
                video_page = get_content('https://www.youtube.com/watch?v=%s' % self.vid)
                ytplayer_config = json.loads(re.search('ytplayer.config\s*=\s*([^\n]+?});', video_page).group(1))
                self.title = ytplayer_config['args']['title']
                self.html5player = 'https:' + ytplayer_config['assets']['js']
                stream_list = ytplayer_config['args']['url_encoded_fmt_stream_map'].split(',')
        elif video_info['status'] == ['fail']:
            if video_info['errorcode'] == ['150']:
                video_page = get_content('https://www.youtube.com/watch?v=%s' % self.vid)
                # NOTE: this regex variant (no '?', anchored on ');ytplayer')
                # differs deliberately from the one in the 'ok' branch
                ytplayer_config = json.loads(re.search('ytplayer.config\s*=\s*([^\n]+});ytplayer', video_page).group(1))
                if 'title' in ytplayer_config['args']:
                    # 150 Restricted from playback on certain sites
                    # Parse video page instead
                    self.title = ytplayer_config['args']['title']
                    self.html5player = 'https:' + ytplayer_config['assets']['js']
                    stream_list = ytplayer_config['args']['url_encoded_fmt_stream_map'].split(',')
                else:
                    log.wtf('[Error] The uploader has not made this video available in your country.')
                    #self.title = re.search('<meta name="title" content="([^"]+)"', video_page).group(1)
                    #stream_list = []
            elif video_info['errorcode'] == ['100']:
                log.wtf('[Failed] This video does not exist.', exit_code=int(video_info['errorcode'][0]))
            else:
                log.wtf('[Failed] %s' % video_info['reason'][0], exit_code=int(video_info['errorcode'][0]))
        else:
            log.wtf('[Failed] Invalid status.')
        # index every advertised stream by its itag for extract() to pick from
        for stream in stream_list:
            metadata = parse.parse_qs(stream)
            stream_itag = metadata['itag'][0]
            self.streams[stream_itag] = {
                'itag': metadata['itag'][0],
                'url': metadata['url'][0],
                'sig': metadata['sig'][0] if 'sig' in metadata else None,
                's': metadata['s'][0] if 's' in metadata else None,
                'quality': metadata['quality'][0],
                'type': metadata['type'][0],
                'mime': metadata['type'][0].split(';')[0],
                'container': mime_to_container(metadata['type'][0].split(';')[0]),
            }
    def extract(self, **kwargs):
        """Resolve the final download URL (and size) of the chosen stream."""
        if not self.streams_sorted:
            # No stream is available
            return
        if 'stream_id' in kwargs and kwargs['stream_id']:
            # Extract the stream
            stream_id = kwargs['stream_id']
            if stream_id not in self.streams:
                log.e('[Error] Invalid video format.')
                log.e('Run \'-i\' command with no specific video format to view all available formats.')
                exit(2)
        else:
            # Extract stream with the best quality
            stream_id = self.streams_sorted[0]['itag']
        src = self.streams[stream_id]['url']
        if self.streams[stream_id]['sig'] is not None:
            # plain signature: append it directly
            sig = self.streams[stream_id]['sig']
            src += '&signature={}'.format(sig)
        elif self.streams[stream_id]['s'] is not None:
            # ciphered signature: decode it via the html5 player code
            s = self.streams[stream_id]['s']
            js = get_content(self.html5player)
            sig = self.__class__.decipher(js, s)
            src += '&signature={}'.format(sig)
        self.streams[stream_id]['src'] = [src]
        self.streams[stream_id]['size'] = urls_size(self.streams[stream_id]['src'])
# module-level entry points expected by the you-get extractor framework
site = YouTube()
download = site.download_by_url
download_playlist = site.download_playlist_by_url
| mit |
Omegaphora/external_chromium_org | third_party/tlslite/scripts/tlsdb.py | 110 | 3763 | #!/usr/bin/env python
# Authors:
# Trevor Perrin
# Martin von Loewis - python 3 port
#
# See the LICENSE file for legal information regarding use of this file.
from __future__ import print_function
import sys
import os
import socket
import math
# Refuse to be imported as a module: this file is a command-line tool.
# Bug fix: raising a plain string is itself a TypeError on any supported
# Python (string exceptions were removed); raise a real exception carrying
# the intended message instead.
if __name__ != "__main__":
    raise RuntimeError("This must be run as a command, not used as a module!")
from tlslite import *
from tlslite import __version__
# With no arguments (or a single "...help" argument) print version, RNG and
# crypto-module availability plus a usage summary, then exit.
if len(sys.argv) == 1 or (len(sys.argv)==2 and sys.argv[1].lower().endswith("help")):
    print("")
    print("Version: %s" % __version__)
    print("")
    print("RNG: %s" % prngName)
    print("")
    print("Modules:")
    if m2cryptoLoaded:
        print("  M2Crypto : Loaded")
    else:
        print("  M2Crypto : Not Loaded")
    if pycryptoLoaded:
        print("  pycrypto : Loaded")
    else:
        print("  pycrypto : Not Loaded")
    if gmpyLoaded:
        print("  GMPY : Loaded")
    else:
        print("  GMPY : Not Loaded")
    print("")
    print("Commands:")
    print("")
    print("  createsrp       <db>")
    print("")
    print("  add    <db> <user> <pass> [<bits>]")
    print("  del    <db> <user>")
    print("  check  <db> <user> [<pass>]")
    print("  list   <db>")
    sys.exit()
# the first positional argument selects the sub-command handled below
cmd = sys.argv[1].lower()
class Args:
    """Bounds-checked access to a command-line argument vector."""
    def __init__(self, argv):
        self.argv = argv
    def get(self, index):
        """Return the argument at *index*; complain if too few were given."""
        if index >= len(self.argv):
            raise SyntaxError("Not enough arguments")
        return self.argv[index]
    def getLast(self, index):
        """Like get(), but also insist that *index* is the final argument."""
        extra = len(self.argv) - (index + 1)
        if extra > 0:
            raise SyntaxError("Too many arguments")
        return self.get(index)
args = Args(sys.argv)
def reformatDocString(s):
    """Re-indent *s*: strip each line and prefix it with four spaces."""
    return "\n".join("    " + line.strip() for line in s.splitlines())
# Dispatch on the sub-command parsed above.  All database work goes through
# tlslite's VerifierDB (an SRP verifier store).
try:
    if cmd == "help":
        # placeholder branch: only validates the command word
        command = args.getLast(2).lower()
        if command == "valid":
            print("")
        else:
            print("Bad command: '%s'" % command)
    elif cmd == "createsrp":
        # create a new, empty verifier database
        dbName = args.get(2)
        db = VerifierDB(dbName)
        db.create()
    elif cmd == "add":
        # add a user with an SRP verifier of the requested bit size.
        # NOTE(review): the usage text shows <bits> as optional, but
        # getLast(5) raises SyntaxError when it is omitted — confirm.
        dbName = args.get(2)
        username = args.get(3)
        password = args.get(4)
        db = VerifierDB(dbName)
        db.open()
        if username in db:
            print("User already in database!")
            sys.exit()
        bits = int(args.getLast(5))
        N, g, salt, verifier = VerifierDB.makeVerifier(username, password, bits)
        db[username] = N, g, salt, verifier
    elif cmd == "del":
        # remove a user from the database
        dbName = args.get(2)
        username = args.getLast(3)
        db = VerifierDB(dbName)
        db.open()
        del(db[username])
    elif cmd == "check":
        # report whether a user exists and, optionally, verify a password
        dbName = args.get(2)
        username = args.get(3)
        if len(sys.argv)>=5:
            password = args.getLast(4)
        else:
            password = None
        db = VerifierDB(dbName)
        db.open()
        try:
            db[username]
            print("Username exists")
            if password:
                if db.check(username, password):
                    print("Password is correct")
                else:
                    print("Password is wrong")
        except KeyError:
            print("Username does not exist")
            sys.exit()
    elif cmd == "list":
        # list every user together with the bit length of its modulus N
        dbName = args.get(2)
        db = VerifierDB(dbName)
        db.open()
        print("Verifier Database")
        def numBits(n):
            # bit length of n (0 for n == 0)
            if n==0:
                return 0
            return int(math.floor(math.log(n, 2))+1)
        for username in db.keys():
            N, g, s, v = db[username]
            print(numBits(N), username)
    else:
        print("Bad command: '%s'" % cmd)
except:
    # deliberate pass-through: kept so a traceback is printed for any error
    raise
| bsd-3-clause |
oscarsaleta/P4 | mpir/build.vc/mpir_config.py | 1 | 19187 | '''
Set up Visual Sudio to build a specified MPIR configuration
Copyright (C) 2011, 2012, 2013, 2014 Brian Gladman
'''
from __future__ import print_function
from operator import itemgetter
from os import listdir, walk, unlink, makedirs, sep
from os.path import split, splitext, isdir, relpath, join, exists
from os.path import join, abspath, dirname, normpath
from copy import deepcopy
from sys import argv, exit, path
from filecmp import cmp
from shutil import copy
from re import compile, search
from collections import defaultdict
from time import sleep
from _msvc_filters import gen_filter
from _msvc_project import Project_Type, gen_vcxproj
from _msvc_solution import msvc_solution
# Python 2 compatibility: make input() behave like raw_input()
try:
    input = raw_input
except NameError:
    pass
# Visual Studio major version; may be overridden by the first command-line argument
vs_version = 15
if len(argv) > 1:
    vs_version = int(argv[1])
solution_name = 'mpir.sln'
script_dir = dirname(__file__)
build_dir_name = 'build.vc{0:d}'.format(vs_version)
# make the version-specific build directory importable for version_info
path.append(abspath(join(script_dir, '../' + build_dir_name)))
from version_info import vs_info
# for script debugging
debug = False
# either add a prebuild step to the project files or do it here
add_prebuild = True
# output a build project for the C++ static library
add_cpp_lib = True
# The path to the mpir root directory
cf_dir = './'
mpir_root_dir = '../'
build_dir = join(mpir_root_dir, build_dir_name)
solution_dir = join(mpir_root_dir, build_dir_name)
cfg_dir = join(solution_dir, 'cdata')
# paths that might include source files (*.c, *.h, *.asm)
c_directories = ('', 'build.vc', 'fft', 'mpf', 'mpq', 'mpz', 'printf', 'scanf')
# files that are to be excluded from the build
exclude_file_list = ('config.guess', 'cfg', 'getopt', 'getrusage',
                     'gettimeofday', 'cpuid', 'obsolete', 'win_timing',
                     'gmp-mparam', 'tal-debug', 'tal-notreent', 'new_fft',
                     'new_fft_with_flint', 'compat', 'udiv_w_sdiv')
# copy from file ipath to file opath but avoid copying if
# opath exists and is the same as ipath (this is to avoid
# triggering an unecessary rebuild).
def write_f(ipath, opath):
    """Copy file *ipath* to *opath*, skipping the copy when the source is
    missing (or a directory), or when the target already holds identical
    content — this avoids triggering an unnecessary rebuild."""
    if not exists(ipath) or isdir(ipath):
        return
    if exists(opath) and (isdir(opath) or cmp(ipath, opath)):
        return
    copy(ipath, opath)
# append a file (ipath) to an existing file (opath)
def append_f(ipath, opath):
    """Append the contents of file *ipath* to file *opath* (created if
    absent), copying in 8 KiB chunks.  I/O errors are reported, not raised."""
    try:
        with open(opath, 'ab') as out_file:
            try:
                with open(ipath, 'rb') as in_file:
                    buf = in_file.read(8192)
                    while buf:
                        out_file.write(buf)
                        buf = in_file.read(8192)
            except IOError:
                # bug fix: the original formatted the undefined name ``f``
                # here, raising a NameError instead of reporting the error
                print('error reading {0:s} for input'.format(ipath))
                return
    except IOError:
        print('error opening {0:s} for output'.format(opath))
# copy files in a list from in_dir to out_dir
def copy_files(file_list, in_dir, out_dir):
    """Copy each name in *file_list* from *in_dir* into *out_dir*, creating
    *out_dir* first if necessary."""
    try:
        makedirs(out_dir)
    except OSError:
        # bug fix: makedirs raises OSError (e.g. when out_dir already
        # exists); the original caught IOError, which on Python 2 is a
        # different class and let the error escape
        pass
    for f in file_list:
        copy(join(in_dir, f), out_dir)
# Recursively search a given directory tree to find header,
# C and assembler code files that either replace or augment
# the generic C source files in the input list 'src_list'.
# As the directory tree is searched, files in each directory
# become the source code files for the current directory and
# the default source code files for its child directories.
#
# Lists of default header, C and assembler source code files
# are maintained as the tree is traversed and if a file in
# the current directory matches the name of a file in the
# default file list (name matches ignore file extensions),
# the name in the list is removed and is replaced by the new
# file found. On return each directory in the tree had an
# entry in the returned dictionary that contains:
#
# 1. The list of header files
#
# 2. The list of C source code files for the directory
#
# 3. The list of assembler code files that replace C files
#
# 4. The list of assembler files that are not C replacements
#
def find_asm(path, cf_list):
    """Walk the assembler tree at *path* and build, for every directory, the
    header/C/assembler source lists described in the comment block above.
    *cf_list* supplies the default generic C sources inherited at the root.
    Returns {rel_dir: [headers, c_files, asm_replacements, asm_extras, relr]}
    where *relr* is the directory's path relative to the MPIR root."""
    d = dict()
    for root, dirs, files in walk(path):
        if '.svn' in dirs:                    # ignore SVN directories
            dirs.remove('.svn')
        if 'fat' in dirs:                     # ignore fat directory
            dirs.remove('fat')
        relp = relpath(root, path)            # path from asm root
        relr = relpath(root, mpir_root_dir)   # path from MPIR root
        if relp == '.':                       # set C files as default
            relp = h = t = ''
            d[''] = [[], deepcopy(cf_list), [], [], relr]
        else:
            h, _ = split(relp)                # h = parent, t = this directory
            # copy defaults from this directories parent
            d[relp] = [deepcopy(d[h][0]), deepcopy(d[h][1]),
                       deepcopy(d[h][2]), deepcopy(d[h][3]), relr]
        for f in files:                       # for the files in this directory
            n, x = splitext(f)
            if x == '.h':                     # if it is a header file, remove
                for cf in reversed(d[relp][0]):   # any matching default
                    if cf[0] == n:
                        d[relp][0].remove(cf)
                d[relp][0] += [(n, x, relr)]  # and add the local header file
            if x == '.c':                     # if it is a C file, remove
                for cf in reversed(d[relp][1]):   # any matching default
                    if cf[0] == n:
                        d[relp][1].remove(cf)
                d[relp][1] += [(n, x, relr)]  # and add the local C file
            if x == '.asm':                   # if it is an assembler file
                match = False
                for cf in reversed(d[relp][1]):   # remove any matching C file
                    if cf[0] == n:
                        d[relp][1].remove(cf)
                        match = True
                        break
                for cf in reversed(d[relp][2]):   # and remove any matching
                    if cf[0] == n:                # assembler file
                        d[relp][2].remove(cf)
                        match = True
                        break
                if match:                     # if a match was found, put the
                    d[relp][2] += [(n, x, relr)]  # file in the replacement list
                else:                         # otherwise look for it in the
                    for cf in reversed(d[relp][3]):   # additional files list
                        if cf[0] == n:
                            d[relp][3].remove(cf)
                            break
                    d[relp][3] += [(n, x, relr)]
    for k in d:                               # additional assembler list
        for i in range(4):
            d[k][i].sort(key=itemgetter(0))   # sort the four file lists
    return d
# create 4 lists of c, h, cc (or cpp) and asm (or as) files in a directory
def find_src(dir_list):
# list number from file extension
di = {'.h': 0, '.c': 1, '.cc': 2, '.cpp': 2, '.asm': 3, '.as': 3}
list = [[], [], [], []]
for d in dir_list:
for f in listdir(join(mpir_root_dir, d)):
if f == '.svn':
continue # ignore SVN directories
if not isdir(f):
n, x = splitext(f) # split into name + extension
if x in di and not n in exclude_file_list:
list[di[x]] += [(n, x, d)] # if of the right type and is
for x in list: # not in the exclude list
x.sort(key=itemgetter(2, 0, 1)) # add it to appropriate list
return list
# scan the files in the input set and find the symbols
# defined in the files
fr_sym = compile(r'LEAF_PROC\s+(\w+)')        # name following a LEAF_PROC marker
lf_sym = compile(r'FRAME_PROC\s+(\w+)')       # name following a FRAME_PROC marker
wf_sym = compile(r'WIN64_GCC_PROC\s+(\w+)')   # name following a WIN64_GCC_PROC marker
g3_sym = compile(r'global\s+___g(\w+)')       # globals with a three-underscore ___g prefix
g2_sym = compile(r'global\s+__g(\w+)')        # __g prefix (only consulted if g3_sym fails)
def get_symbols(setf, sym_dir):
    """Scan each (name, ext, dir) source file in *setf* and record, in the
    set-valued mapping *sym_dir*, the assembler symbols it defines."""
    proc_pats = (fr_sym, lf_sym, wf_sym)
    for f in setf:
        fn = join(mpir_root_dir, f[2], f[0] + f[1])
        with open(fn, 'r', encoding='utf8', errors='replace') as inf:
            for line in inf:
                for pat in proc_pats:
                    m = pat.search(line)
                    if m:
                        sym_dir[f].add(m.group(1))
                # three-underscore globals take precedence over two
                m = g3_sym.search(line) or g2_sym.search(line)
                if m:
                    sym_dir[f].add(m.group(1))
def file_symbols(cf):
    """Collect the defined symbols for every file listed in the per-directory
    assembler lists of *cf* (a find_asm() result), skipping 'fat'."""
    sym_dir = defaultdict(set)
    for key in cf:
        if key == 'fat':
            continue
        files = set(cf[key][2]) | set(cf[key][3])
        get_symbols(files, sym_dir)
    return sym_dir
def gen_have_list(c, sym_dir, out_dir):
    """Append the sorted symbol lists for the build entry *c* and write them
    to <out_dir>/<c[4]>/cfg.h, one symbol per line.

    Callers rely on the side effect that *c* gains two extra entries:
    c[5] = symbols from the replacement assembler files (c[2]) and
    c[6] = symbols from the additional assembler files (c[3])."""
    set_sym2 = set()
    for f in c[2]:
        set_sym2 |= sym_dir[f]
    set_sym3 = set()
    for f in c[3]:
        set_sym3 |= sym_dir[f]
    c += [sorted(set_sym2), sorted(set_sym3)]
    fd = join(out_dir, c[4])
    try:
        makedirs(fd)
    except OSError:
        # bug fix: makedirs raises OSError (e.g. when the directory already
        # exists); the original 'except IOError' missed that on Python 2
        pass
    with open(join(fd, 'cfg.h'), 'w') as outf:
        for sym in sorted(set_sym2 | set_sym3):
            print(sym, file=outf)
# compile the list of C files; anything else found in a C directory is a
# misplaced source, so list the offending files
t = find_src(c_directories)
c_hdr_list = t[0]
c_src_list = t[1]
if t[2] or t[3]:
    print('found C++ and/or assembler file(s) in a C directory')
    if t[2]:
        for f in t[2]:
            print(f)
        print()
    if t[3]:
        for f in t[3]:
            print(f)
        print()
# compile the list of C++ files
t = find_src(['cxx'])
cc_hdr_list = t[0]
cc_src_list = t[2]
if t[1] or t[3]:
    print('found C and/or assembler file(s) in a C++ directory')
    if t[1]:
        for f in t[1]:
            print(f)
        print()
    if t[3]:
        # bug fix: report the offending assembler files (t[3]); the original
        # looped over cc_src_list here and printed the C++ sources instead
        for f in t[3]:
            print(f)
        print()
# compile the list of C files in mpn\generic
t = find_src([r'mpn\generic'])
gc_hdr_list = t[0]
gc_src_list = t[1]
if t[2] or t[3]:
    print('found C++ and/or assembler file(s) in a C directory')
    if t[2]:
        # bug fix: print the offending C++ files; the original printed
        # gc_hdr_list (the headers) here
        for f in t[2]:
            print(f)
        print()
    if t[3]:
        # bug fix: print the offending assembler files; the original printed
        # gc_src_list (the C sources) here
        for f in t[3]:
            print(f)
        print()
# prepare the generic C build
mpn_gc = dict((('gc', [gc_hdr_list, gc_src_list, [], []]),))
# prepare the list of Win32 builds
mpn_32 = find_asm(mpir_root_dir + 'mpn/x86w', gc_src_list)
syms32 = file_symbols(mpn_32)
del mpn_32['']          # drop the root entry: only sub-directories are builds
# prepare the list of x64 builds
mpn_64 = find_asm(mpir_root_dir + 'mpn/x86_64w', gc_src_list)
syms64 = file_symbols(mpn_64)
del mpn_64['']
# running totals used to map a menu number onto the right dictionary
nd_gc = len(mpn_gc)
nd_32 = nd_gc + len(mpn_32)
nd_nd = nd_32 + len(mpn_64)
# now ask user which builds they wish to generate
while True:
    cnt = 0
    for v in sorted(mpn_gc):
        cnt += 1
        print('{0:2d}. {1:24s} '.format(cnt, v.replace('\\', '_')))
    for v in sorted(mpn_32):
        cnt += 1
        print('{0:2d}. {1:24s} (win32)'.format(cnt, v.replace('\\', '_')))
    for v in sorted(mpn_64):
        cnt += 1
        print('{0:2d}. {1:24s} (x64)'.format(cnt, v.replace('\\', '_')))
    fs = 'Space separated list of builds (1..{0:d}, 0 to exit)? '
    s = input(fs.format(cnt))
    n_list = [int(c) for c in s.split()]
    if 0 in n_list:
        exit()
    if any(n < 1 or n > nd_nd for n in n_list):
        print('list contains invalid build numbers')
        sleep(2)    # give the user time to read the message before re-prompting
    else:
        break
# multiple builds must each have their own prebuilds
if len(n_list) > 1:
    add_prebuild = True
# now generate the requested builds
# input any existing projects in the solution (*.sln) file
solc = msvc_solution(abspath(join(solution_dir, solution_name)))
# header files copied/generated for every configuration
hf_list = ('config.h', 'gmp-impl.h', 'longlong.h', 'mpir.h', 'gmp-mparam.h')
for n in n_list:
if 0 < n <= nd_gc:
config = sorted(mpn_gc)[n - 1]
mode = ('Win32', 'x64')
mpn_f = mpn_gc[config]
elif nd_gc < n <= nd_32:
config = sorted(mpn_32)[n - 1 - nd_gc]
gen_have_list(mpn_32[config], syms32, cfg_dir)
mode = ('Win32', )
mpn_f = mpn_32[config]
elif nd_32 < n <= nd_nd:
config = sorted(mpn_64)[n - 1 - nd_32]
gen_have_list(mpn_64[config], syms64, cfg_dir)
mode = ('x64', )
mpn_f = mpn_64[config]
else:
print('internal error')
exit()
if mode[0] == 'x64':
for l in mpn_f[1:]:
for t in l:
if t[0].startswith('preinv_'):
if 'x64' in mode and t[0] == 'preinv_divrem_1':
l.remove(t)
print(config, mode)
if not add_prebuild:
# generate mpir.h and gmp.h from gmp_h.in
gmp_h = '''
#ifdef _WIN32
# ifdef _WIN64
# define _LONG_LONG_LIMB 1
# define GMP_LIMB_BITS 64
# else
# define GMP_LIMB_BITS 32
# endif
# define __GMP_BITS_PER_MP_LIMB GMP_LIMB_BITS
# define SIZEOF_MP_LIMB_T (GMP_LIMB_BITS >> 3)
# define GMP_NAIL_BITS 0
#endif
'''
try:
lines = open(join(mpir_root_dir, 'gmp-h.in'), 'r').readlines()
except IOError:
print('error attempting to read from gmp_h.in')
exit()
try:
tfile = join(mpir_root_dir, 'tmp.h')
with open(tfile, 'w') as outf:
first = True
for line in lines:
if search(r'@\w+@', line):
if first:
first = False
outf.writelines(gmp_h)
else:
outf.writelines([line])
# write result to mpir.h but only overwrite the existing
# version if this version is different (don't trigger an
# unnecessary rebuild)
write_f(tfile, join(mpir_root_dir, 'mpir.h'))
write_f(tfile, join(mpir_root_dir, 'gmp.h'))
unlink(tfile)
except IOError:
print('error attempting to create mpir.h from gmp-h.in')
exit()
# generate config.h
try:
tfile = join(mpir_root_dir, 'tmp.h')
if 5 < len(mpn_f) < 8:
if len(mpn_f) == 6:
t = sorted(mpn_f[5])
else:
t = sorted(mpn_f[5] + mpn_f[6])
with open(tfile, 'w') as outf:
for i in t:
outf.writelines(['#define HAVE_NATIVE_{0:s} 1\n'.format(i)])
append_f(join(cf_dir, 'cfg.h'), tfile)
write_f(tfile, join(mpir_root_dir, 'config.h'))
unlink(tfile)
except IOError:
print('error attempting to write to {0:s}'.format(tfile))
exit()
# generate longlong.h and copy gmp-mparam.h
try:
li_file = None
for i in mpn_f[0]:
if i[0] == 'longlong_inc':
li_file = join(mpir_root_dir, join(i[2], r'longlong_inc.h'))
if i[0] == 'gmp-mparam':
write_f(join(mpir_root_dir, join(i[2], 'gmp-mparam.h')),
join(mpir_root_dir, 'gmp-mparam.h'))
if not li_file or not exists(li_file):
print('error attempting to read {0:s}'.format(li_file))
exit()
tfile = join(mpir_root_dir, 'tmp.h')
write_f(join(mpir_root_dir, 'longlong_pre.h'), tfile)
append_f(li_file, tfile)
append_f(join(mpir_root_dir, 'longlong_post.h'), tfile)
write_f(tfile, join(mpir_root_dir, 'longlong.h'))
unlink(tfile)
except IOError:
print('error attempting to generate longlong.h')
exit()
# generate the vcxproj and the IDE filter files
# and add/replace project in the solution file
af_list = sorted(mpn_f[2] + mpn_f[3])
# find the gmp-mparam.h file to be used
for name, ty, loc in mpn_f[0]:
if name == 'gmp-mparam':
loc = loc.replace('mpn\\x86w', '', 1)
loc = loc.replace('mpn\\x86_64w', '', 1)
if loc.startswith('\\'):
loc = loc[1:]
mp_dir = loc if loc else config
break
else:
mp_dir = config
cf = config.replace('\\', '_')
# set up DLL build
proj_name = 'mpir'
vcx_name = 'dll_mpir_' + cf
vcx_path = abspath(join(build_dir, vcx_name, vcx_name + '.vcxproj'))
guid = solc.get_project_guid(vcx_name, vcx_path)
gen_filter(vcx_path + '.filters', mpir_root_dir, hf_list, c_src_list + cc_src_list + mpn_f[1], af_list, 12.0)
gen_vcxproj(vcx_path, mpir_root_dir, proj_name, guid, mp_dir, mode, Project_Type.DLL,
False, hf_list, c_src_list + cc_src_list + mpn_f[1], af_list, add_prebuild, vs_info)
solc.add_project('', vcx_name, vcx_path, guid)
# set up LIB build
proj_name = 'mpir'
vcx_name = 'lib_mpir_' + cf
vcx_path = abspath(join(build_dir, vcx_name, vcx_name + '.vcxproj'))
guid = solc.get_project_guid(vcx_name, vcx_path)
gen_filter(vcx_path + '.filters', mpir_root_dir, hf_list, c_src_list + mpn_f[1], af_list, 12.0)
gen_vcxproj(vcx_path, mpir_root_dir, proj_name, guid, mp_dir, mode, Project_Type.LIB,
False, hf_list, c_src_list + mpn_f[1], af_list, add_prebuild, vs_info)
solc.add_project('', vcx_name, vcx_path, guid)
# C++ library build
if add_cpp_lib:
proj_name = 'mpirxx'
mode = ('Win32', 'x64')
vcx_name = 'lib_mpir_cxx'
vcx_path = abspath(join(build_dir, vcx_name, vcx_name + '.vcxproj'))
th = hf_list + ('mpirxx.h',)
guid = solc.get_project_guid(vcx_name, vcx_path)
gen_filter(vcx_path + '.filters', mpir_root_dir, th, cc_src_list, '', 12.0)
gen_vcxproj(vcx_path, mpir_root_dir, proj_name, guid, '', mode, Project_Type.LIB,
True, th, cc_src_list, '', add_prebuild, vs_info)
solc.add_project('', vcx_name, vcx_path, guid)
solc.write_solution(vs_info)
# the following code is for diagnostic purposes only
if debug:
for x in sorted(mpn_f[0] + mpn_f[1]):
print(x)
print()
for x in sorted(mpn_f[2] + mpn_f[3]):
print(x)
print()
# mpn_files = dict()
# mpn_files.update(mpn_32)
# mpn_files.update(mpn_64)
for x in mpn_f[config]:
print(x)
if False:
print('1:')
for y in mpn_files[x][0]:
print(y)
print('2:')
for y in mpn_files[x][1]:
print(y)
print('3:')
for y in mpn_files[x][2]:
print(y)
print('4:')
for y in mpn_files[x][3]:
print(y)
print()
for y in sorted(x[2] + x[3]):
print(y)
print()
print()
if debug:
mpn_dirs = ('mpn/generic', 'mpn/x86_64w', 'mpn/x86w')
# compile a list of files in directories in 'dl' under root 'r' with extension 'p'
def findf(r, dl, p):
l = []
for d in dl:
for root, dirs, files in walk(r + d):
relp = relpath(root, r) # path relative to mpir root directory
if '.svn' in dirs:
dirs.remove('.svn') # ignore SVN directories
if d == '' or root.endswith(build_vc):
for d in reversed(dirs): # don't scan build.vc<nn> subdirectories
dirs.remove(d)
for f in files:
if f.endswith(p):
l += [(tuple(relp.split('\\')), f)]
return sorted(l)
hdr_list = findf(mpir_root_dir, c_directories, '.h')
for x in hdr_list:
print(x)
print()
src_list = findf(mpir_root_dir, c_directories, '.c')
for x in src_list:
print(x)
print()
cpp_list = findf(mpir_root_dir, ['cpp'], '.cc')
for x in cpp_list:
print(x)
print()
gnc_list = findf(mpir_root_dir + 'mpn/', ['generic'], '.c')
for x in gnc_list:
print(x)
print()
w32_list = findf(mpir_root_dir + 'mpn/', ['x86w'], '.asm')
for x in w32_list:
print(x)
print()
x64_list = findf(mpir_root_dir + 'mpn/', ['x86_64w'], '.asm')
for x in x64_list:
print(x)
print()
nd = dict([])
for d, f in gnc_list:
n, x = splitext(f)
nd[n] = nd.get(n, []) + [(d, 'c')]
for d, f in x64_list:
n, x = splitext(f)
nd[n] = nd.get(n, []) + [(d, 'asm')]
for d, f in w32_list:
n, x = splitext(f)
nd[n] = nd.get(n, []) + [(d, 'asm')]
for x in nd:
print(x, nd[x])
| lgpl-3.0 |
procangroup/edx-platform | cms/djangoapps/contentstore/features/course-settings.py | 20 | 6300 | # pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
from django.conf import settings
from lettuce import step, world
from nose.tools import assert_false, assert_true
from selenium.webdriver.common.keys import Keys
from cms.djangoapps.contentstore.features.common import type_in_codemirror
TEST_ROOT = settings.COMMON_TEST_DATA_ROOT
COURSE_START_DATE_CSS = "#course-start-date"
COURSE_END_DATE_CSS = "#course-end-date"
ENROLLMENT_START_DATE_CSS = "#course-enrollment-start-date"
ENROLLMENT_END_DATE_CSS = "#course-enrollment-end-date"
COURSE_START_TIME_CSS = "#course-start-time"
COURSE_END_TIME_CSS = "#course-end-time"
ENROLLMENT_START_TIME_CSS = "#course-enrollment-start-time"
ENROLLMENT_END_TIME_CSS = "#course-enrollment-end-time"
DUMMY_TIME = "15:30"
DEFAULT_TIME = "00:00"
############### ACTIONS ####################
@step('I select Schedule and Details$')
def test_i_select_schedule_and_details(step):
world.click_course_settings()
link_css = 'li.nav-course-settings-schedule a'
world.css_click(link_css)
world.wait_for_requirejs(
["jquery", "js/models/course",
"js/models/settings/course_details", "js/views/settings/main"])
@step('I have set course dates$')
def test_i_have_set_course_dates(step):
step.given('I have opened a new course in Studio')
step.given('I select Schedule and Details')
step.given('And I set course dates')
@step('And I set course dates$')
def test_and_i_set_course_dates(step):
set_date_or_time(COURSE_START_DATE_CSS, '12/20/2013')
set_date_or_time(COURSE_END_DATE_CSS, '12/26/2013')
set_date_or_time(ENROLLMENT_START_DATE_CSS, '12/1/2013')
set_date_or_time(ENROLLMENT_END_DATE_CSS, '12/10/2013')
set_date_or_time(COURSE_START_TIME_CSS, DUMMY_TIME)
set_date_or_time(ENROLLMENT_END_TIME_CSS, DUMMY_TIME)
@step('And I clear all the dates except start$')
def test_and_i_clear_all_the_dates_except_start(step):
set_date_or_time(COURSE_END_DATE_CSS, '')
set_date_or_time(ENROLLMENT_START_DATE_CSS, '')
set_date_or_time(ENROLLMENT_END_DATE_CSS, '')
@step('Then I see cleared dates$')
def test_then_i_see_cleared_dates(step):
verify_date_or_time(COURSE_END_DATE_CSS, '')
verify_date_or_time(ENROLLMENT_START_DATE_CSS, '')
verify_date_or_time(ENROLLMENT_END_DATE_CSS, '')
verify_date_or_time(COURSE_END_TIME_CSS, '')
verify_date_or_time(ENROLLMENT_START_TIME_CSS, '')
verify_date_or_time(ENROLLMENT_END_TIME_CSS, '')
# Verify course start date (required) and time still there
verify_date_or_time(COURSE_START_DATE_CSS, '12/20/2013')
verify_date_or_time(COURSE_START_TIME_CSS, DUMMY_TIME)
@step('I clear the course start date$')
def test_i_clear_the_course_start_date(step):
set_date_or_time(COURSE_START_DATE_CSS, '')
@step('I receive a warning about course start date$')
def test_i_receive_a_warning_about_course_start_date(step):
assert_true(world.css_has_text('.message-error', 'The course must have an assigned start date.'))
assert_true('error' in world.css_find(COURSE_START_DATE_CSS).first._element.get_attribute('class'))
assert_true('error' in world.css_find(COURSE_START_TIME_CSS).first._element.get_attribute('class'))
@step('the previously set start date is shown$')
def test_the_previously_set_start_date_is_shown(step):
verify_date_or_time(COURSE_START_DATE_CSS, '12/20/2013')
verify_date_or_time(COURSE_START_TIME_CSS, DUMMY_TIME)
@step('Given I have tried to clear the course start$')
def test_i_have_tried_to_clear_the_course_start(step):
step.given("I have set course dates")
step.given("I clear the course start date")
step.given("I receive a warning about course start date")
@step('I have entered a new course start date$')
def test_i_have_entered_a_new_course_start_date(step):
set_date_or_time(COURSE_START_DATE_CSS, '12/22/2013')
@step('The warning about course start date goes away$')
def test_the_warning_about_course_start_date_goes_away(step):
assert world.is_css_not_present('.message-error')
assert_false('error' in world.css_find(COURSE_START_DATE_CSS).first._element.get_attribute('class'))
assert_false('error' in world.css_find(COURSE_START_TIME_CSS).first._element.get_attribute('class'))
@step('my new course start date is shown$')
def new_course_start_date_is_shown(step):
verify_date_or_time(COURSE_START_DATE_CSS, '12/22/2013')
# Time should have stayed from before attempt to clear date.
verify_date_or_time(COURSE_START_TIME_CSS, DUMMY_TIME)
@step('I change fields$')
def test_i_change_fields(step):
set_date_or_time(COURSE_START_DATE_CSS, '7/7/7777')
set_date_or_time(COURSE_END_DATE_CSS, '7/7/7777')
set_date_or_time(ENROLLMENT_START_DATE_CSS, '7/7/7777')
set_date_or_time(ENROLLMENT_END_DATE_CSS, '7/7/7777')
@step('I change the course overview')
def test_change_course_overview(_step):
type_in_codemirror(0, "<h1>Overview</h1>")
############### HELPER METHODS ####################
def set_date_or_time(css, date_or_time):
    """
    Sets date or time field.

    `css` selects the input element; `date_or_time` is the text to type
    into it (an empty string clears the field).
    """
    world.css_fill(css, date_or_time)
    e = world.css_find(css).first
    # hit Enter to apply the changes
    e._element.send_keys(Keys.ENTER)
def verify_date_or_time(css, date_or_time):
    """
    Verifies date or time field.

    Asserts that the element selected by `css` holds exactly
    `date_or_time`.
    """
    # We need to wait for JavaScript to fill in the field, so we use
    # css_has_value(), which first checks that the field is not blank
    assert_true(world.css_has_value(css, date_or_time))
@step('I do not see the changes')
@step('I see the set dates')
def i_see_the_set_dates(_step):
"""
Ensure that each field has the value set in `test_and_i_set_course_dates`.
"""
verify_date_or_time(COURSE_START_DATE_CSS, '12/20/2013')
verify_date_or_time(COURSE_END_DATE_CSS, '12/26/2013')
verify_date_or_time(ENROLLMENT_START_DATE_CSS, '12/01/2013')
verify_date_or_time(ENROLLMENT_END_DATE_CSS, '12/10/2013')
verify_date_or_time(COURSE_START_TIME_CSS, DUMMY_TIME)
# Unset times get set to 12 AM once the corresponding date has been set.
verify_date_or_time(COURSE_END_TIME_CSS, DEFAULT_TIME)
verify_date_or_time(ENROLLMENT_START_TIME_CSS, DEFAULT_TIME)
verify_date_or_time(ENROLLMENT_END_TIME_CSS, DUMMY_TIME)
| agpl-3.0 |
willingc/oh-mainline | vendor/packages/Django/django/conf/locale/pt/formats.py | 106 | 1499 | # -*- encoding: utf-8 -*-
# This file is distributed under the same license as the Django package.
#
# The *_FORMAT strings use the Django date format syntax,
# see http://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = r'j \d\e F \d\e Y'
TIME_FORMAT = 'H:i:s'
DATETIME_FORMAT = r'j \d\e F \d\e Y à\s H:i'
YEAR_MONTH_FORMAT = r'F \d\e Y'
MONTH_DAY_FORMAT = r'j \d\e F'
SHORT_DATE_FORMAT = 'd/m/Y'
SHORT_DATETIME_FORMAT = 'd/m/Y H:i'
FIRST_DAY_OF_WEEK = 0 # Sunday
# The *_INPUT_FORMATS strings use the Python strftime format syntax,
# see http://docs.python.org/library/datetime.html#strftime-strptime-behavior
# Kept ISO formats as they are in first position
DATE_INPUT_FORMATS = (
'%Y-%m-%d', '%d/%m/%Y', '%d/%m/%y', # '2006-10-25', '25/10/2006', '25/10/06'
# '%d de %b de %Y', '%d de %b, %Y', # '25 de Out de 2006', '25 Out, 2006'
# '%d de %B de %Y', '%d de %B, %Y', # '25 de Outubro de 2006', '25 de Outubro, 2006'
)
DATETIME_INPUT_FORMATS = (
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%d/%m/%Y %H:%M:%S', # '25/10/2006 14:30:59'
'%d/%m/%Y %H:%M', # '25/10/2006 14:30'
'%d/%m/%Y', # '25/10/2006'
'%d/%m/%y %H:%M:%S', # '25/10/06 14:30:59'
'%d/%m/%y %H:%M', # '25/10/06 14:30'
'%d/%m/%y', # '25/10/06'
)
DECIMAL_SEPARATOR = ','
THOUSAND_SEPARATOR = '.'
NUMBER_GROUPING = 3
| agpl-3.0 |
LumaPictures/rez | src/rez/exceptions.py | 1 | 4824 | """
Exceptions.
"""
from contextlib import contextmanager
class RezError(Exception):
    """Base-class Rez error.

    Carries an arbitrary `value` and renders it as the error message.
    """
    def __init__(self, value=None):
        self.value = value

    def __str__(self):
        return "%s" % (self.value,)
class RezSystemError(RezError):
"""Rez system/internal error."""
pass
class RezBindError(RezError):
"""A bind-related error."""
pass
class RezPluginError(RezError):
"""An error related to plugin or plugin load."""
pass
class ConfigurationError(RezError):
"""A misconfiguration error."""
pass
class ResolveError(RezError):
"""A resolve-related error."""
pass
class PackageFamilyNotFoundError(RezError):
"""A package could not be found on disk."""
pass
class PackageNotFoundError(RezError):
"""A package could not be found on disk."""
pass
class ResourceError(RezError):
"""Resource-related exception base class."""
pass
class ResourceNotFoundError(ResourceError):
"""A resource could not be found."""
pass
class ResourceContentError(ResourceError):
    """A resource contains incorrect data."""
    type_name = "resource file"

    def __init__(self, value=None, path=None, resource_key=None):
        # Assemble "resource type: ...: <type_name>: <path>: <value>",
        # including only the parts that were supplied.
        pieces = []
        if resource_key is not None:
            pieces.append("resource type: %r" % resource_key)
        if path is not None:
            pieces.append("%s: %s" % (self.type_name, path))
        if value is not None:
            pieces.append(value)
        super(ResourceContentError, self).__init__(": ".join(pieces))
class PackageMetadataError(ResourceContentError):
"""There is an error in a package's definition file."""
type_name = "package definition file"
class PackageCommandError(RezError):
"""There is an error in a command or list of commands"""
pass
class PackageRequestError(RezError):
"""There is an error related to a package request."""
pass
class PackageTestError(RezError):
"""There was a problem running a package test."""
pass
class ResolvedContextError(RezError):
"""An error occurred in a resolved context."""
pass
class RexError(RezError):
"""There is an error in Rex code."""
pass
class RexUndefinedVariableError(RexError):
"""There is a reference to an undefined variable."""
pass
class BuildError(RezError):
"""Base class for any build-related error."""
pass
class BuildSystemError(BuildError):
"""Base class for buildsys-related errors."""
pass
class BuildContextResolveError(BuildError):
"""Raised if unable to resolve the required context when creating the
environment for a build process."""
def __init__(self, context):
self.context = context
assert context.status != "solved"
msg = ("The build environment could not be resolved:\n%s"
% context.failure_description)
super(BuildContextResolveError, self).__init__(msg)
class BuildProcessError(RezError):
"""Base class for build process-related errors."""
pass
class ReleaseError(RezError):
"""Any release-related error."""
pass
class ReleaseVCSError(ReleaseError):
"""Base class for release VCS-related errors."""
pass
class ReleaseHookError(RezError):
"""Base class for release-hook- related errors."""
pass
class ReleaseHookCancellingError(RezError):
"""A release hook error that asks to cancel the release as a result."""
pass
class SuiteError(RezError):
"""Any suite-related error."""
pass
class PackageRepositoryError(RezError):
"""Base class for package repository- related errors."""
pass
class InvalidPackageError(RezError):
"""A special case exception used in package 'preprocess function'."""
pass
class RezGuiQTImportError(ImportError):
"""A special case - see cli/gui.py
"""
pass
@contextmanager
def convert_errors(from_, to, msg=None):
    """Context manager that re-raises exceptions of type(s) `from_` as `to`.

    The replacement exception's message is
    "<msg>: <OriginalClass>: <original message>" (the "<msg>: " prefix is
    omitted when `msg` is not given).
    """
    trapped = None
    try:
        yield None
    except from_ as e:
        trapped = e
    if trapped is not None:
        detail = "%s: %s" % (trapped.__class__.__name__, str(trapped))
        if msg:
            detail = "%s: %s" % (msg, detail)
        raise to(detail)
# Copyright 2013-2016 Allan Johns.
#
# This library is free software: you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library. If not, see <http://www.gnu.org/licenses/>.
| lgpl-3.0 |
memo/tensorflow | tensorflow/tensorboard/backend/event_processing/io_wrapper.py | 77 | 1818 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""IO helper functions."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from tensorflow.python.platform import gfile
def IsGCSPath(path):
  """Returns True when `path` is a Google Cloud Storage (gs://) URL."""
  gcs_prefix = "gs://"
  return path.startswith(gcs_prefix)
def ListDirectoryAbsolute(directory):
  """Yields all files in the given directory. The paths are absolute."""
  # List the directory eagerly (matching the original behavior, where the
  # outermost iterable of the generator expression was evaluated at call
  # time), then lazily join each entry onto the directory path.
  entries = gfile.ListDirectory(directory)
  return (os.path.join(directory, entry) for entry in entries)
def ListRecursively(top):
  """Walks a directory tree, yielding (dir_path, file_paths) tuples.

  For each of `top` and its subdirectories, yields a tuple containing the path
  to the directory and the path to each of the contained files.  Note that
  unlike os.Walk()/gfile.Walk(), this does not list subdirectories and the
  file paths are all absolute.

  If the directory does not exist, this yields nothing.

  Args:
    top: A path to a directory.

  Yields:
    A list of (dir_path, file_paths) tuples.
  """
  for dir_path, _, filenames in gfile.Walk(top):
    absolute = (os.path.join(dir_path, name) for name in filenames)
    yield (dir_path, absolute)
| apache-2.0 |
denys-duchier/kivy | examples/keyboard/main.py | 30 | 7441 | """
Custom Keyboards
================
This demo shows how to create and display custom keyboards on screen. Note that
the new "input_type" property of the TextInput means that this is rarely needed.
We provide this demo for the sake of completeness.
"""
# Author: Zen-CODE
from kivy.app import App
from kivy.lang import Builder
from kivy.core.window import Window
from kivy.uix.vkeyboard import VKeyboard
from kivy.properties import ObjectProperty
from kivy.uix.button import Button
from functools import partial
from kivy.config import Config
from kivy.uix.screenmanager import Screen, ScreenManager
from kivy import require
# This example uses features introduced in Kivy 1.8.0, namely being able to load
# custom json files from the app folder
require("1.8.0")
Builder.load_string('''
<KeyboardScreen>:
displayLabel: displayLabel
kbContainer: kbContainer
BoxLayout:
orientation: 'vertical'
Label:
size_hint_y: 0.15
text: "Available Keyboard Layouts"
BoxLayout:
id: kbContainer
size_hint_y: 0.2
orientation: "horizontal"
padding: 10
Label:
id: displayLabel
size_hint_y: 0.15
markup: True
text: "[b]Key pressed[/b] - None"
halign: "center"
Button:
text: "Back"
size_hint_y: 0.1
on_release: root.parent.current = "mode"
Widget:
# Just a space taker to allow for the popup keyboard
size_hint_y: 0.5
<ModeScreen>:
center_label: center_label
mode_spinner: mode_spinner
FloatLayout:
BoxLayout:
orientation: "vertical"
size_hint: 0.8, 0.8
pos_hint: {"x": 0.1, "y": 0.1}
padding: "5sp"
spacing: "5sp"
Label:
canvas:
Color:
rgba: 0, 0, 1, 0.3
Rectangle:
pos: self.pos
size: self.size
text: "Custom Keyboard Demo"
size_hint_y: 0.1
Label:
id: center_label
markup: True
size_hint_y: 0.6
BoxLayout:
orientation: "horizontal"
size_hint_y: 0.1
padding: "5sp"
Widget:
size_hint_x: 0.2
Label:
text: "Current keyboard mode :"
Spinner:
id: mode_spinner
values: "''", "'dock'", "'system'", "'systemanddock'",\
"'systemandmulti'"
Button:
text: "Set"
on_release: root.set_mode(mode_spinner.text)
Widget:
size_hint_x: 0.2
Widget:
size_hint_y: 0.1
BoxLayout:
orientation: "horizontal"
size_hint_y: 0.1
Button:
text: "Exit"
on_release: exit()
Button:
text: "Continue"
on_release: root.next()
''')
class ModeScreen(Screen):
    """
    Present the option to change keyboard mode and warn of system-wide
    consequences.
    """
    # Widget references bound by ids in the kv rule at the top of this file.
    center_label = ObjectProperty()
    mode_spinner = ObjectProperty()
    # Cached value of the 'kivy'/'keyboard_mode' config option; refreshed
    # every time this screen is about to be shown.
    keyboard_mode = ""

    def on_pre_enter(self, *args):
        """ Detect the current keyboard mode and set the text of the main
        label accordingly. """
        self.keyboard_mode = Config.get("kivy", "keyboard_mode")
        self.mode_spinner.text = "'{0}'".format(self.keyboard_mode)

        p1 = "Current keyboard mode: '{0}'\n\n".format(self.keyboard_mode)
        if self.keyboard_mode in ['dock', 'system', 'systemanddock']:
            p2 = "You have the right setting to use this demo.\n\n"
        else:
            # NOTE(review): this literal renders a stray quote as
            # "or ''systemanddock'" -- present in the original message and
            # left unchanged because message strings are runtime behavior.
            p2 = "You need the keyboard mode to 'dock', 'system' or '"\
                 "'systemanddock'(below)\n in order to "\
                 "use custom onscreen keyboards.\n\n"

        p3 = "[b][color=#ff0000]Warning:[/color][/b] This is a system-wide " \
             "setting and will affect all Kivy apps. If you change the\n" \
             " keyboard mode, please use this app" \
             " to reset this value to it's original one."

        self.center_label.text = "".join([p1, p2, p3])

    def set_mode(self, mode):
        """ Sets the keyboard mode to the one specified """
        # Spinner values are quoted (e.g. "'dock'"); strip the quotes before
        # persisting the option.
        Config.set("kivy", "keyboard_mode", mode.replace("'", ""))
        Config.write()
        self.center_label.text = "Please restart the application for this\n" \
                                 "setting to take effect."

    def next(self):
        """ Continue to the main screen """
        self.manager.switch_to(KeyboardScreen())
class KeyboardScreen(Screen):
    """
    Screen containing all the available keyboard layouts. Clicking the buttons
    switches to these layouts.
    """
    displayLabel = ObjectProperty()
    kbContainer = ObjectProperty()

    def __init__(self, **kwargs):
        super(KeyboardScreen, self).__init__(**kwargs)
        self._add_keyboards()
        self._keyboard = None

    def _add_keyboards(self):
        """ Add a buttons for each available keyboard layout. When clicked,
        the buttons will change the keyboard layout to the one selected. """
        # list() is required: on Python 3 dict.keys() returns a view object
        # which has no append() method, so the keys() result could not be
        # extended with our custom layout file below.
        layouts = list(VKeyboard().available_layouts.keys())
        layouts.append("numeric.json")  # Add the file in our app directory
        # Note the .json extension is required
        for key in layouts:
            self.kbContainer.add_widget(
                Button(
                    text=key,
                    on_release=partial(self.set_layout, key)))

    def set_layout(self, layout, button):
        """ Change the keyboard layout to the one specified by *layout*. """
        kb = Window.request_keyboard(
            self._keyboard_close, self)
        if kb.widget:
            # If the current configuration supports Virtual Keyboards, this
            # widget will be a kivy.uix.vkeyboard.VKeyboard instance.
            self._keyboard = kb.widget
            self._keyboard.layout = layout
        else:
            self._keyboard = kb
        self._keyboard.bind(on_key_down=self.key_down,
                            on_key_up=self.key_up)

    def _keyboard_close(self, *args):
        """ The active keyboard is being closed. """
        if self._keyboard:
            self._keyboard.unbind(on_key_down=self.key_down)
            self._keyboard.unbind(on_key_up=self.key_up)
            self._keyboard = None

    def key_down(self, keyboard, keycode, text, modifiers):
        """ The callback function that catches keyboard events. """
        self.displayLabel.text = "Key pressed - {0}".format(text)

    def key_up(self, keyboard, keycode, text, modifiers):
        """ The callback function that catches keyboard events. """
        self.displayLabel.text += " (up {0})".format(text)
class KeyboardDemo(App):
    sm = None  # The root screen manager

    def build(self):
        """Create the screen manager holding the mode and keyboard screens."""
        screen_manager = ScreenManager()
        screen_manager.add_widget(ModeScreen(name="mode"))
        screen_manager.add_widget(KeyboardScreen(name="keyboard"))
        screen_manager.current = "mode"
        self.sm = screen_manager
        return screen_manager
if __name__ == "__main__":
KeyboardDemo().run()
| mit |
thundernixon/dailydecode | node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/easy_xml.py | 1049 | 4803 | # Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import os
def XmlToString(content, encoding='utf-8', pretty=False):
  """ Converts the structured content into its XML string representation.

  (This function performs no disk I/O; WriteXmlIfChanged handles writing.)

  Visual Studio files have a lot of pre-defined structures.  This function
  makes it easy to represent these structures as Python data structures,
  instead of having to create a lot of function calls.

  Each XML element of the content is represented as a list composed of:
  1. The name of the element, a string,
  2. The attributes of the element, a dictionary (optional), and
  3+. The content of the element, if any.  Strings are simple text nodes and
      lists are child elements.

  Example 1:
      <test/>
  becomes
      ['test']

  Example 2:
      <myelement a='value1' b='value2'>
         <childtype>This is</childtype>
         <childtype>it!</childtype>
      </myelement>
  becomes
      ['myelement', {'a':'value1', 'b':'value2'},
         ['childtype', 'This is'],
         ['childtype', 'it!'],
      ]

  Args:
    content: The structured content to be converted.
    encoding: The encoding to report on the first XML line.
    pretty: True if we want pretty printing with indents and new lines.

  Returns:
    The XML content as a string.
  """
  # We create a huge list of all the elements of the file.
  xml_parts = ['<?xml version="1.0" encoding="%s"?>' % encoding]
  if pretty:
    xml_parts.append('\n')
  _ConstructContentList(xml_parts, content, pretty)

  # Convert it to a string
  return ''.join(xml_parts)
def _ConstructContentList(xml_parts, specification, pretty, level=0):
""" Appends the XML parts corresponding to the specification.
Args:
xml_parts: A list of XML parts to be appended to.
specification: The specification of the element. See EasyXml docs.
pretty: True if we want pretty printing with indents and new lines.
level: Indentation level.
"""
# The first item in a specification is the name of the element.
if pretty:
indentation = ' ' * level
new_line = '\n'
else:
indentation = ''
new_line = ''
name = specification[0]
if not isinstance(name, str):
raise Exception('The first item of an EasyXml specification should be '
'a string. Specification was ' + str(specification))
xml_parts.append(indentation + '<' + name)
# Optionally in second position is a dictionary of the attributes.
rest = specification[1:]
if rest and isinstance(rest[0], dict):
for at, val in sorted(rest[0].iteritems()):
xml_parts.append(' %s="%s"' % (at, _XmlEscape(val, attr=True)))
rest = rest[1:]
if rest:
xml_parts.append('>')
all_strings = reduce(lambda x, y: x and isinstance(y, str), rest, True)
multi_line = not all_strings
if multi_line and new_line:
xml_parts.append(new_line)
for child_spec in rest:
# If it's a string, append a text node.
# Otherwise recurse over that child definition
if isinstance(child_spec, str):
xml_parts.append(_XmlEscape(child_spec))
else:
_ConstructContentList(xml_parts, child_spec, pretty, level + 1)
if multi_line and indentation:
xml_parts.append(indentation)
xml_parts.append('</%s>%s' % (name, new_line))
else:
xml_parts.append('/>%s' % new_line)
def WriteXmlIfChanged(content, path, encoding='utf-8', pretty=False,
                      win32=False):
  """ Writes the XML content to disk, touching the file only if it has changed.

  Args:
    content: The structured content to be written.
    path: Location of the file.
    encoding: The encoding to report on the first line of the XML file.
    pretty: True if we want pretty printing with indents and new lines.
    win32: True to emit Windows (CRLF) line endings.
  """
  xml_string = XmlToString(content, encoding, pretty)
  if win32 and os.linesep != '\r\n':
    xml_string = xml_string.replace('\n', '\r\n')

  # Get the old content.  A missing or unreadable file simply means we
  # (re)write it; the narrow except avoids the original bare `except:`
  # which also swallowed KeyboardInterrupt/SystemExit.  The with-statements
  # guarantee the handles are closed even if read()/write() raises.
  try:
    with open(path, 'r') as f:
      existing = f.read()
  except EnvironmentError:
    existing = None

  # It has changed, write it
  if existing != xml_string:
    with open(path, 'w') as f:
      f.write(xml_string)
_xml_escape_map = {
'"': '"',
"'": ''',
'<': '<',
'>': '>',
'&': '&',
'\n': '
',
'\r': '
',
}
_xml_escape_re = re.compile(
"(%s)" % "|".join(map(re.escape, _xml_escape_map.keys())))
def _XmlEscape(value, attr=False):
""" Escape a string for inclusion in XML."""
def replace(match):
m = match.string[match.start() : match.end()]
# don't replace single quotes in attrs
if attr and m == "'":
return m
return _xml_escape_map[m]
return _xml_escape_re.sub(replace, value)
| mit |
ahtn/keyplus | host-software/keyplus/utility/crc16.py | 2 | 1245 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright 2017 jem@seethis.link
# Licensed under the MIT license (http://opensource.org/licenses/MIT)
def u16(x):
    """Truncate x to its low 16 bits (unsigned)."""
    return x % 0x10000
def crc16_step(crc, byte):
    """Advance a CRC-16/CCITT accumulator (polynomial 0x1021) by one byte,
    feeding the bits of `byte` most-significant first."""
    polynomial = 0x1021
    for shift in range(8):
        carry = crc >> 15                    # bit shifted out of the register
        data_bit = (byte >> (7 - shift)) & 0x01
        if data_bit ^ carry:
            crc = u16((crc << 1) ^ polynomial)
        else:
            crc = u16(crc << 1)
    return crc
def crc16_bytes(data):
    """CRC-16/CCITT (initial value 0xffff) over an iterable of byte values."""
    accumulator = 0xffff
    for octet in data:
        accumulator = crc16_step(accumulator, octet)
    return accumulator
def crc16(data, endaddr=None):
    """Return the CRC-16/CCITT of data[0:endaddr].

    Args:
        data: indexable sequence of byte values (ints).
        endaddr: number of leading bytes to include; defaults to len(data).
    """
    # 'is None' rather than '== None': identity test is the correct idiom
    # and avoids invoking a custom __eq__.
    if endaddr is None:
        endaddr = len(data)
    crc = 0xffff
    for i in range(endaddr):
        crc = crc16_step(crc, data[i])
    return crc
def is_valid_crc16(data, endaddr=None):
    """True when the CRC over data (message plus appended CRC) is zero."""
    return not crc16(data, endaddr)
if __name__ == "__main__":
import intelhex
import sys
hexfile = intelhex.IntelHex()
hexfile.loadhex("unifying.hex")
endaddr = hexfile.maxaddr()
checksum = crc16(hexfile, 0x6800-2)
# print("crc16", hex(checksum))
# print("crc16 is valid", is_valid_crc16(hexfile, 0x6800))
hexfile.write_hex_file(sys.stdout)
| mit |
gkotton/neutron | tools/i18n_cfg.py | 65 | 3491 | import compiler
import re
def is_log_callfunc(n):
    """LOG.xxx('hello %s' % xyz) and LOG('hello')"""
    # NOTE(review): relies on every AST node carrying a `parent` link --
    # these are added by the checker driving this predicate, not by the
    # (Python 2-only) stdlib `compiler` module itself; confirm with caller.
    # Step over an enclosing `%` formatting node so the tests below see the
    # call expression itself.
    if isinstance(n.parent, compiler.ast.Mod):
        n = n.parent
    if isinstance(n.parent, compiler.ast.CallFunc):
        if isinstance(n.parent.node, compiler.ast.Getattr):
            if isinstance(n.parent.node.getChildNodes()[0],
                          compiler.ast.Name):
                if n.parent.node.getChildNodes()[0].name == 'LOG':
                    return True
    return False
def is_log_i18n_msg_with_mod(n):
    """LOG.xxx("Hello %s" % xyz) should be LOG.xxx("Hello %s", xyz)"""
    # Only flag string constants that sit inside a `%` expression which is
    # itself the argument of a LOG.<method>() call.
    if not isinstance(n.parent.parent, compiler.ast.Mod):
        return False
    n = n.parent.parent
    if isinstance(n.parent, compiler.ast.CallFunc):
        if isinstance(n.parent.node, compiler.ast.Getattr):
            if isinstance(n.parent.node.getChildNodes()[0],
                          compiler.ast.Name):
                if n.parent.node.getChildNodes()[0].name == 'LOG':
                    return True
    return False
def is_wrong_i18n_format(n):
    """Check _('hello %s' % xyz)"""
    # Skip past a surrounding % (Mod) node, then test whether the result
    # is the argument of a call to the translation function `_`.
    if isinstance(n.parent, compiler.ast.Mod):
        n = n.parent
    call = n.parent
    return (isinstance(call, compiler.ast.CallFunc) and
            isinstance(call.node, compiler.ast.Name) and
            call.node.name == '_')
"""
Used for check message need be localized or not.
(predicate_func, action, message)
"""
i18n_msg_predicates = [
# Skip ['hello world', 1]
(lambda n: isinstance(n.parent, compiler.ast.List), 'skip', ''),
# Skip {'hellow world', 1}
(lambda n: isinstance(n.parent, compiler.ast.Dict), 'skip', ''),
# Skip msg['hello world']
(lambda n: isinstance(n.parent, compiler.ast.Subscript), 'skip', ''),
# Skip doc string
(lambda n: isinstance(n.parent, compiler.ast.Discard), 'skip', ''),
# Skip msg = "hello", in normal, message should more than one word
(lambda n: len(n.value.strip().split(' ')) <= 1, 'skip', ''),
# Skip msg = 'hello world' + vars + 'world hello'
(lambda n: isinstance(n.parent, compiler.ast.Add), 'skip', ''),
# Skip xml markers msg = "<test></test>"
(lambda n: len(re.compile("</.*>").findall(n.value)) > 0, 'skip', ''),
# Skip sql statement
(lambda n: len(
re.compile("^SELECT.*FROM", flags=re.I).findall(n.value)) > 0,
'skip', ''),
# LOG.xxx()
(is_log_callfunc, 'error', 'Message must be localized'),
# _('hello %s' % xyz) should be _('hello %s') % xyz
(is_wrong_i18n_format, 'error',
("Message format was wrong, _('hello %s' % xyz) "
"should be _('hello %s') % xyz")),
# default
(lambda n: True, 'warn', 'Message might need localized')
]
"""
Used for checking message format. (checker_func, message)
"""
msg_format_checkers = [
# If message contain more than on format specifier, it should use
# mapping key
(lambda n: len(re.compile("%[bcdeEfFgGnosxX]").findall(n.value)) > 1,
"The message shouldn't contain more than one format specifier"),
# Check capital
(lambda n: n.value.split(' ')[0].count('_') == 0 and
n.value[0].isalpha() and
n.value[0].islower(),
"First letter must be capital"),
(is_log_i18n_msg_with_mod,
'LOG.xxx("Hello %s" % xyz) should be LOG.xxx("Hello %s", xyz)')
]
file_black_list = ["./neutron/tests/unit",
"./neutron/openstack",
"./neutron/plugins/bigswitch/tests"]
| apache-2.0 |
kunato/s3-u6 | arch/ia64/scripts/unwcheck.py | 13143 | 1714 | #!/usr/bin/python
#
# Usage: unwcheck.py FILE
#
# This script checks the unwind info of each function in file FILE
# and verifies that the sum of the region-lengths matches the total
# length of the function.
#
# Based on a shell/awk script originally written by Harish Patil,
# which was converted to Perl by Matthew Chapman, which was converted
# to Python by David Mosberger.
#
import os
import re
import sys
# Exactly one argument is required: the object file to inspect.
if len(sys.argv) != 2:
    print "Usage: %s FILE" % sys.argv[0]
    sys.exit(2)

# Allow overriding the readelf binary (e.g. a cross toolchain) via $READELF.
readelf = os.getenv("READELF", "readelf")

# Matches a readelf -u function header such as: <name>: [0x4000-0x4040]
start_pattern = re.compile("<([^>]*)>: \[0x([0-9a-f]+)-0x([0-9a-f]+)\]")
# Matches an unwind-region descriptor line carrying "rlen=<slots>".
rlen_pattern = re.compile(".*rlen=([0-9]+)")
def check_func (func, slots, rlen_sum):
    """Verify that a function's unwind-region lengths add up to its size.

    *slots* is the number of instruction slots the function spans and
    *rlen_sum* is the total of the region lengths reported by readelf;
    any mismatch is counted in the module-level `num_errors` and
    reported.  For an unnamed function the raw address range (the
    module-level `start`/`end` set by the scan loop) is printed instead.
    """
    if slots != rlen_sum:
        global num_errors
        num_errors += 1
        if not func: func = "[%#x-%#x]" % (start, end)
        print "ERROR: %s: %lu slots, total region length = %lu" % (func, slots, rlen_sum)
    return
# Running totals for the final summary.
num_funcs = 0
num_errors = 0
# State for the function currently being scanned (False until the first
# function header has been seen).
func = False
slots = 0
rlen_sum = 0
for line in os.popen("%s -u %s" % (readelf, sys.argv[1])):
    m = start_pattern.match(line)
    if m:
        # A new function header: flush the previous function's totals first.
        check_func(func, slots, rlen_sum)
        func = m.group(1)
        start = long(m.group(2), 16)
        end = long(m.group(3), 16)
        # ia64 packs 3 instruction slots into each 16-byte bundle.
        slots = 3 * (end - start) / 16
        rlen_sum = 0L
        num_funcs += 1
    else:
        # Inside a function: accumulate the region lengths (rlen=N lines).
        m = rlen_pattern.match(line)
        if m:
            rlen_sum += long(m.group(1))
# Flush the totals of the last function in the file.
check_func(func, slots, rlen_sum)
if num_errors == 0:
    print "No errors detected in %u functions." % num_funcs
else:
    if num_errors > 1:
        err="errors"
    else:
        err="error"
    print "%u %s detected in %u functions." % (num_errors, err, num_funcs)
    sys.exit(1)
| gpl-2.0 |
avasenin/mzbench | node/scripts/report_network_usage.py | 5 | 2867 | #!/usr/bin/env python
"""
Prints network usage in erlang-friendly format
"""
import os
import re
import subprocess
def main():
    """Print per-interface rx/tx byte counters as an Erlang map list.

    Dispatches on the OS: on Darwin it parses the columnar output of
    `netstat -ib`, on Linux the sectioned output of `netstat -ine`.
    Any other platform raises RuntimeError.
    """
    uname = os.uname()[0]
    if uname == 'Darwin':
        p = subprocess.Popen(['netstat', '-ib'], stdout=subprocess.PIPE)
        (netstat_output, err) = p.communicate()
        lines = netstat_output.split('\n')
        # Locate the needed columns by header name rather than position.
        column_titles = re.split(r'\s+', lines[0])
        name_column_index = column_titles.index('Name')
        rx_column_index = column_titles.index('Ibytes')
        tx_column_index = column_titles.index('Obytes')
        def data():
            # netstat -ib repeats an interface once per address family;
            # only the first row per interface name is yielded.
            yielded_names = set()
            for line in lines[1:]:
                if not line:
                    continue
                fields = re.split(r'\s+', line)
                name = fields[name_column_index]
                if name not in yielded_names:
                    yield {'name': name,
                           'rx_bytes': int(fields[rx_column_index]),
                           'tx_bytes': int(fields[tx_column_index])}
                    yielded_names.add(name)
        stats = list(data())
    elif uname == 'Linux':
        p = subprocess.Popen(['netstat', '-ine'], stdout=subprocess.PIPE)
        (netstat_output, err) = p.communicate()
        # Drop the header line; interface sections are separated by
        # blank lines.
        (_header, _newline, body) = netstat_output.partition('\n')
        netstat_sections = body.split('\n\n')
        stats = [parse_netstat_section(section)
                 for section in netstat_sections
                 if section]
    else:
        raise RuntimeError("Getting network usage on {0} is not supported.".format(uname))
    # Trailing '.' terminates the term for Erlang consumers.
    print format_value(stats) + '.'
def parse_netstat_section(s):
    """Parse one blank-line-separated interface section of `netstat -ine`.

    Returns a dict with 'name', 'rx_bytes' and 'tx_bytes' keys, or None
    when the section is empty or lacks the RX/TX byte counters.
    """
    if not s:
        return None
    try:
        name = s.split(' ')[0]
        rx_bytes = re.search(r'RX bytes:(\d+)', s).group(1)
        tx_bytes = re.search(r'TX bytes:(\d+)', s).group(1)
        return {'name': name,
                'rx_bytes': int(rx_bytes),
                'tx_bytes': int(tx_bytes)}
    # fix: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt.  The only expected failure is re.search()
    # returning None (-> AttributeError on .group).
    except AttributeError:
        # Some interface sections don't have transmitted bytes info
        # so we ignore them.
        # Example from a travis-ci vm:
        #
        # venet0:0  Link encap:UNSPEC  HWaddr 00-00-00-00-00-00-00-00-00-00-00-00-00-00-00-00
        #           inet addr:1.2.3.4  P-t-P:5.6.7.8  Bcast:0.0.0.0  Mask:255.255.255.255
        #           UP BROADCAST POINTOPOINT RUNNING NOARP  MTU:1500  Metric:1
        return None
def format_value(value):
    """Recursively render *value* as an Erlang term string.

    dicts become maps (#{k => v}), lists become Erlang lists, ints are
    printed bare, and anything else is wrapped in double quotes.
    """
    if isinstance(value, dict):
        # fix: .items() instead of the Python-2-only .iteritems();
        # .items() also exists on Python 2, so behavior is unchanged.
        body = ', '.join('{0} => {1}'.format(k, format_value(v))
                         for k, v in value.items())
        return '#{' + body + '}'
    elif isinstance(value, list):
        body = ', '.join(format_value(v) for v in value)
        return '[' + body + ']'
    elif isinstance(value, int):
        return str(value)
    else:
        return '"{0}"'.format(value)
if __name__ == '__main__':
main() | bsd-3-clause |
cognitiveclass/edx-platform | common/lib/xmodule/xmodule/tests/test_annotator_mixin.py | 223 | 1932 | """
This test will run for annotator_mixin.py
"""
import unittest
from lxml import etree
from xmodule.annotator_mixin import get_instructions, get_extension, html_to_text
class HelperFunctionTest(unittest.TestCase):
    """
    Tests to ensure that the following helper functions work for the annotation tool
    """
    sample_xml = '''
        <annotatable>
            <instructions><p>Helper Test Instructions.</p></instructions>
        </annotatable>
    '''
    sample_sourceurl = "http://video-js.zencoder.com/oceans-clip.mp4"
    sample_youtubeurl = "http://www.youtube.com/watch?v=yxLIu-scR9Y"
    sample_html = '<p><b>Testing here</b> and not bolded here</p>'

    def test_get_instructions(self):
        """
        Function takes in an input of a specific xml string with surrounding instructions
        tags and returns a valid html string.
        """
        tree = etree.fromstring(self.sample_xml)
        rendered = get_instructions(tree)
        self.assertIsNotNone(rendered)
        self.assertEqual(u"<div><p>Helper Test Instructions.</p></div>".strip(),
                         rendered.strip())

        # Without an <instructions> element nothing should be returned.
        bare_tree = etree.fromstring('<annotatable>foo</annotatable>')
        self.assertIsNone(get_instructions(bare_tree))

    def test_get_extension(self):
        """
        Tests whether given a url if the video will return a youtube source or extension
        """
        mp4_extension = get_extension(self.sample_sourceurl)
        youtube_extension = get_extension(self.sample_youtubeurl)
        self.assertEqual('video/youtube', youtube_extension)
        self.assertEqual('video/mp4', mp4_extension)

    def test_html_to_text(self):
        # Tags are stripped while the text content is preserved.
        self.assertEqual("Testing here and not bolded here",
                         html_to_text(self.sample_html))
| agpl-3.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.