repo_name | path | copies | size | content | license
|---|---|---|---|---|---|
tboyce021/home-assistant | tests/components/openalpr_local/test_image_processing.py | 13 | 5024 | """The tests for the openalpr local platform."""
import homeassistant.components.image_processing as ip
from homeassistant.const import ATTR_ENTITY_PICTURE
from homeassistant.core import callback
from homeassistant.setup import setup_component
from tests.async_mock import MagicMock, PropertyMock, patch
from tests.common import assert_setup_component, get_test_home_assistant, load_fixture
from tests.components.image_processing import common
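# Stand-in for asyncio.create_subprocess_exec: communicate() returns canned
# OpenALPR stdout from a fixture, so no real alpr binary is needed in tests.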
def mock_async_subprocess():
"""Get a Popen mock back."""
async_popen = MagicMock()
async def communicate(input=None):
"""Communicate mock."""
fixture = bytes(load_fixture("alpr_stdout.txt"), "utf-8")
return (fixture, None)
async_popen.communicate = communicate
return async_popen
class TestOpenAlprLocalSetup:
"""Test class for image processing."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
def test_setup_platform(self):
"""Set up platform with one entity."""
config = {
ip.DOMAIN: {
"platform": "openalpr_local",
"source": {"entity_id": "camera.demo_camera"},
"region": "eu",
},
"camera": {"platform": "demo"},
}
with assert_setup_component(1, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
assert self.hass.states.get("image_processing.openalpr_demo_camera")
def test_setup_platform_name(self):
"""Set up platform with one entity and set name."""
config = {
ip.DOMAIN: {
"platform": "openalpr_local",
"source": {"entity_id": "camera.demo_camera", "name": "test local"},
"region": "eu",
},
"camera": {"platform": "demo"},
}
with assert_setup_component(1, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
assert self.hass.states.get("image_processing.test_local")
def test_setup_platform_without_region(self):
"""Set up platform with one entity without region."""
config = {
ip.DOMAIN: {
"platform": "openalpr_local",
"source": {"entity_id": "camera.demo_camera"},
},
"camera": {"platform": "demo"},
}
with assert_setup_component(0, ip.DOMAIN):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
class TestOpenAlprLocal:
"""Test class for image processing."""
def setup_method(self):
"""Set up things to be run when tests are started."""
self.hass = get_test_home_assistant()
config = {
ip.DOMAIN: {
"platform": "openalpr_local",
"source": {"entity_id": "camera.demo_camera", "name": "test local"},
"region": "eu",
},
"camera": {"platform": "demo"},
}
with patch(
"homeassistant.components.openalpr_local.image_processing."
"OpenAlprLocalEntity.should_poll",
new_callable=PropertyMock(return_value=False),
):
setup_component(self.hass, ip.DOMAIN, config)
self.hass.block_till_done()
state = self.hass.states.get("camera.demo_camera")
self.url = f"{self.hass.config.internal_url}{state.attributes.get(ATTR_ENTITY_PICTURE)}"
self.alpr_events = []
@callback
def mock_alpr_event(event):
"""Mock event."""
self.alpr_events.append(event)
self.hass.bus.listen("image_processing.found_plate", mock_alpr_event)
def teardown_method(self):
"""Stop everything that was started."""
self.hass.stop()
@patch("asyncio.create_subprocess_exec", return_value=mock_async_subprocess())
def test_openalpr_process_image(self, popen_mock, aioclient_mock):
"""Set up and scan a picture and test plates from event."""
aioclient_mock.get(self.url, content=b"image")
common.scan(self.hass, entity_id="image_processing.test_local")
self.hass.block_till_done()
state = self.hass.states.get("image_processing.test_local")
assert popen_mock.called
assert len(self.alpr_events) == 5
assert state.attributes.get("vehicles") == 1
assert state.state == "PE3R2X"
event_data = [
event.data
for event in self.alpr_events
if event.data.get("plate") == "PE3R2X"
]
assert len(event_data) == 1
assert event_data[0]["plate"] == "PE3R2X"
assert event_data[0]["confidence"] == float(98.9371)
assert event_data[0]["entity_id"] == "image_processing.test_local"
| apache-2.0 |
Kagee/youtube-dl | youtube_dl/extractor/mailru.py | 81 | 3096 | # encoding: utf-8
from __future__ import unicode_literals
import re
from .common import InfoExtractor
class MailRuIE(InfoExtractor):
IE_NAME = 'mailru'
IE_DESC = 'Видео@Mail.Ru'
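# The URL pattern accepts two styles: the old fragment form
# ".../video/...#video=/<idv1>" and the newer page form
# ".../<prefix>video/<name>/<num>.html"; _real_extract merges whichever
# matched into a single video id.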
_VALID_URL = r'http://(?:www\.)?my\.mail\.ru/(?:video/.*#video=/?(?P<idv1>(?:[^/]+/){3}\d+)|(?:(?P<idv2prefix>(?:[^/]+/){2})video/(?P<idv2suffix>[^/]+/\d+))\.html)'
_TESTS = [
{
'url': 'http://my.mail.ru/video/top#video=/mail/sonypicturesrus/75/76',
'md5': 'dea205f03120046894db4ebb6159879a',
'info_dict': {
'id': '46301138_76',
'ext': 'mp4',
'title': 'Новый Человек-Паук. Высокое напряжение. Восстание Электро',
'timestamp': 1393232740,
'upload_date': '20140224',
'uploader': 'sonypicturesrus',
'uploader_id': 'sonypicturesrus@mail.ru',
'duration': 184,
},
},
{
'url': 'http://my.mail.ru/corp/hitech/video/news_hi-tech_mail_ru/1263.html',
'md5': '00a91a58c3402204dcced523777b475f',
'info_dict': {
'id': '46843144_1263',
'ext': 'mp4',
'title': 'Samsung Galaxy S5 Hammer Smash Fail Battery Explosion',
'timestamp': 1397217632,
'upload_date': '20140411',
'uploader': 'hitech',
'uploader_id': 'hitech@corp.mail.ru',
'duration': 245,
},
},
]
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('idv1')
if not video_id:
video_id = mobj.group('idv2prefix') + mobj.group('idv2suffix')
video_data = self._download_json(
'http://api.video.mail.ru/videos/%s.json?new=1' % video_id, video_id, 'Downloading video JSON')
author = video_data['author']
uploader = author['name']
uploader_id = author.get('id') or author.get('email')
view_count = video_data.get('views_count')
meta_data = video_data['meta']
content_id = '%s_%s' % (
meta_data.get('accId', ''), meta_data['itemId'])
title = meta_data['title']
if title.endswith('.mp4'):
title = title[:-4]
thumbnail = meta_data['poster']
duration = meta_data['duration']
timestamp = meta_data['timestamp']
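# Each entry's 'key' is a quality label such as "720p"; stripping the
# trailing "p" yields the integer height used for format sorting.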
formats = [
{
'url': video['url'],
'format_id': video['key'],
'height': int(video['key'].rstrip('p'))
} for video in video_data['videos']
]
self._sort_formats(formats)
return {
'id': content_id,
'title': title,
'thumbnail': thumbnail,
'timestamp': timestamp,
'uploader': uploader,
'uploader_id': uploader_id,
'duration': duration,
'view_count': view_count,
'formats': formats,
}
| unlicense |
zulip/django | django/db/migrations/graph.py | 351 | 10956 | from __future__ import unicode_literals
import warnings
from collections import deque
from functools import total_ordering
from django.db.migrations.state import ProjectState
from django.utils.datastructures import OrderedSet
from django.utils.encoding import python_2_unicode_compatible
from .exceptions import CircularDependencyError, NodeNotFoundError
RECURSION_DEPTH_WARNING = (
"Maximum recursion depth exceeded while generating migration graph, "
"falling back to iterative approach. If you're experiencing performance issues, "
"consider squashing migrations as described at "
"https://docs.djangoproject.com/en/dev/topics/migrations/#squashing-migrations."
)
@python_2_unicode_compatible
@total_ordering
class Node(object):
"""
A single node in the migration graph. Contains direct links to adjacent
nodes in either direction.
"""
def __init__(self, key):
self.key = key
self.children = set()
self.parents = set()
def __eq__(self, other):
return self.key == other
def __lt__(self, other):
return self.key < other
def __hash__(self):
return hash(self.key)
def __getitem__(self, item):
return self.key[item]
def __str__(self):
return str(self.key)
def __repr__(self):
return '<Node: (%r, %r)>' % self.key
def add_child(self, child):
self.children.add(child)
def add_parent(self, parent):
self.parents.add(parent)
# Use manual caching, @cached_property effectively doubles the
# recursion depth for each recursion.
def ancestors(self):
# Use self.key instead of self to speed up the frequent hashing
# when constructing an OrderedSet.
if '_ancestors' not in self.__dict__:
ancestors = deque([self.key])
for parent in sorted(self.parents):
ancestors.extendleft(reversed(parent.ancestors()))
self.__dict__['_ancestors'] = list(OrderedSet(ancestors))
return self.__dict__['_ancestors']
# Use manual caching, @cached_property effectively doubles the
# recursion depth for each recursion.
def descendants(self):
# Use self.key instead of self to speed up the frequent hashing
# when constructing an OrderedSet.
if '_descendants' not in self.__dict__:
descendants = deque([self.key])
for child in sorted(self.children):
descendants.extendleft(reversed(child.descendants()))
self.__dict__['_descendants'] = list(OrderedSet(descendants))
return self.__dict__['_descendants']
@python_2_unicode_compatible
class MigrationGraph(object):
"""
Represents the digraph of all migrations in a project.
Each migration is a node, and each dependency is an edge. There are
no implicit dependencies between numbered migrations - the numbering is
merely a convention to aid file listing. Every new numbered migration
has a declared dependency to the previous number, meaning that VCS
branch merges can be detected and resolved.
Migrations files can be marked as replacing another set of migrations -
this is to support the "squash" feature. The graph handler isn't responsible
for these; instead, the code to load them in here should examine the
migration files and if the replaced migrations are all either unapplied
or not present, it should ignore the replaced ones, load in just the
replacing migration, and repoint any dependencies that pointed to the
replaced migrations to point to the replacing one.
A node should be a tuple: (app_path, migration_name). The tree special-cases
things within an app - namely, root nodes and leaf nodes ignore dependencies
to other apps.
"""
def __init__(self):
self.node_map = {}
self.nodes = {}
self.cached = False
def add_node(self, key, implementation):
node = Node(key)
self.node_map[key] = node
self.nodes[key] = implementation
self.clear_cache()
def add_dependency(self, migration, child, parent):
if child not in self.nodes:
raise NodeNotFoundError(
"Migration %s dependencies reference nonexistent child node %r" % (migration, child),
child
)
if parent not in self.nodes:
raise NodeNotFoundError(
"Migration %s dependencies reference nonexistent parent node %r" % (migration, parent),
parent
)
self.node_map[child].add_parent(self.node_map[parent])
self.node_map[parent].add_child(self.node_map[child])
self.clear_cache()
def clear_cache(self):
if self.cached:
for node in self.nodes:
self.node_map[node].__dict__.pop('_ancestors', None)
self.node_map[node].__dict__.pop('_descendants', None)
self.cached = False
def forwards_plan(self, target):
"""
Given a node, returns a list of which previous nodes (dependencies)
must be applied, ending with the node itself.
This is the list you would follow if applying the migrations to
a database.
"""
if target not in self.nodes:
raise NodeNotFoundError("Node %r not a valid node" % (target, ), target)
# Use parent.key instead of parent to speed up the frequent hashing in ensure_not_cyclic
self.ensure_not_cyclic(target, lambda x: (parent.key for parent in self.node_map[x].parents))
self.cached = True
node = self.node_map[target]
try:
return node.ancestors()
except RuntimeError:
# fallback to iterative dfs
warnings.warn(RECURSION_DEPTH_WARNING, RuntimeWarning)
return self.iterative_dfs(node)
def backwards_plan(self, target):
"""
Given a node, returns a list of which dependent nodes (descendants)
must be unapplied, ending with the node itself.
This is the list you would follow if removing the migrations from
a database.
"""
if target not in self.nodes:
raise NodeNotFoundError("Node %r not a valid node" % (target, ), target)
# Use child.key instead of child to speed up the frequent hashing in ensure_not_cyclic
self.ensure_not_cyclic(target, lambda x: (child.key for child in self.node_map[x].children))
self.cached = True
node = self.node_map[target]
try:
return node.descendants()
except RuntimeError:
# fallback to iterative dfs
warnings.warn(RECURSION_DEPTH_WARNING, RuntimeWarning)
return self.iterative_dfs(node, forwards=False)
def iterative_dfs(self, start, forwards=True):
"""
Iterative depth first search, for finding dependencies.
"""
visited = deque()
visited.append(start)
if forwards:
stack = deque(sorted(start.parents))
else:
stack = deque(sorted(start.children))
while stack:
node = stack.popleft()
visited.appendleft(node)
if forwards:
children = sorted(node.parents, reverse=True)
else:
children = sorted(node.children, reverse=True)
# reverse sorting is needed because prepending using deque.extendleft
# also effectively reverses values
stack.extendleft(children)
return list(OrderedSet(visited))
def root_nodes(self, app=None):
"""
Returns all root nodes - that is, nodes with no dependencies inside
their app. These are the starting point for an app.
"""
roots = set()
for node in self.nodes:
if (not any(key[0] == node[0] for key in self.node_map[node].parents)
and (not app or app == node[0])):
roots.add(node)
return sorted(roots)
def leaf_nodes(self, app=None):
"""
Returns all leaf nodes - that is, nodes with no dependents in their app.
These are the "most current" version of an app's schema.
Having more than one per app is technically an error, but one that
gets handled further up, in the interactive command - it's usually the
result of a VCS merge and needs some user input.
"""
leaves = set()
for node in self.nodes:
if (not any(key[0] == node[0] for key in self.node_map[node].children)
and (not app or app == node[0])):
leaves.add(node)
return sorted(leaves)
def ensure_not_cyclic(self, start, get_children):
# Algo from GvR:
# http://neopythonic.blogspot.co.uk/2009/01/detecting-cycles-in-directed-graph.html
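# Iterative DFS with an explicit stack: if a child already sits on the
# current stack, the slice from its first occurrence to the top is a cycle.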
todo = set(self.nodes)
while todo:
node = todo.pop()
stack = [node]
while stack:
top = stack[-1]
for node in get_children(top):
if node in stack:
cycle = stack[stack.index(node):]
raise CircularDependencyError(", ".join("%s.%s" % n for n in cycle))
if node in todo:
stack.append(node)
todo.remove(node)
break
else:
node = stack.pop()
def __str__(self):
return 'Graph: %s nodes, %s edges' % self._nodes_and_edges()
def __repr__(self):
nodes, edges = self._nodes_and_edges()
return '<%s: nodes=%s, edges=%s>' % (self.__class__.__name__, nodes, edges)
def _nodes_and_edges(self):
return len(self.nodes), sum(len(node.parents) for node in self.node_map.values())
def make_state(self, nodes=None, at_end=True, real_apps=None):
"""
Given a migration node or nodes, returns a complete ProjectState for it.
If at_end is False, returns the state before the migration has run.
If nodes is not provided, returns the overall most current project state.
"""
if nodes is None:
nodes = list(self.leaf_nodes())
if len(nodes) == 0:
return ProjectState()
if not isinstance(nodes[0], tuple):
nodes = [nodes]
plan = []
for node in nodes:
for migration in self.forwards_plan(node):
if migration not in plan:
if not at_end and migration in nodes:
continue
plan.append(migration)
project_state = ProjectState(real_apps=real_apps)
for node in plan:
project_state = self.nodes[node].mutate_state(project_state, preserve=False)
return project_state
def __contains__(self, node):
return node in self.nodes
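# Example sketch (hypothetical migration keys; each implementation object is
# assumed to provide mutate_state()):
#   graph = MigrationGraph()
#   graph.add_node(('app', '0001_initial'), m1)
#   graph.add_node(('app', '0002_change'), m2)
#   graph.add_dependency('app.0002_change', ('app', '0002_change'), ('app', '0001_initial'))
#   graph.forwards_plan(('app', '0002_change'))
#   # -> [('app', '0001_initial'), ('app', '0002_change')]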
| bsd-3-clause |
tafaRU/odoo | addons/l10n_co/__openerp__.py | 49 | 1812 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) David Arnold (devCO).
# Author David Arnold (devCO), dar@devco.co
# Co-Authors Juan Pablo Aries (devCO), jpa@devco.co
# Hector Ivan Valencia Muñoz (TIX SAS)
# Nhomar Hernandez (Vauxoo)
# Humberto Ochoa (Vauxoo)
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Colombian - Accounting',
'version': '0.8',
'category': 'Localization/Account Charts',
'description': 'Colombian Accounting and Tax Preconfiguration',
'author': 'David Arnold BA HSG (devCO)',
'depends': [
'account',
'base_vat',
'account_chart',
],
'data': [
'data/account.account.type.csv',
'data/account.account.template.csv',
'data/account.tax.code.template.csv',
'data/account_chart_template.xml',
'data/account.tax.template.csv',
'wizard/account_wizard.xml',
],
'demo': [],
'installable': True,
'images': [],
}
| agpl-3.0 |
anusha-ragunathan/docker | vendor/src/github.com/ugorji/go/codec/test.py | 670 | 3808 | #!/usr/bin/env python
# This will create golden files in a directory passed to it.
# A Test calls this internally to create the golden files
# So it can process them (so we don't have to check in the files).
# Ensure msgpack-python and cbor are installed first, using:
# pip install --user msgpack-python
# pip install --user cbor
import cbor, msgpack, msgpackrpc, sys, os, threading
def get_test_data_list():
# get list with all primitive types, and a combo type
l0 = [
-8,
-1616,
-32323232,
-6464646464646464,
192,
1616,
32323232,
6464646464646464,
192,
-3232.0,
-6464646464.0,
3232.0,
6464646464.0,
False,
True,
None,
u"someday",
u"",
u"bytestring",
1328176922000002000,
-2206187877999998000,
270,
-2013855847999995777,
#-6795364578871345152,
]
l1 = [
{ "true": True,
"false": False },
{ "true": "True",
"false": False,
"uint16(1616)": 1616 },
{ "list": [1616, 32323232, True, -3232.0, {"TRUE":True, "FALSE":False}, [True, False] ],
"int32":32323232, "bool": True,
"LONG STRING": "123456789012345678901234567890123456789012345678901234567890",
"SHORT STRING": "1234567890" },
{ True: "true", 8: False, "false": 0 }
]
l = []
l.extend(l0)
l.append(l0)
l.extend(l1)
return l
def build_test_data(destdir):
l = get_test_data_list()
for i in range(len(l)):
# packer = msgpack.Packer()
serialized = msgpack.dumps(l[i])
f = open(os.path.join(destdir, str(i) + '.msgpack.golden'), 'wb')
f.write(serialized)
f.close()
serialized = cbor.dumps(l[i])
f = open(os.path.join(destdir, str(i) + '.cbor.golden'), 'wb')
f.write(serialized)
f.close()
def doRpcServer(port, stopTimeSec):
class EchoHandler(object):
def Echo123(self, msg1, msg2, msg3):
return ("1:%s 2:%s 3:%s" % (msg1, msg2, msg3))
def EchoStruct(self, msg):
return ("%s" % msg)
addr = msgpackrpc.Address('localhost', port)
server = msgpackrpc.Server(EchoHandler())
server.listen(addr)
# run thread to stop it after stopTimeSec seconds if > 0
if stopTimeSec > 0:
def myStopRpcServer():
server.stop()
t = threading.Timer(stopTimeSec, myStopRpcServer)
t.start()
server.start()
def doRpcClientToPythonSvc(port):
address = msgpackrpc.Address('localhost', port)
client = msgpackrpc.Client(address, unpack_encoding='utf-8')
print client.call("Echo123", "A1", "B2", "C3")
print client.call("EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doRpcClientToGoSvc(port):
# print ">>>> port: ", port, " <<<<<"
address = msgpackrpc.Address('localhost', port)
client = msgpackrpc.Client(address, unpack_encoding='utf-8')
print client.call("TestRpcInt.Echo123", ["A1", "B2", "C3"])
print client.call("TestRpcInt.EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doMain(args):
if len(args) == 2 and args[0] == "testdata":
build_test_data(args[1])
elif len(args) == 3 and args[0] == "rpc-server":
doRpcServer(int(args[1]), int(args[2]))
elif len(args) == 2 and args[0] == "rpc-client-python-service":
doRpcClientToPythonSvc(int(args[1]))
elif len(args) == 2 and args[0] == "rpc-client-go-service":
doRpcClientToGoSvc(int(args[1]))
else:
print("Usage: test.py " +
"[testdata|rpc-server|rpc-client-python-service|rpc-client-go-service] ...")
if __name__ == "__main__":
doMain(sys.argv[1:])
| apache-2.0 |
fkie-cad/FACT_core | src/test/integration/intercom/test_intercom_common.py | 1 | 1550 | import gc
import pickle
import unittest
from tempfile import TemporaryDirectory
from intercom.common_mongo_binding import InterComListener
from storage.MongoMgr import MongoMgr
from test.common_helper import get_config_for_testing
TMP_DIR = TemporaryDirectory(prefix='fact_test_')
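# MongoDB caps a single BSON document at 16 MiB; larger payloads must go
# through GridFS, which is what test_big_file below exercises.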
BSON_MAX_FILE_SIZE = 16 * 1024**2
class TestInterComListener(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.config = get_config_for_testing(temp_dir=TMP_DIR)
cls.mongo_server = MongoMgr(config=cls.config)
def setUp(self):
self.generic_listener = InterComListener(config=self.config)
def tearDown(self):
for item in self.generic_listener.connections.keys():
self.generic_listener.client.drop_database(self.generic_listener.connections[item]['name'])
self.generic_listener.shutdown()
gc.collect()
@classmethod
def tearDownClass(cls):
cls.mongo_server.shutdown()
TMP_DIR.cleanup()
def check_file(self, binary):
self.generic_listener.connections[self.generic_listener.CONNECTION_TYPE]['fs'].put(pickle.dumps(binary))
task = self.generic_listener.get_next_task()
self.assertEqual(task, binary)
another_task = self.generic_listener.get_next_task()
self.assertIsNone(another_task, 'task not deleted')
def test_small_file(self):
self.check_file(b'this is a test')
def test_big_file(self):
large_test_data = b'\x00' * (BSON_MAX_FILE_SIZE + 1024)
self.check_file(large_test_data)
| gpl-3.0 |
conyx/isjtests | datamodel/configuration.py | 1 | 3996 | #!/usr/bin/env python3
"""
Data model for ISJ tests.
Configuration data representation.
Author:
Tomas Bambas xbamba01@stud.fit.vutbr.cz
"""
class Configuration:
def __init__(self):
self.set_mode("web")
self.set_verbose(True)
self.set_logging(False)
self.set_logfile("isjtests.log")
self.set_python2path("python")
self.set_python3path("python3")
self.set_ruby_path("ruby")
self.set_intern_python2path("python")
self.set_intern_python3path("python3")
self.set_intern_ruby_path("ruby")
self.set_output_path("output")
self.set_students_csv_file_path("students.csv")
self.set_tasks_paths([("tasks", 1, 10)])
self.set_webserver_port(8866)
self.set_webserver_pass("password")
self.set_evaluator_strictness(0)
self.set_evaluator_timeout(10)
self.set_anonymous(False)
self.set_self_learning(False)
def set_mode(self, mode):
self._mode = mode
def get_mode(self):
return self._mode
def set_verbose(self, verbose):
self._verbose = verbose
def is_verbose(self):
return self._verbose
def set_logging(self, logging):
self._logging = logging
def is_logging(self):
return self._logging
def set_logfile(self, logfile_path):
self._logfile = logfile_path
def get_logfile(self):
return self._logfile
def set_python2path(self, path):
self._python2path = path
def get_python2path(self):
return self._python2path
def set_python3path(self, path):
self._python3path = path
def get_python3path(self):
return self._python3path
def set_ruby_path(self, path):
self._ruby_path = path
def get_ruby_path(self):
return self._ruby_path
def set_intern_python2path(self, path):
self._intern_python2path = path
def get_intern_python2path(self):
return self._intern_python2path
def set_intern_python3path(self, path):
self._intern_python3path = path
def get_intern_python3path(self):
return self._intern_python3path
def set_intern_ruby_path(self, path):
self._intern_ruby_path = path
def get_intern_ruby_path(self):
return self._intern_ruby_path
def set_output_path(self, path):
self._output_path = path
def get_output_path(self):
return self._output_path
def set_students_csv_file_path(self, path):
self._students_csv_file_path = path
def get_students_csv_file_path(self):
return self._students_csv_file_path
def set_tasks_paths(self, paths):
self._tasks_paths = paths
def get_tasks_paths(self):
return self._tasks_paths
def set_webserver_port(self, port):
self._webserver_port = port
def get_webserver_port(self):
return self._webserver_port
def set_webserver_pass(self, password):
self._webserver_password = password
def get_webserver_pass(self):
return self._webserver_password
def set_evaluator_strictness(self, strictness):
self._strictness = strictness
def get_evaluator_strictness(self):
return self._strictness
def set_evaluator_timeout(self, timeout):
self._timeout = timeout
def get_evaluator_timeout(self):
return self._timeout
def set_anonymous(self, anonymous):
self._anonymous = anonymous
def is_anonymous(self):
return self._anonymous
def set_self_learning(self, sl):
self._self_learning = sl
def is_self_learning(self):
return self._self_learning
| bsd-3-clause |
PingaxAnalytics/koob_beta | signup/migrations/0001_initial.py | 1 | 1445 | # -*- coding: utf-8 -*-
# Generated by Django 1.10.3 on 2016-12-01 19:37
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Stores',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name='Users',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('password', models.CharField(max_length=128)),
('last_login', models.DateTimeField(blank=True, null=True)),
('is_superuser', models.IntegerField(null=True)),
('username', models.CharField(max_length=150, unique=True)),
('first_name', models.CharField(max_length=30)),
('last_name', models.CharField(max_length=30)),
('email', models.CharField(max_length=254)),
('date_joined', models.DateTimeField(null=True)),
('client_id', models.CharField(max_length=254)),
('client_secret', models.CharField(max_length=254)),
],
),
]
| apache-2.0 |
ThomasCr/smarthome | bin/smarthome.py | 2 | 10866 | #!/usr/bin/env python3
# vim: set encoding=utf-8 tabstop=4 softtabstop=4 shiftwidth=4 expandtab
#########################################################################
# Copyright 2011-2014 Marcus Popp marcus@popp.mx
# Copyright 2016 Christian Strassburg c.strassburg@gmx.de
# Copyright 2016- Martin Sinn m.sinn@gmx.de
# Copyright 2020- Bernd Meiners bernd.meiners@mail.de
#########################################################################
# This file is part of SmartHomeNG.
# https://github.com/smarthomeNG/smarthome
#
# SmartHomeNG is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# SmartHomeNG is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with SmartHomeNG. If not, see <http://www.gnu.org/licenses/>.
#########################################################################
#####################################################################
# Check minimum Python Version
#####################################################################
import sys
if sys.hexversion < 0x03060000:
print("Sorry your python interpreter ({0}.{1}) is too old. Please update to 3.6 or newer.".format(sys.version_info[0], sys.version_info[1]))
exit()
#####################################################################
# prevent user root
#####################################################################
import os
if not os.name == 'nt':
# Check only, if not running under Windows
if os.getegid() == 0:
print("SmartHomeNG should not run as root")
# exit()
#####################################################################
# Import minimum set of Python Core Modules
#####################################################################
import argparse
import locale
# locale.getpreferredencoding() gives back platforms default file encoding
# this should be UTF-8 for linux and
# for windows mostly cp1252 (which is bad for SHNG's UTF-8 files)
# https://stackoverflow.com/questions/31469707/changing-the-locale-preferred-encoding-in-python-3-in-windows
#####################################################################
# Read command line arguments
#####################################################################
# argument handling here, because pip3_command is needed before all imports are done
argparser = argparse.ArgumentParser()
arggroup = argparser.add_mutually_exclusive_group()
argparser.add_argument('-p', '--pip3_command', help='set path of pip3 command, if it is not automatically found')
arggroup.add_argument('-i', '--interactive', help='open an interactive shell with tab completion and with verbose logging to the logfile', action='store_true')
arggroup.add_argument('-l', '--logics', help='reload all logics', action='store_true')
arggroup.add_argument('-r', '--restart', help='restart SmartHomeNG', action='store_true')
arggroup.add_argument('-s', '--stop', help='stop SmartHomeNG', action='store_true')
arggroup.add_argument('-V', '--version', help='show SmartHomeNG version', action='store_true')
arggroup.add_argument('--start', help='start SmartHomeNG and detach from console (default)', default=True, action='store_true')
arggroup.add_argument('-cb', '--create_backup', help='create backup of SmartHomeNG configuration (yaml configuration only)', action='store_true')
arggroup.add_argument('-cbt', '--create_backup_t', help='create backup of SmartHomeNG configuration with a timestamp in the filename', action='store_true')
arggroup.add_argument('-rb', '--restore_backup', help='restore backup of configuration to SmartHomeNG installation (yaml configuration only). CAUTION: Existing configuration is overwritten!', action='store_true')
argparser.add_argument('-c', '--config_dir', help='use external config dir (should contain "etc", "logics" and "items" subdirectories)')
arggroup.add_argument('-v', '--verbose', help='verbose (info output) logging to the logfile - DEPRECATED use logging-configuration', action='store_true')
arggroup.add_argument('-d', '--debug', help='stay in the foreground with verbose output - DEPRECATED use logging-configuration', action='store_true')
arggroup.add_argument('-f', '--foreground', help='stay in the foreground', action='store_true')
arggroup.add_argument('-q', '--quiet', help='reduce logging to the logfile - DEPRECATED use logging-configuration', action='store_true')
args = argparser.parse_args()
#####################################################################
# Import Python Core Modules
#####################################################################
import signal
import subprocess
import threading
import time
try:
import psutil
except:
pass
#####################################################################
# Base
#####################################################################
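# Resolve the SmartHomeNG base directory (two levels above bin/smarthome.py)
# and put it on sys.path so the lib.* packages are importable.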
BASE = os.path.sep.join(os.path.realpath(__file__).split(os.path.sep)[:-2])
sys.path.insert(0, BASE)
PIDFILE = os.path.join(BASE, 'var', 'run', 'smarthome.pid')
# Only used for Version Check in Plugins to decide if a logger must be explicitly declared
import bin.shngversion
VERSION = bin.shngversion.get_shng_version()
#############################################################
# test if needed Python packages are installed
# - core requirements = libs
from lib.shpypi import Shpypi
shpypi = Shpypi.get_instance()
if shpypi is None:
shpypi = Shpypi(base=BASE)
core_reqs = shpypi.test_core_requirements(logging=False, pip3_command=args.pip3_command)
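# test_core_requirements() returns 0 when missing packages were just installed
# (SmartHomeNG is restarted so they get picked up), -1 when installation
# failed, and any other value lets startup continue below.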
if core_reqs == 0:
print("Starting SmartHomeNG again...")
python_bin = sys.executable
if ' ' in python_bin:
python_bin = '"'+python_bin+'"'
command = python_bin + ' ' + os.path.join(BASE, 'bin', 'smarthome.py')
try:
p = subprocess.Popen(command, shell=True)
except subprocess.SubprocessError as e:
print("Restart command '{}' failed with error {}".format(command,e))
time.sleep(10)
print()
exit(0)
elif core_reqs == -1:
print("ERROR: Unable to install core requirements")
print("Use the commandline option --pip3_command to specify the path to the command")
print()
exit(1)
#####################################################################
# Import SmartHomeNG Modules
#####################################################################
#import lib.config
#import lib.connection
import lib.daemon
#import lib.item
#import lib.log
#import lib.logic
#import lib.module
#import lib.plugin
#import lib.scene
#import lib.scheduler
#import lib.tools
#import lib.orb
import lib.backup
#import lib.translation
#import lib.shyaml
from lib.smarthome import SmartHome
#####################################################################
# Globals
#####################################################################
MODE = 'default'
#TZ = gettz('UTC')
#####################################################################
# Private Methods
#####################################################################
def _reload_logics():
"""
Reload logics through the commandline with option -l
"""
pid = lib.daemon.read_pidfile(PIDFILE)
if pid:
os.kill(pid, signal.SIGHUP)
#####################################################################
# Main
#####################################################################
if __name__ == '__main__':
try:
if locale.getdefaultlocale() == (None, None):
locale.setlocale(locale.LC_ALL, 'C')
else:
locale.setlocale(locale.LC_ALL, '')
except:
locale.setlocale(locale.LC_ALL, 'en_US.UTF-8')
extern_conf_dir = BASE
if args.config_dir is not None:
extern_conf_dir = os.path.normpath(args.config_dir)
lib.backup.make_backup_directories(BASE)
if args.interactive:
MODE = 'interactive'
import code
import rlcompleter # noqa
try:
import readline
except:
print("ERROR: module 'readline' is not installed. Without this module the interactive mode can't be used")
exit(1)
import atexit
# history file
histfile = os.path.join(os.environ['HOME'], '.history.python')
try:
readline.read_history_file(histfile)
except IOError:
pass
atexit.register(readline.write_history_file, histfile)
readline.parse_and_bind("tab: complete")
sh = SmartHome(MODE=MODE, extern_conf_dir=extern_conf_dir)
_sh_thread = threading.Thread(target=sh.start)
_sh_thread.start()
shell = code.InteractiveConsole(locals())
shell.interact()
exit(0)
elif args.logics:
_reload_logics()
exit(0)
elif args.version:
import bin.shngversion
VERSION = bin.shngversion.get_shng_version()
print("{}".format(VERSION))
exit(0)
elif args.stop:
lib.daemon.kill(PIDFILE, 30)
exit(0)
elif args.restart:
time.sleep(5)
lib.daemon.kill(PIDFILE, 30)
pass
elif args.debug:
MODE = 'debug'
elif args.quiet:
pass
elif args.verbose:
MODE = 'verbose'
pass
elif args.foreground:
MODE = 'foreground'
pass
elif args.create_backup:
fn = lib.backup.create_backup(extern_conf_dir, BASE)
if fn:
print("Backup of configuration created at: \n{}".format(fn))
exit(0)
elif args.create_backup_t:
fn = lib.backup.create_backup(extern_conf_dir, BASE, filename_with_timestamp=True)
if fn:
print("Backup of configuration created at: \n{}".format(fn))
exit(0)
elif args.restore_backup:
fn = lib.backup.restore_backup(extern_conf_dir, BASE)
if fn is not None:
print("Configuration has been restored from: \n{}".format(fn))
print("Restart SmartHomeNG to use the restored configuration")
exit(0)
# check for pid file
if lib.daemon.check_sh_is_running(PIDFILE):
print("SmartHomeNG already running with pid {}".format(lib.daemon.read_pidfile(PIDFILE)))
print("Run 'smarthome.py -s' to stop it or 'smarthome.py -r' to restart it.")
exit()
if MODE == 'debug':
lib.daemon.write_pidfile(psutil.Process().pid, PIDFILE)
# Starting SmartHomeNG
sh = SmartHome(MODE=MODE, extern_conf_dir=extern_conf_dir)
sh.start()
| gpl-3.0 |
Akshay0724/scikit-learn | sklearn/ensemble/voting_classifier.py | 19 | 9888 | """
Soft Voting/Majority Rule classifier.
This module contains a Soft Voting/Majority Rule classifier for
classification estimators.
"""
# Authors: Sebastian Raschka <se.raschka@gmail.com>,
# Gilles Louppe <g.louppe@gmail.com>
#
# License: BSD 3 clause
import numpy as np
from ..base import BaseEstimator
from ..base import ClassifierMixin
from ..base import TransformerMixin
from ..base import clone
from ..preprocessing import LabelEncoder
from ..externals import six
from ..externals.joblib import Parallel, delayed
from ..utils.validation import has_fit_parameter, check_is_fitted
def _parallel_fit_estimator(estimator, X, y, sample_weight):
"""Private function used to fit an estimator within a job."""
if sample_weight is not None:
estimator.fit(X, y, sample_weight)
else:
estimator.fit(X, y)
return estimator
class VotingClassifier(BaseEstimator, ClassifierMixin, TransformerMixin):
"""Soft Voting/Majority Rule classifier for unfitted estimators.
.. versionadded:: 0.17
Read more in the :ref:`User Guide <voting_classifier>`.
Parameters
----------
estimators : list of (string, estimator) tuples
Invoking the ``fit`` method on the ``VotingClassifier`` will fit clones
of those original estimators that will be stored in the class attribute
`self.estimators_`.
voting : str, {'hard', 'soft'} (default='hard')
If 'hard', uses predicted class labels for majority rule voting.
Else if 'soft', predicts the class label based on the argmax of
the sums of the predicted probabilities, which is recommended for
an ensemble of well-calibrated classifiers.
weights : array-like, shape = [n_classifiers], optional (default=`None`)
Sequence of weights (`float` or `int`) to weight the occurrences of
predicted class labels (`hard` voting) or class probabilities
before averaging (`soft` voting). Uses uniform weights if `None`.
n_jobs : int, optional (default=1)
The number of jobs to run in parallel for ``fit``.
If -1, then the number of jobs is set to the number of cores.
Attributes
----------
estimators_ : list of classifiers
The collection of fitted sub-estimators.
classes_ : array-like, shape = [n_predictions]
The classes labels.
Examples
--------
>>> import numpy as np
>>> from sklearn.linear_model import LogisticRegression
>>> from sklearn.naive_bayes import GaussianNB
>>> from sklearn.ensemble import RandomForestClassifier, VotingClassifier
>>> clf1 = LogisticRegression(random_state=1)
>>> clf2 = RandomForestClassifier(random_state=1)
>>> clf3 = GaussianNB()
>>> X = np.array([[-1, -1], [-2, -1], [-3, -2], [1, 1], [2, 1], [3, 2]])
>>> y = np.array([1, 1, 1, 2, 2, 2])
>>> eclf1 = VotingClassifier(estimators=[
... ('lr', clf1), ('rf', clf2), ('gnb', clf3)], voting='hard')
>>> eclf1 = eclf1.fit(X, y)
>>> print(eclf1.predict(X))
[1 1 1 2 2 2]
>>> eclf2 = VotingClassifier(estimators=[
... ('lr', clf1), ('rf', clf2), ('gnb', clf3)],
... voting='soft')
>>> eclf2 = eclf2.fit(X, y)
>>> print(eclf2.predict(X))
[1 1 1 2 2 2]
>>> eclf3 = VotingClassifier(estimators=[
... ('lr', clf1), ('rf', clf2), ('gnb', clf3)],
... voting='soft', weights=[2,1,1])
>>> eclf3 = eclf3.fit(X, y)
>>> print(eclf3.predict(X))
[1 1 1 2 2 2]
>>>
"""
def __init__(self, estimators, voting='hard', weights=None, n_jobs=1):
self.estimators = estimators
self.named_estimators = dict(estimators)
self.voting = voting
self.weights = weights
self.n_jobs = n_jobs
def fit(self, X, y, sample_weight=None):
""" Fit the estimators.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vectors, where n_samples is the number of samples and
n_features is the number of features.
y : array-like, shape = [n_samples]
Target values.
sample_weight : array-like, shape = [n_samples] or None
Sample weights. If None, then samples are equally weighted.
Note that this is supported only if all underlying estimators
support sample weights.
Returns
-------
self : object
"""
if isinstance(y, np.ndarray) and len(y.shape) > 1 and y.shape[1] > 1:
raise NotImplementedError('Multilabel and multi-output'
' classification is not supported.')
if self.voting not in ('soft', 'hard'):
raise ValueError("Voting must be 'soft' or 'hard'; got (voting=%r)"
% self.voting)
if self.estimators is None or len(self.estimators) == 0:
raise AttributeError('Invalid `estimators` attribute, `estimators`'
' should be a list of (string, estimator)'
' tuples')
if (self.weights is not None and
len(self.weights) != len(self.estimators)):
raise ValueError('Number of classifiers and weights must be equal'
'; got %d weights, %d estimators'
% (len(self.weights), len(self.estimators)))
if sample_weight is not None:
for name, step in self.estimators:
if not has_fit_parameter(step, 'sample_weight'):
raise ValueError('Underlying estimator \'%s\' does not support'
' sample weights.' % name)
self.le_ = LabelEncoder()
self.le_.fit(y)
self.classes_ = self.le_.classes_
self.estimators_ = []
transformed_y = self.le_.transform(y)
self.estimators_ = Parallel(n_jobs=self.n_jobs)(
delayed(_parallel_fit_estimator)(clone(clf), X, transformed_y,
sample_weight)
for _, clf in self.estimators)
return self
def predict(self, X):
""" Predict class labels for X.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vectors, where n_samples is the number of samples and
n_features is the number of features.
Returns
----------
maj : array-like, shape = [n_samples]
Predicted class labels.
"""
check_is_fitted(self, 'estimators_')
if self.voting == 'soft':
maj = np.argmax(self.predict_proba(X), axis=1)
else: # 'hard' voting
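# Weighted majority vote: np.bincount tallies each sample's per-classifier
# predictions (scaled by the optional weights) and argmax picks the winner.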
predictions = self._predict(X)
maj = np.apply_along_axis(lambda x:
np.argmax(np.bincount(x,
weights=self.weights)),
axis=1,
arr=predictions.astype('int'))
maj = self.le_.inverse_transform(maj)
return maj
def _collect_probas(self, X):
"""Collect results from clf.predict calls. """
return np.asarray([clf.predict_proba(X) for clf in self.estimators_])
def _predict_proba(self, X):
"""Predict class probabilities for X in 'soft' voting """
if self.voting == 'hard':
raise AttributeError("predict_proba is not available when"
" voting=%r" % self.voting)
check_is_fitted(self, 'estimators_')
avg = np.average(self._collect_probas(X), axis=0, weights=self.weights)
return avg
@property
def predict_proba(self):
"""Compute probabilities of possible outcomes for samples in X.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vectors, where n_samples is the number of samples and
n_features is the number of features.
Returns
----------
avg : array-like, shape = [n_samples, n_classes]
Weighted average probability for each class per sample.
"""
return self._predict_proba
def transform(self, X):
"""Return class labels or probabilities for X for each estimator.
Parameters
----------
X : {array-like, sparse matrix}, shape = [n_samples, n_features]
Training vectors, where n_samples is the number of samples and
n_features is the number of features.
Returns
-------
If `voting='soft'`:
array-like = [n_classifiers, n_samples, n_classes]
Class probabilities calculated by each classifier.
If `voting='hard'`:
array-like = [n_samples, n_classifiers]
Class labels predicted by each classifier.
"""
check_is_fitted(self, 'estimators_')
if self.voting == 'soft':
return self._collect_probas(X)
else:
return self._predict(X)
def get_params(self, deep=True):
"""Return estimator parameter names for GridSearch support"""
if not deep:
return super(VotingClassifier, self).get_params(deep=False)
else:
out = super(VotingClassifier, self).get_params(deep=False)
out.update(self.named_estimators.copy())
for name, step in six.iteritems(self.named_estimators):
for key, value in six.iteritems(step.get_params(deep=True)):
out['%s__%s' % (name, key)] = value
return out
def _predict(self, X):
"""Collect results from clf.predict calls. """
return np.asarray([clf.predict(X) for clf in self.estimators_]).T
| bsd-3-clause |
tiborsimko/invenio-redirector | invenio_redirector/models.py | 3 | 2489 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2013, 2014, 2015 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""GoTo database models."""
# External imports
import datetime
# General imports.
from invenio_ext.sqlalchemy import db
from sqlalchemy.orm import validates
from .registry import redirect_methods
# Create your models here.
class Goto(db.Model):
"""Represents a Goto record."""
__tablename__ = 'goto'
label = db.Column(db.String(150), primary_key=True)
plugin = db.Column(db.String(150), nullable=False)
_parameters = db.Column(db.JSON, nullable=False, default={},
name="parameters")
creation_date = db.Column(db.DateTime, default=datetime.datetime.now,
nullable=False, index=True)
modification_date = db.Column(db.DateTime, default=datetime.datetime.now,
onupdate=datetime.datetime.now,
nullable=False, index=True)
@validates('plugin')
def validate_plugin(self, key, plugin):
"""Validate plugin name."""
if plugin not in redirect_methods:
raise ValueError("%s plugin does not exist" % plugin)
return plugin
@db.hybrid_property
def parameters(self):
"""Get parameters method."""
return self._parameters
@parameters.setter
def parameters(self, value):
"""Set parameters method."""
self._parameters = value or {}
def to_dict(self):
"""Return a dict representation of Goto."""
return {'label': self.label,
'plugin': self.plugin,
'parameters': self.parameters,
'creation_date': self.creation_date,
'modification_date': self.modification_date}
__all__ = ('Goto',)
| gpl-2.0 |
Argonne-National-Laboratory/ProMC | protobuf/python/google/protobuf/internal/enum_type_wrapper.py | 292 | 3541 | # Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A simple wrapper around enum types to expose utility functions.
Instances are created as properties with the same name as the enum they wrap
on proto classes. For usage, see:
reflection_test.py
"""
__author__ = 'rabsatt@google.com (Kevin Rabsatt)'
class EnumTypeWrapper(object):
"""A utility for finding the names of enum values."""
DESCRIPTOR = None
def __init__(self, enum_type):
"""Inits EnumTypeWrapper with an EnumDescriptor."""
self._enum_type = enum_type
self.DESCRIPTOR = enum_type
def Name(self, number):
"""Returns a string containing the name of an enum value."""
if number in self._enum_type.values_by_number:
return self._enum_type.values_by_number[number].name
raise ValueError('Enum %s has no name defined for value %d' % (
self._enum_type.name, number))
def Value(self, name):
"""Returns the value coresponding to the given enum name."""
if name in self._enum_type.values_by_name:
return self._enum_type.values_by_name[name].number
raise ValueError('Enum %s has no value defined for name %s' % (
self._enum_type.name, name))
def keys(self):
"""Return a list of the string names in the enum.
These are returned in the order they were defined in the .proto file.
"""
return [value_descriptor.name
for value_descriptor in self._enum_type.values]
def values(self):
"""Return a list of the integer values in the enum.
These are returned in the order they were defined in the .proto file.
"""
return [value_descriptor.number
for value_descriptor in self._enum_type.values]
def items(self):
"""Return a list of the (name, value) pairs of the enum.
These are returned in the order they were defined in the .proto file.
"""
return [(value_descriptor.name, value_descriptor.number)
for value_descriptor in self._enum_type.values]
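# Example (hypothetical generated code): for a proto enum `enum Color { RED = 0; }`,
# the wrapper exposed as my_pb2.Color would behave roughly like:
#   my_pb2.Color.Name(0) -> 'RED'
#   my_pb2.Color.Value('RED') -> 0
#   my_pb2.Color.items() -> [('RED', 0)]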
| lgpl-3.0 |
jbenden/ansible | lib/ansible/modules/cloud/amazon/sts_assume_role.py | 1 | 5188 | #!/usr/bin/python
# Copyright: Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['stableinterface'],
'supported_by': 'curated'}
DOCUMENTATION = '''
---
module: sts_assume_role
short_description: Assume a role using AWS Security Token Service and obtain temporary credentials
description:
- Assume a role using AWS Security Token Service and obtain temporary credentials
version_added: "2.0"
author: Boris Ekelchik (@bekelchik)
options:
role_arn:
description:
- The Amazon Resource Name (ARN) of the role that the caller is
assuming (http://docs.aws.amazon.com/IAM/latest/UserGuide/Using_Identifiers.html#Identifiers_ARNs)
required: true
role_session_name:
description:
- Name of the role's session - will be used by CloudTrail
required: true
policy:
description:
- Supplemental policy to use in addition to assumed role's policies.
required: false
default: null
duration_seconds:
description:
- The duration, in seconds, of the role session. The value can range from 900 seconds (15 minutes) to 3600 seconds (1 hour).
By default, the value is set to 3600 seconds.
required: false
default: null
external_id:
description:
- A unique identifier that is used by third parties to assume a role in their customers' accounts.
required: false
default: null
mfa_serial_number:
description:
- The identification number of the MFA device that is associated with the user who is making the AssumeRole call.
required: false
default: null
mfa_token:
description:
- The value provided by the MFA device, if the trust policy of the role being assumed requires MFA.
required: false
default: null
notes:
- In order to use the assumed role in a following playbook task you must pass the access_key, access_secret and access_token
extends_documentation_fragment:
- aws
- ec2
'''
EXAMPLES = '''
# Note: These examples do not set authentication details, see the AWS Guide for details.
# Assume an existing role (more details: http://docs.aws.amazon.com/STS/latest/APIReference/API_AssumeRole.html)
sts_assume_role:
role_arn: "arn:aws:iam::123456789012:role/someRole"
role_session_name: "someRoleSession"
register: assumed_role
# Use the assumed role above to tag an instance in account 123456789012
ec2_tag:
aws_access_key: "{{ assumed_role.sts_creds.access_key }}"
aws_secret_key: "{{ assumed_role.sts_creds.secret_key }}"
security_token: "{{ assumed_role.sts_creds.session_token }}"
resource: i-xyzxyz01
state: present
tags:
MyNewTag: value
'''
try:
import boto.sts
from boto.exception import BotoServerError
HAS_BOTO = True
except ImportError:
HAS_BOTO = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ec2 import AnsibleAWSError, connect_to_aws, ec2_argument_spec, get_aws_connection_info
def assume_role_policy(connection, module):
role_arn = module.params.get('role_arn')
role_session_name = module.params.get('role_session_name')
policy = module.params.get('policy')
duration_seconds = module.params.get('duration_seconds')
external_id = module.params.get('external_id')
mfa_serial_number = module.params.get('mfa_serial_number')
mfa_token = module.params.get('mfa_token')
changed = False
try:
assumed_role = connection.assume_role(role_arn, role_session_name, policy, duration_seconds, external_id, mfa_serial_number, mfa_token)
changed = True
except BotoServerError as e:
module.fail_json(msg=e)
module.exit_json(changed=changed, sts_creds=assumed_role.credentials.__dict__, sts_user=assumed_role.user.__dict__)
def main():
argument_spec = ec2_argument_spec()
argument_spec.update(
dict(
role_arn = dict(required=True),
role_session_name = dict(required=True),
duration_seconds = dict(required=False, default=None, type='int'),
external_id = dict(required=False, default=None),
policy = dict(required=False, default=None),
mfa_serial_number = dict(required=False, default=None),
mfa_token = dict(required=False, default=None)
)
)
module = AnsibleModule(argument_spec=argument_spec)
if not HAS_BOTO:
module.fail_json(msg='boto required for this module')
region, ec2_url, aws_connect_params = get_aws_connection_info(module)
if region:
try:
connection = connect_to_aws(boto.sts, region, **aws_connect_params)
except (boto.exception.NoAuthHandlerFound, AnsibleAWSError) as e:
module.fail_json(msg=str(e))
else:
module.fail_json(msg="region must be specified")
try:
assume_role_policy(connection, module)
except BotoServerError as e:
module.fail_json(msg=e)
if __name__ == '__main__':
main()
| gpl-3.0 |
cyanut/django-haystack | example_project/settings.py | 11 | 1128 | # encoding: utf-8
from __future__ import absolute_import, division, print_function, unicode_literals
import os
# All the normal settings apply. What's included here are the bits you'll have
# to customize.
# Add Haystack to INSTALLED_APPS. You can do this by simply placing in your list.
INSTALLED_APPS = INSTALLED_APPS + [
'haystack',
]
HAYSTACK_CONNECTIONS = {
'default': {
# For Solr:
'ENGINE': 'haystack.backends.solr_backend.SolrEngine',
'URL': 'http://localhost:9001/solr/example',
'TIMEOUT': 60 * 5,
'INCLUDE_SPELLING': True,
},
'whoosh': {
# For Whoosh:
'ENGINE': 'haystack.backends.whoosh_backend.WhooshEngine',
'PATH': os.path.join(os.path.dirname(__file__), 'whoosh_index'),
'INCLUDE_SPELLING': True,
},
'simple': {
# For Simple:
'ENGINE': 'haystack.backends.simple_backend.SimpleEngine',
},
'xapian': {
# For Xapian (requires the third-party install):
'ENGINE': 'xapian_backend.XapianEngine',
'PATH': os.path.join(os.path.dirname(__file__), 'xapian_index'),
}
}
| bsd-3-clause |
ribag/ganeti-experiments | test/py/ganeti.jstore_unittest.py | 7 | 3251 | #!/usr/bin/python
#
# Copyright (C) 2012 Google Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301, USA.
"""Script for testing ganeti.jstore"""
import re
import unittest
import random
from ganeti import constants
from ganeti import utils
from ganeti import compat
from ganeti import errors
from ganeti import jstore
import testutils
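# Archived jobs are bucketed by job_id // 10000: directory "0" holds ids
# 0-9999, "1" holds 10000-19999, "29" holds 290000-299999, and so on.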
class TestFormatJobID(testutils.GanetiTestCase):
def test(self):
self.assertEqual(jstore.FormatJobID(0), 0)
self.assertEqual(jstore.FormatJobID(30498), 30498)
self.assertEqual(jstore.FormatJobID(319472592764518609),
319472592764518609)
def testErrors(self):
for i in [-1, -2288, -9667, -0.205641, 0.0, 0.1, 13041.4472, "", "Hello",
[], [1], {}]:
self.assertRaises(errors.ProgrammerError, jstore.FormatJobID, i)
class TestGetArchiveDirectory(testutils.GanetiTestCase):
def test(self):
tests = [
("0", [0, 1, 3343, 9712, 9999]),
("1", [10000, 13188, 19999]),
("29", [290000, 296041, 298796, 299999]),
("30", [300000, 309384]),
]
for (exp, job_ids) in tests:
for job_id in job_ids:
fmt_id = jstore.FormatJobID(job_id)
self.assertEqual(jstore.GetArchiveDirectory(fmt_id), exp)
self.assertEqual(jstore.ParseJobId(fmt_id), job_id)
def testErrors(self):
self.assertRaises(errors.ParameterError, jstore.GetArchiveDirectory, None)
self.assertRaises(errors.ParameterError, jstore.GetArchiveDirectory, "foo")
class TestParseJobId(testutils.GanetiTestCase):
def test(self):
self.assertEqual(jstore.ParseJobId(29981), 29981)
self.assertEqual(jstore.ParseJobId("12918"), 12918)
def testErrors(self):
self.assertRaises(errors.ParameterError, jstore.ParseJobId, "")
self.assertRaises(errors.ParameterError, jstore.ParseJobId, "MXXI")
self.assertRaises(errors.ParameterError, jstore.ParseJobId, [])
class TestReadNumericFile(testutils.GanetiTestCase):
def testNonExistingFile(self):
result = jstore._ReadNumericFile("/tmp/this/file/does/not/exist")
self.assertTrue(result is None)
def testValidFile(self):
tmpfile = self._CreateTempFile()
for (data, exp) in [("123", 123), ("0\n", 0)]:
utils.WriteFile(tmpfile, data=data)
result = jstore._ReadNumericFile(tmpfile)
self.assertEqual(result, exp)
def testInvalidContent(self):
tmpfile = self._CreateTempFile()
utils.WriteFile(tmpfile, data="{wrong content")
self.assertRaises(errors.JobQueueError, jstore._ReadNumericFile, tmpfile)
if __name__ == "__main__":
testutils.GanetiTestProgram()
| gpl-2.0 |
zahodi/ansible | lib/ansible/modules/network/nxos/nxos_command.py | 33 | 8801 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'core',
'version': '1.0'}
DOCUMENTATION = """
---
module: nxos_command
version_added: "2.1"
author: "Peter Sprygada (@privateip)"
short_description: Run arbitrary command on Cisco NXOS devices
description:
- Sends an arbitrary command to an NXOS node and returns the results
read from the device. This module includes an
argument that will cause the module to wait for a specific condition
before returning or timing out if the condition is not met.
extends_documentation_fragment: nxos
options:
commands:
description:
- The commands to send to the remote NXOS device over the
configured provider. The resulting output from the command
is returned. If the I(wait_for) argument is provided, the
        module does not return until the condition is satisfied or
        the number of retries has expired.
- The I(commands) argument also accepts an alternative form
that allows for complex values that specify the command
to run and the output format to return. This can be done
on a command by command basis. The complex argument supports
the keywords C(command) and C(output) where C(command) is the
command to run and C(output) is one of 'text' or 'json'.
required: true
wait_for:
description:
- Specifies what to evaluate from the output of the command
and what conditionals to apply. This argument will cause
the task to wait for a particular conditional to be true
before moving forward. If the conditional is not true
by the configured retries, the task fails. See examples.
required: false
default: null
aliases: ['waitfor']
version_added: "2.2"
match:
description:
- The I(match) argument is used in conjunction with the
I(wait_for) argument to specify the match policy. Valid
values are C(all) or C(any). If the value is set to C(all)
then all conditionals in the I(wait_for) must be satisfied. If
the value is set to C(any) then only one of the values must be
satisfied.
required: false
default: all
version_added: "2.2"
retries:
description:
      - Specifies the number of retries a command should be tried
before it is considered failed. The command is run on the
target device every retry and evaluated against the I(wait_for)
conditionals.
required: false
default: 10
interval:
description:
- Configures the interval in seconds to wait between retries
of the command. If the command does not pass the specified
        conditional, the interval indicates how long to wait before
trying the command again.
required: false
default: 1
"""
EXAMPLES = """
# Note: examples below use the following provider dict to handle
# transport and authentication to the node.
vars:
cli:
host: "{{ inventory_hostname }}"
username: admin
password: admin
transport: cli
- name: run show version on remote devices
nxos_command:
commands: show version
provider: "{{ cli }}"
- name: run show version and check to see if output contains Cisco
nxos_command:
commands: show version
wait_for: result[0] contains Cisco
provider: "{{ cli }}"
- name: run multiple commands on remote nodes
nxos_command:
commands:
- show version
- show interfaces
provider: "{{ cli }}"
- name: run multiple commands and evaluate the output
nxos_command:
commands:
- show version
- show interfaces
wait_for:
- result[0] contains Cisco
- result[1] contains loopback0
provider: "{{ cli }}"
- name: run commands and specify the output format
nxos_command:
commands:
- command: show version
output: json
provider: "{{ cli }}"
"""
RETURN = """
stdout:
description: the set of responses from the commands
returned: always
type: list
sample: ['...', '...']
stdout_lines:
description: The value of stdout split into a list
returned: always
type: list
sample: [['...', '...'], ['...'], ['...']]
failed_conditions:
description: the conditionals that failed
returned: failed
type: list
sample: ['...', '...']
"""
import ansible.module_utils.nxos
from ansible.module_utils.basic import get_exception
from ansible.module_utils.network import NetworkModule, NetworkError
from ansible.module_utils.netcli import CommandRunner
from ansible.module_utils.netcli import FailedConditionsError
from ansible.module_utils.netcli import FailedConditionalError
from ansible.module_utils.netcli import AddCommandError, AddConditionError
VALID_KEYS = ['command', 'output', 'prompt', 'response']
def to_lines(stdout):
for item in stdout:
if isinstance(item, basestring):
item = str(item).split('\n')
yield item
def parse_commands(module):
for cmd in module.params['commands']:
if isinstance(cmd, basestring):
cmd = dict(command=cmd, output=None)
elif 'command' not in cmd:
module.fail_json(msg='command keyword argument is required')
elif cmd.get('output') not in [None, 'text', 'json']:
module.fail_json(msg='invalid output specified for command')
elif not set(cmd.keys()).issubset(VALID_KEYS):
module.fail_json(msg='unknown keyword specified')
yield cmd
def main():
spec = dict(
# { command: <str>, output: <str>, prompt: <str>, response: <str> }
commands=dict(type='list', required=True),
wait_for=dict(type='list', aliases=['waitfor']),
match=dict(default='all', choices=['any', 'all']),
retries=dict(default=10, type='int'),
interval=dict(default=1, type='int')
)
module = NetworkModule(argument_spec=spec,
supports_check_mode=True)
commands = list(parse_commands(module))
conditionals = module.params['wait_for'] or list()
warnings = list()
runner = CommandRunner(module)
for cmd in commands:
if module.check_mode and not cmd['command'].startswith('show'):
warnings.append('only show commands are supported when using '
'check mode, not executing `%s`' % cmd['command'])
else:
if cmd['command'].startswith('conf'):
module.fail_json(msg='nxos_command does not support running '
'config mode commands. Please use '
'nxos_config instead')
try:
runner.add_command(**cmd)
except AddCommandError:
exc = get_exception()
warnings.append('duplicate command detected: %s' % cmd)
try:
for item in conditionals:
runner.add_conditional(item)
except AddConditionError:
exc = get_exception()
module.fail_json(msg=str(exc), condition=exc.condition)
runner.retries = module.params['retries']
runner.interval = module.params['interval']
runner.match = module.params['match']
try:
runner.run()
except FailedConditionsError:
exc = get_exception()
module.fail_json(msg=str(exc), failed_conditions=exc.failed_conditions)
except FailedConditionalError:
exc = get_exception()
module.fail_json(msg=str(exc), failed_conditional=exc.failed_conditional)
except NetworkError:
exc = get_exception()
module.fail_json(msg=str(exc), **exc.kwargs)
result = dict(changed=False)
result['stdout'] = list()
for cmd in commands:
try:
output = runner.get_command(cmd['command'], cmd.get('output'))
except ValueError:
output = 'command not executed due to check_mode, see warnings'
result['stdout'].append(output)
result['warnings'] = warnings
result['stdout_lines'] = list(to_lines(result['stdout']))
module.exit_json(**result)
if __name__ == '__main__':
main()
| gpl-3.0 |
manasi24/jiocloud-tempest-qatempest | tempest/api/identity/admin/v3/test_projects_negative.py | 11 | 3532 | # Copyright 2013 OpenStack, LLC
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib.common.utils import data_utils
from tempest_lib import exceptions as lib_exc
from tempest.api.identity import base
from tempest import test
class ProjectsNegativeTestJSON(base.BaseIdentityV3AdminTest):
@test.attr(type=['negative'])
@test.idempotent_id('24c49279-45dd-4155-887a-cb738c2385aa')
def test_list_projects_by_unauthorized_user(self):
# Non-admin user should not be able to list projects
self.assertRaises(lib_exc.Forbidden,
self.non_admin_client.list_projects)
@test.attr(type=['negative'])
@test.idempotent_id('874c3e84-d174-4348-a16b-8c01f599561b')
def test_project_create_duplicate(self):
# Project names should be unique
project_name = data_utils.rand_name('project-dup')
project = self.client.create_project(project_name)
self.data.projects.append(project)
self.assertRaises(
lib_exc.Conflict, self.client.create_project, project_name)
@test.attr(type=['negative'])
@test.idempotent_id('8fba9de2-3e1f-4e77-812a-60cb68f8df13')
def test_create_project_by_unauthorized_user(self):
# Non-admin user should not be authorized to create a project
project_name = data_utils.rand_name('project')
self.assertRaises(
lib_exc.Forbidden, self.non_admin_client.create_project,
project_name)
@test.attr(type=['negative'])
@test.idempotent_id('7828db17-95e5-475b-9432-9a51b4aa79a9')
def test_create_project_with_empty_name(self):
# Project name should not be empty
self.assertRaises(lib_exc.BadRequest, self.client.create_project,
name='')
@test.attr(type=['negative'])
@test.idempotent_id('502b6ceb-b0c8-4422-bf53-f08fdb21e2f0')
def test_create_projects_name_length_over_64(self):
# Project name length should not be greater than 64 characters
project_name = 'a' * 65
self.assertRaises(lib_exc.BadRequest, self.client.create_project,
project_name)
@test.attr(type=['negative'])
@test.idempotent_id('8d68c012-89e0-4394-8d6b-ccd7196def97')
def test_project_delete_by_unauthorized_user(self):
# Non-admin user should not be able to delete a project
project_name = data_utils.rand_name('project')
project = self.client.create_project(project_name)
self.data.projects.append(project)
self.assertRaises(
lib_exc.Forbidden, self.non_admin_client.delete_project,
project['id'])
@test.attr(type=['negative'])
@test.idempotent_id('7965b581-60c1-43b7-8169-95d4ab7fc6fb')
def test_delete_non_existent_project(self):
# Attempt to delete a non existent project should fail
self.assertRaises(lib_exc.NotFound, self.client.delete_project,
data_utils.rand_uuid_hex())
| apache-2.0 |
alejandrogallo/dotfiles | bin/pick.py | 1 | 3226 | #! /usr/bin/env python2
# -*- coding: utf-8 -*-
import curses
import time
import os
import re
import sys
import tempfile
def terminate(main_screen):
"""
:returns: TODO
"""
    curses.nocbreak()
    main_screen.keypad(0)
curses.echo()
curses.endwin()
def init():
"""
:returns: TODO
"""
main_screen=curses.initscr()
curses.noecho()
curses.cbreak()
curses.curs_set(0)
return main_screen
def regexize(SEARCH):
"""TODO: Docstring for regexize.
:SEARCH: TODO
:returns: TODO
"""
REGEX_SEARCH=""
for i in range(len(SEARCH)):
REGEX_SEARCH+=".*"+SEARCH[i]
return REGEX_SEARCH
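
# Quick illustration of the matching behaviour (not executed):
#   regexize("pk") == ".*p.*k"
#   re.match(regexize("pk"), "pick.py", re.IGNORECASE)  # matches
#   re.match(regexize("pk"), "kp")                      # does not match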
def print_debug(main_screen, text):
"""TODO: Docstring for print_debug.
:main_screen: TODO
:text: TODO
:returns: TODO
"""
main_screen.addstr(max_height-1,0,"Debug: "+text)
# inBufferName = tempfile.mktemp()
# inBuffer = open(inBufferName, "w+")
# inBuffer.write(sys.stdin.read())
# inBuffer.close()
lineBuffer=[]
lineBuffer = sys.stdin.readlines()
# print(lineBuffer)
sys.stdin = open("/dev/tty", "r")
# a = input("put an input")
# print(a)
# sys.exit(1)
main_screen = init() # main window
max_height = int(main_screen.getmaxyx()[0])
buffer_height = max_height - 2
# lineBuffer = getHistory()
# lineBuffer = open(inBufferName)
PROMPT="Pick:"
main_screen.addstr(0, 0, PROMPT, curses.A_REVERSE)
main_screen.refresh()
SEARCH=""
# keys
ESC = 27
ENTER = 10
Ctrl_P = 16
Ctrl_N = 14
DOWN = curses.KEY_DOWN
UP = curses.KEY_UP
selected_match=1
matches=[]
i=0
print_debug(main_screen,"%s"%i)
try:
while 1:
        i += 1
        c = main_screen.getch()
# clean every time
for j in range(len(matches)):
main_screen.addstr(j+1,0," "*(len(matches[j])+2))
main_screen.addstr(0, len(PROMPT)+1," "*len(SEARCH))
# curses.KEY_UP
if c==127:
SEARCH=SEARCH[:-1]
elif c == Ctrl_N or c == DOWN:
selected_match= (selected_match+1) if selected_match-len(matches) else len(matches)
elif c == Ctrl_P or c == UP:
selected_match= (selected_match-1) if selected_match-1 else 1
elif c == ESC: # ESC
terminate(main_screen)
sys.exit(0)
elif c == ENTER: # ENTER
# curses.flash()
terminate(main_screen)
os.system(matches[selected_match-1])
sys.exit(0)
elif 126>=c>=32:
SEARCH+=chr(c)
matches=[]
main_screen.addstr(selected_match, 1,">")
main_screen.addstr(0, len(PROMPT)+1,SEARCH)
# print_debug(main_screen,str(c))
print_debug(main_screen,"%s"%i)
for j,line in enumerate(lineBuffer):
if re.match(regexize(SEARCH),line, re.IGNORECASE):
matches.append(line)
main_screen.addstr(len(matches),3,line)
if len(matches)>=buffer_height:
break
except:
terminate(main_screen)
else:
terminate(main_screen)
#vim-run: python3 % <<<"asdfsdf adsfasdf\nasdf"
#vim-run: echo -e "1\n2\n3" | python3 %
#vim-run: python3 %
| unlicense |
massot/odoo | addons/account/wizard/account_report_common_partner.py | 385 | 1999 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp.osv import fields, osv
class account_common_partner_report(osv.osv_memory):
_name = 'account.common.partner.report'
_description = 'Account Common Partner Report'
_inherit = "account.common.report"
_columns = {
'result_selection': fields.selection([('customer','Receivable Accounts'),
('supplier','Payable Accounts'),
('customer_supplier','Receivable and Payable Accounts')],
"Partner's", required=True),
}
_defaults = {
'result_selection': 'customer',
}
def pre_print_report(self, cr, uid, ids, data, context=None):
if context is None:
context = {}
data['form'].update(self.read(cr, uid, ids, ['result_selection'], context=context)[0])
return data
#vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
AndrewSallans/osf.io | scripts/consistency/fix_tag_guids.py | 64 | 2104 | """Removes legacy Tag objects from the Guid namespace.
Tags were once GuidStoredObjects, but are no longer. The Guid table was not
cleaned of these references.
This caused a specific issue where "project" was a Tag id, and therefore was
resolveable to a Guid object, thereby breaking our routing system for URLs
beginning with /project/.
This script must be run from the OSF root directory for the imports to work.
::
$ python -m scripts.consistency.fix_tag_guids dry
$ python -m scripts.consistency.fix_tag_guids
Log:
Performed on production by sloria on 2014-08-15 at 11.45AM. 892 invalid GUID
objects were removed.
"""
import sys
from nose.tools import * # noqa
from framework import Q
from framework.guid.model import Guid
from website.app import init_app
from tests.base import OsfTestCase
from tests.factories import TagFactory, NodeFactory
QUERY = Q('referent.1', 'eq', "tag")
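# Guid.referent appears to be stored as a (primary key, collection name)
# pair, so matching index 1 against "tag" selects exactly the legacy Tag
# references described above (an assumption based on how this query is
# used here, not on the ODM documentation).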
def main():
# Set up storage backends
init_app(routes=False)
targets = get_targets()
if 'dry' in sys.argv:
print('{n} invalid GUID objects will be removed.'.format(n=targets.count()))
sys.exit(0)
else:
do_migration()
if get_targets().count() == 0:
print('All invalid references removed.')
else:
print('Failed to remove all references.')
sys.exit(1)
def do_migration():
Guid.remove(QUERY)
def get_targets():
return Guid.find(QUERY)
class TestMigrateLegacyTagGUIDObjects(OsfTestCase):
def setUp(self):
OsfTestCase.setUp(self)
tag = TagFactory()
self.target_guid = Guid(referent=tag)
self.target_guid.save()
self.nontarget_guid = Guid(referent=NodeFactory())
def test_get_targets(self):
result = list(get_targets())
assert_in(self.target_guid, result)
assert_not_in(self.nontarget_guid, result)
def test_do_migration(self):
# sanity check
assert_equal(len(list(get_targets())), 1)
do_migration()
assert_equal(len(list(get_targets())), 0)
if __name__ == '__main__':
main()
| apache-2.0 |
gunan/tensorflow | tensorflow/lite/micro/examples/magic_wand/train/data_split_test.py | 19 | 3122 | # Lint as: python3
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test for data_split.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import unittest
from data_split import read_data
from data_split import split_data
class TestSplit(unittest.TestCase):
def setUp(self): # pylint: disable=g-missing-super-call
self.data = read_data("./data/complete_data")
self.num_dic = {"wing": 0, "ring": 0, "slope": 0, "negative": 0}
with open("./data/complete_data", "r") as f:
lines = f.readlines()
self.num = len(lines)
def test_read_data(self):
self.assertEqual(len(self.data), self.num)
self.assertIsInstance(self.data, list)
self.assertIsInstance(self.data[0], dict)
self.assertEqual(
set(list(self.data[-1])), set(["gesture", "accel_ms2_xyz", "name"]))
def test_split_data(self):
with open("./data/complete_data", "r") as f:
lines = f.readlines()
for idx, line in enumerate(lines): # pylint: disable=unused-variable
dic = json.loads(line)
for ges in self.num_dic:
if dic["gesture"] == ges:
self.num_dic[ges] += 1
train_data_0, valid_data_0, test_data_100 = split_data(self.data, 0, 0)
train_data_50, valid_data_50, test_data_0 = split_data(self.data, 0.5, 0.5)
train_data_60, valid_data_20, test_data_20 = split_data(self.data, 0.6, 0.2)
len_60 = int(self.num_dic["wing"] * 0.6) + int(
self.num_dic["ring"] * 0.6) + int(self.num_dic["slope"] * 0.6) + int(
self.num_dic["negative"] * 0.6)
len_50 = int(self.num_dic["wing"] * 0.5) + int(
self.num_dic["ring"] * 0.5) + int(self.num_dic["slope"] * 0.5) + int(
self.num_dic["negative"] * 0.5)
len_20 = int(self.num_dic["wing"] * 0.2) + int(
self.num_dic["ring"] * 0.2) + int(self.num_dic["slope"] * 0.2) + int(
self.num_dic["negative"] * 0.2)
self.assertEqual(len(train_data_0), 0)
self.assertEqual(len(train_data_50), len_50)
self.assertEqual(len(train_data_60), len_60)
self.assertEqual(len(valid_data_0), 0)
self.assertEqual(len(valid_data_50), len_50)
self.assertEqual(len(valid_data_20), len_20)
self.assertEqual(len(test_data_100), self.num)
self.assertEqual(len(test_data_0), (self.num - 2 * len_50))
self.assertEqual(len(test_data_20), (self.num - len_60 - len_20))
if __name__ == "__main__":
unittest.main()
| apache-2.0 |
tecwebjoao/TecWeb-TF-2T-B-SI | venv/Lib/encodings/cp1258.py | 272 | 13364 | """ Python Character Mapping Codec cp1258 generated from 'MAPPINGS/VENDORS/MICSFT/WINDOWS/CP1258.TXT' with gencodec.py.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_table)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_table)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_table)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_table)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='cp1258',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Table
decoding_table = (
'\x00' # 0x00 -> NULL
'\x01' # 0x01 -> START OF HEADING
'\x02' # 0x02 -> START OF TEXT
'\x03' # 0x03 -> END OF TEXT
'\x04' # 0x04 -> END OF TRANSMISSION
'\x05' # 0x05 -> ENQUIRY
'\x06' # 0x06 -> ACKNOWLEDGE
'\x07' # 0x07 -> BELL
'\x08' # 0x08 -> BACKSPACE
'\t' # 0x09 -> HORIZONTAL TABULATION
'\n' # 0x0A -> LINE FEED
'\x0b' # 0x0B -> VERTICAL TABULATION
'\x0c' # 0x0C -> FORM FEED
'\r' # 0x0D -> CARRIAGE RETURN
'\x0e' # 0x0E -> SHIFT OUT
'\x0f' # 0x0F -> SHIFT IN
'\x10' # 0x10 -> DATA LINK ESCAPE
'\x11' # 0x11 -> DEVICE CONTROL ONE
'\x12' # 0x12 -> DEVICE CONTROL TWO
'\x13' # 0x13 -> DEVICE CONTROL THREE
'\x14' # 0x14 -> DEVICE CONTROL FOUR
'\x15' # 0x15 -> NEGATIVE ACKNOWLEDGE
'\x16' # 0x16 -> SYNCHRONOUS IDLE
'\x17' # 0x17 -> END OF TRANSMISSION BLOCK
'\x18' # 0x18 -> CANCEL
'\x19' # 0x19 -> END OF MEDIUM
'\x1a' # 0x1A -> SUBSTITUTE
'\x1b' # 0x1B -> ESCAPE
'\x1c' # 0x1C -> FILE SEPARATOR
'\x1d' # 0x1D -> GROUP SEPARATOR
'\x1e' # 0x1E -> RECORD SEPARATOR
'\x1f' # 0x1F -> UNIT SEPARATOR
' ' # 0x20 -> SPACE
'!' # 0x21 -> EXCLAMATION MARK
'"' # 0x22 -> QUOTATION MARK
'#' # 0x23 -> NUMBER SIGN
'$' # 0x24 -> DOLLAR SIGN
'%' # 0x25 -> PERCENT SIGN
'&' # 0x26 -> AMPERSAND
"'" # 0x27 -> APOSTROPHE
'(' # 0x28 -> LEFT PARENTHESIS
')' # 0x29 -> RIGHT PARENTHESIS
'*' # 0x2A -> ASTERISK
'+' # 0x2B -> PLUS SIGN
',' # 0x2C -> COMMA
'-' # 0x2D -> HYPHEN-MINUS
'.' # 0x2E -> FULL STOP
'/' # 0x2F -> SOLIDUS
'0' # 0x30 -> DIGIT ZERO
'1' # 0x31 -> DIGIT ONE
'2' # 0x32 -> DIGIT TWO
'3' # 0x33 -> DIGIT THREE
'4' # 0x34 -> DIGIT FOUR
'5' # 0x35 -> DIGIT FIVE
'6' # 0x36 -> DIGIT SIX
'7' # 0x37 -> DIGIT SEVEN
'8' # 0x38 -> DIGIT EIGHT
'9' # 0x39 -> DIGIT NINE
':' # 0x3A -> COLON
';' # 0x3B -> SEMICOLON
'<' # 0x3C -> LESS-THAN SIGN
'=' # 0x3D -> EQUALS SIGN
'>' # 0x3E -> GREATER-THAN SIGN
'?' # 0x3F -> QUESTION MARK
'@' # 0x40 -> COMMERCIAL AT
'A' # 0x41 -> LATIN CAPITAL LETTER A
'B' # 0x42 -> LATIN CAPITAL LETTER B
'C' # 0x43 -> LATIN CAPITAL LETTER C
'D' # 0x44 -> LATIN CAPITAL LETTER D
'E' # 0x45 -> LATIN CAPITAL LETTER E
'F' # 0x46 -> LATIN CAPITAL LETTER F
'G' # 0x47 -> LATIN CAPITAL LETTER G
'H' # 0x48 -> LATIN CAPITAL LETTER H
'I' # 0x49 -> LATIN CAPITAL LETTER I
'J' # 0x4A -> LATIN CAPITAL LETTER J
'K' # 0x4B -> LATIN CAPITAL LETTER K
'L' # 0x4C -> LATIN CAPITAL LETTER L
'M' # 0x4D -> LATIN CAPITAL LETTER M
'N' # 0x4E -> LATIN CAPITAL LETTER N
'O' # 0x4F -> LATIN CAPITAL LETTER O
'P' # 0x50 -> LATIN CAPITAL LETTER P
'Q' # 0x51 -> LATIN CAPITAL LETTER Q
'R' # 0x52 -> LATIN CAPITAL LETTER R
'S' # 0x53 -> LATIN CAPITAL LETTER S
'T' # 0x54 -> LATIN CAPITAL LETTER T
'U' # 0x55 -> LATIN CAPITAL LETTER U
'V' # 0x56 -> LATIN CAPITAL LETTER V
'W' # 0x57 -> LATIN CAPITAL LETTER W
'X' # 0x58 -> LATIN CAPITAL LETTER X
'Y' # 0x59 -> LATIN CAPITAL LETTER Y
'Z' # 0x5A -> LATIN CAPITAL LETTER Z
'[' # 0x5B -> LEFT SQUARE BRACKET
'\\' # 0x5C -> REVERSE SOLIDUS
']' # 0x5D -> RIGHT SQUARE BRACKET
'^' # 0x5E -> CIRCUMFLEX ACCENT
'_' # 0x5F -> LOW LINE
'`' # 0x60 -> GRAVE ACCENT
'a' # 0x61 -> LATIN SMALL LETTER A
'b' # 0x62 -> LATIN SMALL LETTER B
'c' # 0x63 -> LATIN SMALL LETTER C
'd' # 0x64 -> LATIN SMALL LETTER D
'e' # 0x65 -> LATIN SMALL LETTER E
'f' # 0x66 -> LATIN SMALL LETTER F
'g' # 0x67 -> LATIN SMALL LETTER G
'h' # 0x68 -> LATIN SMALL LETTER H
'i' # 0x69 -> LATIN SMALL LETTER I
'j' # 0x6A -> LATIN SMALL LETTER J
'k' # 0x6B -> LATIN SMALL LETTER K
'l' # 0x6C -> LATIN SMALL LETTER L
'm' # 0x6D -> LATIN SMALL LETTER M
'n' # 0x6E -> LATIN SMALL LETTER N
'o' # 0x6F -> LATIN SMALL LETTER O
'p' # 0x70 -> LATIN SMALL LETTER P
'q' # 0x71 -> LATIN SMALL LETTER Q
'r' # 0x72 -> LATIN SMALL LETTER R
's' # 0x73 -> LATIN SMALL LETTER S
't' # 0x74 -> LATIN SMALL LETTER T
'u' # 0x75 -> LATIN SMALL LETTER U
'v' # 0x76 -> LATIN SMALL LETTER V
'w' # 0x77 -> LATIN SMALL LETTER W
'x' # 0x78 -> LATIN SMALL LETTER X
'y' # 0x79 -> LATIN SMALL LETTER Y
'z' # 0x7A -> LATIN SMALL LETTER Z
'{' # 0x7B -> LEFT CURLY BRACKET
'|' # 0x7C -> VERTICAL LINE
'}' # 0x7D -> RIGHT CURLY BRACKET
'~' # 0x7E -> TILDE
'\x7f' # 0x7F -> DELETE
'\u20ac' # 0x80 -> EURO SIGN
'\ufffe' # 0x81 -> UNDEFINED
'\u201a' # 0x82 -> SINGLE LOW-9 QUOTATION MARK
'\u0192' # 0x83 -> LATIN SMALL LETTER F WITH HOOK
'\u201e' # 0x84 -> DOUBLE LOW-9 QUOTATION MARK
'\u2026' # 0x85 -> HORIZONTAL ELLIPSIS
'\u2020' # 0x86 -> DAGGER
'\u2021' # 0x87 -> DOUBLE DAGGER
'\u02c6' # 0x88 -> MODIFIER LETTER CIRCUMFLEX ACCENT
'\u2030' # 0x89 -> PER MILLE SIGN
'\ufffe' # 0x8A -> UNDEFINED
'\u2039' # 0x8B -> SINGLE LEFT-POINTING ANGLE QUOTATION MARK
'\u0152' # 0x8C -> LATIN CAPITAL LIGATURE OE
'\ufffe' # 0x8D -> UNDEFINED
'\ufffe' # 0x8E -> UNDEFINED
'\ufffe' # 0x8F -> UNDEFINED
'\ufffe' # 0x90 -> UNDEFINED
'\u2018' # 0x91 -> LEFT SINGLE QUOTATION MARK
'\u2019' # 0x92 -> RIGHT SINGLE QUOTATION MARK
'\u201c' # 0x93 -> LEFT DOUBLE QUOTATION MARK
'\u201d' # 0x94 -> RIGHT DOUBLE QUOTATION MARK
'\u2022' # 0x95 -> BULLET
'\u2013' # 0x96 -> EN DASH
'\u2014' # 0x97 -> EM DASH
'\u02dc' # 0x98 -> SMALL TILDE
'\u2122' # 0x99 -> TRADE MARK SIGN
'\ufffe' # 0x9A -> UNDEFINED
'\u203a' # 0x9B -> SINGLE RIGHT-POINTING ANGLE QUOTATION MARK
'\u0153' # 0x9C -> LATIN SMALL LIGATURE OE
'\ufffe' # 0x9D -> UNDEFINED
'\ufffe' # 0x9E -> UNDEFINED
'\u0178' # 0x9F -> LATIN CAPITAL LETTER Y WITH DIAERESIS
'\xa0' # 0xA0 -> NO-BREAK SPACE
'\xa1' # 0xA1 -> INVERTED EXCLAMATION MARK
'\xa2' # 0xA2 -> CENT SIGN
'\xa3' # 0xA3 -> POUND SIGN
'\xa4' # 0xA4 -> CURRENCY SIGN
'\xa5' # 0xA5 -> YEN SIGN
'\xa6' # 0xA6 -> BROKEN BAR
'\xa7' # 0xA7 -> SECTION SIGN
'\xa8' # 0xA8 -> DIAERESIS
'\xa9' # 0xA9 -> COPYRIGHT SIGN
'\xaa' # 0xAA -> FEMININE ORDINAL INDICATOR
'\xab' # 0xAB -> LEFT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xac' # 0xAC -> NOT SIGN
'\xad' # 0xAD -> SOFT HYPHEN
'\xae' # 0xAE -> REGISTERED SIGN
'\xaf' # 0xAF -> MACRON
'\xb0' # 0xB0 -> DEGREE SIGN
'\xb1' # 0xB1 -> PLUS-MINUS SIGN
'\xb2' # 0xB2 -> SUPERSCRIPT TWO
'\xb3' # 0xB3 -> SUPERSCRIPT THREE
'\xb4' # 0xB4 -> ACUTE ACCENT
'\xb5' # 0xB5 -> MICRO SIGN
'\xb6' # 0xB6 -> PILCROW SIGN
'\xb7' # 0xB7 -> MIDDLE DOT
'\xb8' # 0xB8 -> CEDILLA
'\xb9' # 0xB9 -> SUPERSCRIPT ONE
'\xba' # 0xBA -> MASCULINE ORDINAL INDICATOR
'\xbb' # 0xBB -> RIGHT-POINTING DOUBLE ANGLE QUOTATION MARK
'\xbc' # 0xBC -> VULGAR FRACTION ONE QUARTER
'\xbd' # 0xBD -> VULGAR FRACTION ONE HALF
'\xbe' # 0xBE -> VULGAR FRACTION THREE QUARTERS
'\xbf' # 0xBF -> INVERTED QUESTION MARK
'\xc0' # 0xC0 -> LATIN CAPITAL LETTER A WITH GRAVE
'\xc1' # 0xC1 -> LATIN CAPITAL LETTER A WITH ACUTE
'\xc2' # 0xC2 -> LATIN CAPITAL LETTER A WITH CIRCUMFLEX
'\u0102' # 0xC3 -> LATIN CAPITAL LETTER A WITH BREVE
'\xc4' # 0xC4 -> LATIN CAPITAL LETTER A WITH DIAERESIS
'\xc5' # 0xC5 -> LATIN CAPITAL LETTER A WITH RING ABOVE
'\xc6' # 0xC6 -> LATIN CAPITAL LETTER AE
'\xc7' # 0xC7 -> LATIN CAPITAL LETTER C WITH CEDILLA
'\xc8' # 0xC8 -> LATIN CAPITAL LETTER E WITH GRAVE
'\xc9' # 0xC9 -> LATIN CAPITAL LETTER E WITH ACUTE
'\xca' # 0xCA -> LATIN CAPITAL LETTER E WITH CIRCUMFLEX
'\xcb' # 0xCB -> LATIN CAPITAL LETTER E WITH DIAERESIS
'\u0300' # 0xCC -> COMBINING GRAVE ACCENT
'\xcd' # 0xCD -> LATIN CAPITAL LETTER I WITH ACUTE
'\xce' # 0xCE -> LATIN CAPITAL LETTER I WITH CIRCUMFLEX
'\xcf' # 0xCF -> LATIN CAPITAL LETTER I WITH DIAERESIS
'\u0110' # 0xD0 -> LATIN CAPITAL LETTER D WITH STROKE
'\xd1' # 0xD1 -> LATIN CAPITAL LETTER N WITH TILDE
'\u0309' # 0xD2 -> COMBINING HOOK ABOVE
'\xd3' # 0xD3 -> LATIN CAPITAL LETTER O WITH ACUTE
'\xd4' # 0xD4 -> LATIN CAPITAL LETTER O WITH CIRCUMFLEX
'\u01a0' # 0xD5 -> LATIN CAPITAL LETTER O WITH HORN
'\xd6' # 0xD6 -> LATIN CAPITAL LETTER O WITH DIAERESIS
'\xd7' # 0xD7 -> MULTIPLICATION SIGN
'\xd8' # 0xD8 -> LATIN CAPITAL LETTER O WITH STROKE
'\xd9' # 0xD9 -> LATIN CAPITAL LETTER U WITH GRAVE
'\xda' # 0xDA -> LATIN CAPITAL LETTER U WITH ACUTE
'\xdb' # 0xDB -> LATIN CAPITAL LETTER U WITH CIRCUMFLEX
'\xdc' # 0xDC -> LATIN CAPITAL LETTER U WITH DIAERESIS
'\u01af' # 0xDD -> LATIN CAPITAL LETTER U WITH HORN
'\u0303' # 0xDE -> COMBINING TILDE
'\xdf' # 0xDF -> LATIN SMALL LETTER SHARP S
'\xe0' # 0xE0 -> LATIN SMALL LETTER A WITH GRAVE
'\xe1' # 0xE1 -> LATIN SMALL LETTER A WITH ACUTE
'\xe2' # 0xE2 -> LATIN SMALL LETTER A WITH CIRCUMFLEX
'\u0103' # 0xE3 -> LATIN SMALL LETTER A WITH BREVE
'\xe4' # 0xE4 -> LATIN SMALL LETTER A WITH DIAERESIS
'\xe5' # 0xE5 -> LATIN SMALL LETTER A WITH RING ABOVE
'\xe6' # 0xE6 -> LATIN SMALL LETTER AE
'\xe7' # 0xE7 -> LATIN SMALL LETTER C WITH CEDILLA
'\xe8' # 0xE8 -> LATIN SMALL LETTER E WITH GRAVE
'\xe9' # 0xE9 -> LATIN SMALL LETTER E WITH ACUTE
'\xea' # 0xEA -> LATIN SMALL LETTER E WITH CIRCUMFLEX
'\xeb' # 0xEB -> LATIN SMALL LETTER E WITH DIAERESIS
'\u0301' # 0xEC -> COMBINING ACUTE ACCENT
'\xed' # 0xED -> LATIN SMALL LETTER I WITH ACUTE
'\xee' # 0xEE -> LATIN SMALL LETTER I WITH CIRCUMFLEX
'\xef' # 0xEF -> LATIN SMALL LETTER I WITH DIAERESIS
'\u0111' # 0xF0 -> LATIN SMALL LETTER D WITH STROKE
'\xf1' # 0xF1 -> LATIN SMALL LETTER N WITH TILDE
'\u0323' # 0xF2 -> COMBINING DOT BELOW
'\xf3' # 0xF3 -> LATIN SMALL LETTER O WITH ACUTE
'\xf4' # 0xF4 -> LATIN SMALL LETTER O WITH CIRCUMFLEX
'\u01a1' # 0xF5 -> LATIN SMALL LETTER O WITH HORN
'\xf6' # 0xF6 -> LATIN SMALL LETTER O WITH DIAERESIS
'\xf7' # 0xF7 -> DIVISION SIGN
'\xf8' # 0xF8 -> LATIN SMALL LETTER O WITH STROKE
'\xf9' # 0xF9 -> LATIN SMALL LETTER U WITH GRAVE
'\xfa' # 0xFA -> LATIN SMALL LETTER U WITH ACUTE
'\xfb' # 0xFB -> LATIN SMALL LETTER U WITH CIRCUMFLEX
'\xfc' # 0xFC -> LATIN SMALL LETTER U WITH DIAERESIS
'\u01b0' # 0xFD -> LATIN SMALL LETTER U WITH HORN
'\u20ab' # 0xFE -> DONG SIGN
'\xff' # 0xFF -> LATIN SMALL LETTER Y WITH DIAERESIS
)
### Encoding table
encoding_table=codecs.charmap_build(decoding_table)
| apache-2.0 |
misterdanb/micropython | tests/basics/gen_yield_from_ducktype.py | 107 | 1034 | class MyGen:
def __init__(self):
self.v = 0
def __iter__(self):
return self
def __next__(self):
self.v += 1
if self.v > 5:
raise StopIteration
return self.v
def gen():
yield from MyGen()
def gen2():
yield from gen()
print(list(gen()))
print(list(gen2()))
class Incrementer:
def __iter__(self):
return self
def __next__(self):
return self.send(None)
def send(self, val):
if val is None:
return "Incrementer initialized"
return val + 1
def gen3():
yield from Incrementer()
g = gen3()
print(next(g))
print(g.send(5))
print(g.send(100))
#
# Test proper handling of StopIteration vs other exceptions
#
class MyIter:
def __iter__(self):
return self
def __next__(self):
raise StopIteration(42)
def gen4():
global ret
ret = yield from MyIter()
1//0
ret = None
try:
print(list(gen4()))
except ZeroDivisionError:
print("ZeroDivisionError")
print(ret)
| mit |
RO-ny9/python-for-android | python-build/python-libs/gdata/samples/oauth/oauth_example.py | 89 | 4415 | #!/usr/bin/python
#
# Copyright (C) 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'kunalmshah.userid (Kunal Shah)'
import sys
import os.path
import getopt
import gdata.auth
import gdata.docs.service
class OAuthSample(object):
"""An OAuthSample object demonstrates the three-legged OAuth process."""
  def __init__(self, consumer_key, consumer_secret):
"""Constructor for the OAuthSample object.
Takes a consumer key and consumer secret, authenticates using OAuth
mechanism and lists the document titles using Document List Data API.
Uses HMAC-SHA1 signature method.
Args:
consumer_key: string Domain identifying third_party web application.
consumer_secret: string Secret generated during registration.
Returns:
An OAuthSample object used to run the sample demonstrating the
way to use OAuth authentication mode.
"""
self.consumer_key = consumer_key
    self.consumer_secret = consumer_secret
self.gd_client = gdata.docs.service.DocsService()
def _PrintFeed(self, feed):
"""Prints out the contents of a feed to the console.
Args:
feed: A gdata.docs.DocumentListFeed instance.
"""
if not feed.entry:
print 'No entries in feed.\n'
i = 1
for entry in feed.entry:
print '%d. %s\n' % (i, entry.title.text.encode('UTF-8'))
i += 1
def _ListAllDocuments(self):
"""Retrieves a list of all of a user's documents and displays them."""
feed = self.gd_client.GetDocumentListFeed()
self._PrintFeed(feed)
def Run(self):
"""Demonstrates usage of OAuth authentication mode and retrieves a list of
documents using Document List Data API."""
print '\nSTEP 1: Set OAuth input parameters.'
self.gd_client.SetOAuthInputParameters(
gdata.auth.OAuthSignatureMethod.HMAC_SHA1,
self.consumer_key, consumer_secret=self.consumer_secret)
print '\nSTEP 2: Fetch OAuth Request token.'
request_token = self.gd_client.FetchOAuthRequestToken()
print 'Request Token fetched: %s' % request_token
print '\nSTEP 3: Set the fetched OAuth token.'
self.gd_client.SetOAuthToken(request_token)
print 'OAuth request token set.'
print '\nSTEP 4: Generate OAuth authorization URL.'
auth_url = self.gd_client.GenerateOAuthAuthorizationURL()
print 'Authorization URL: %s' % auth_url
raw_input('Manually go to the above URL and authenticate.'
'Press a key after authorization.')
print '\nSTEP 5: Upgrade to an OAuth access token.'
self.gd_client.UpgradeToOAuthAccessToken()
print 'Access Token: %s' % (
self.gd_client.token_store.find_token(request_token.scopes[0]))
print '\nYour Documents:\n'
self._ListAllDocuments()
print 'STEP 6: Revoke the OAuth access token after use.'
self.gd_client.RevokeOAuthToken()
print 'OAuth access token revoked.'
def main():
"""Demonstrates usage of OAuth authentication mode.
Prints a list of documents. This demo uses HMAC-SHA1 signature method.
"""
# Parse command line options
try:
opts, args = getopt.getopt(sys.argv[1:], '', ['consumer_key=',
'consumer_secret='])
except getopt.error, msg:
print ('python oauth_example.py --consumer_key [oauth_consumer_key] '
'--consumer_secret [consumer_secret] ')
sys.exit(2)
consumer_key = ''
consumer_secret = ''
# Process options
for option, arg in opts:
if option == '--consumer_key':
consumer_key = arg
elif option == '--consumer_secret':
consumer_secret = arg
while not consumer_key:
consumer_key = raw_input('Please enter consumer key: ')
while not consumer_secret:
consumer_secret = raw_input('Please enter consumer secret: ')
sample = OAuthSample(consumer_key, consumer_secret)
sample.Run()
if __name__ == '__main__':
main()
| apache-2.0 |
Tokyo-Buffalo/tokyosouth | env/lib/python3.6/site-packages/twisted/python/test/test_zipstream.py | 1 | 12160 | # Copyright (c) Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Tests for L{twisted.python.zipstream}
"""
import random
import zipfile
from hashlib import md5
from twisted.python import zipstream, filepath
from twisted.trial import unittest
class FileEntryMixin:
"""
File entry classes should behave as file-like objects
"""
def getFileEntry(self, contents):
"""
Return an appropriate zip file entry
"""
filename = self.mktemp()
with zipfile.ZipFile(filename, 'w', self.compression) as z:
z.writestr('content', contents)
z = zipstream.ChunkingZipFile(filename, 'r')
return z.readfile('content')
def test_isatty(self):
"""
zip files should not be ttys, so isatty() should be false
"""
with self.getFileEntry('') as fileEntry:
self.assertFalse(fileEntry.isatty())
def test_closed(self):
"""
The C{closed} attribute should reflect whether C{close()} has been
called.
"""
with self.getFileEntry('') as fileEntry:
self.assertFalse(fileEntry.closed)
self.assertTrue(fileEntry.closed)
def test_readline(self):
"""
C{readline()} should mirror L{file.readline} and return up to a single
delimiter.
"""
with self.getFileEntry(b'hoho\nho') as fileEntry:
self.assertEqual(fileEntry.readline(), b'hoho\n')
self.assertEqual(fileEntry.readline(), b'ho')
self.assertEqual(fileEntry.readline(), b'')
def test_next(self):
"""
Zip file entries should implement the iterator protocol as files do.
"""
with self.getFileEntry(b'ho\nhoho') as fileEntry:
self.assertEqual(fileEntry.next(), b'ho\n')
self.assertEqual(fileEntry.next(), b'hoho')
self.assertRaises(StopIteration, fileEntry.next)
def test_readlines(self):
"""
C{readlines()} should return a list of all the lines.
"""
with self.getFileEntry(b'ho\nho\nho') as fileEntry:
self.assertEqual(fileEntry.readlines(), [b'ho\n', b'ho\n', b'ho'])
def test_iteration(self):
"""
C{__iter__()} and C{xreadlines()} should return C{self}.
"""
with self.getFileEntry('') as fileEntry:
self.assertIs(iter(fileEntry), fileEntry)
self.assertIs(fileEntry.xreadlines(), fileEntry)
def test_readWhole(self):
"""
C{.read()} should read the entire file.
"""
contents = b"Hello, world!"
with self.getFileEntry(contents) as entry:
self.assertEqual(entry.read(), contents)
def test_readPartial(self):
"""
C{.read(num)} should read num bytes from the file.
"""
contents = "0123456789"
with self.getFileEntry(contents) as entry:
one = entry.read(4)
two = entry.read(200)
self.assertEqual(one, b"0123")
self.assertEqual(two, b"456789")
def test_tell(self):
"""
C{.tell()} should return the number of bytes that have been read so
far.
"""
contents = "x" * 100
with self.getFileEntry(contents) as entry:
entry.read(2)
self.assertEqual(entry.tell(), 2)
entry.read(4)
self.assertEqual(entry.tell(), 6)
class DeflatedZipFileEntryTests(FileEntryMixin, unittest.TestCase):
"""
DeflatedZipFileEntry should be file-like
"""
compression = zipfile.ZIP_DEFLATED
class ZipFileEntryTests(FileEntryMixin, unittest.TestCase):
"""
ZipFileEntry should be file-like
"""
compression = zipfile.ZIP_STORED
class ZipstreamTests(unittest.TestCase):
"""
Tests for twisted.python.zipstream
"""
def setUp(self):
"""
Creates junk data that can be compressed and a test directory for any
files that will be created
"""
self.testdir = filepath.FilePath(self.mktemp())
self.testdir.makedirs()
self.unzipdir = self.testdir.child('unzipped')
self.unzipdir.makedirs()
def makeZipFile(self, contents, directory=''):
"""
Makes a zip file archive containing len(contents) files. Contents
should be a list of strings, each string being the content of one file.
"""
zpfilename = self.testdir.child('zipfile.zip').path
with zipfile.ZipFile(zpfilename, 'w') as zpfile:
for i, content in enumerate(contents):
filename = str(i)
if directory:
filename = directory + "/" + filename
zpfile.writestr(filename, content)
return zpfilename
def test_invalidMode(self):
"""
A ChunkingZipFile opened in write-mode should not allow .readfile(),
and raise a RuntimeError instead.
"""
with zipstream.ChunkingZipFile(self.mktemp(), "w") as czf:
self.assertRaises(RuntimeError, czf.readfile, "something")
def test_closedArchive(self):
"""
A closed ChunkingZipFile should raise a L{RuntimeError} when
.readfile() is invoked.
"""
czf = zipstream.ChunkingZipFile(self.makeZipFile(["something"]), "r")
czf.close()
self.assertRaises(RuntimeError, czf.readfile, "something")
def test_invalidHeader(self):
"""
A zipfile entry with the wrong magic number should raise BadZipfile for
readfile(), but that should not affect other files in the archive.
"""
fn = self.makeZipFile(["test contents",
"more contents"])
with zipfile.ZipFile(fn, "r") as zf:
zeroOffset = zf.getinfo("0").header_offset
# Zero out just the one header.
with open(fn, "r+b") as scribble:
scribble.seek(zeroOffset, 0)
scribble.write(b'0' * 4)
with zipstream.ChunkingZipFile(fn) as czf:
self.assertRaises(zipfile.BadZipfile, czf.readfile, "0")
with czf.readfile("1") as zfe:
self.assertEqual(zfe.read(), b"more contents")
def test_filenameMismatch(self):
"""
A zipfile entry with a different filename than is found in the central
directory should raise BadZipfile.
"""
fn = self.makeZipFile([b"test contents",
b"more contents"])
with zipfile.ZipFile(fn, "r") as zf:
info = zf.getinfo("0")
info.filename = "not zero"
with open(fn, "r+b") as scribble:
scribble.seek(info.header_offset, 0)
scribble.write(info.FileHeader())
with zipstream.ChunkingZipFile(fn) as czf:
self.assertRaises(zipfile.BadZipfile, czf.readfile, "0")
with czf.readfile("1") as zfe:
self.assertEqual(zfe.read(), b"more contents")
def test_unsupportedCompression(self):
"""
A zipfile which describes an unsupported compression mechanism should
raise BadZipfile.
"""
fn = self.mktemp()
with zipfile.ZipFile(fn, "w") as zf:
zi = zipfile.ZipInfo("0")
zf.writestr(zi, "some data")
# Mangle its compression type in the central directory; can't do
# this before the writestr call or zipfile will (correctly) tell us
# not to pass bad compression types :)
zi.compress_type = 1234
with zipstream.ChunkingZipFile(fn) as czf:
self.assertRaises(zipfile.BadZipfile, czf.readfile, "0")
def test_extraData(self):
"""
readfile() should skip over 'extra' data present in the zip metadata.
"""
fn = self.mktemp()
with zipfile.ZipFile(fn, 'w') as zf:
zi = zipfile.ZipInfo("0")
zi.extra = b"hello, extra"
zf.writestr(zi, b"the real data")
with zipstream.ChunkingZipFile(fn) as czf, czf.readfile("0") as zfe:
self.assertEqual(zfe.read(), b"the real data")
def test_unzipIterChunky(self):
"""
L{twisted.python.zipstream.unzipIterChunky} returns an iterator which
must be exhausted to completely unzip the input archive.
"""
numfiles = 10
contents = ['This is test file %d!' % i for i in range(numfiles)]
contents = [i.encode("ascii") for i in contents]
zpfilename = self.makeZipFile(contents)
list(zipstream.unzipIterChunky(zpfilename, self.unzipdir.path))
self.assertEqual(
set(self.unzipdir.listdir()),
set(map(str, range(numfiles))))
for child in self.unzipdir.children():
num = int(child.basename())
self.assertEqual(child.getContent(), contents[num])
def test_unzipIterChunkyDirectory(self):
"""
The path to which a file is extracted by L{zipstream.unzipIterChunky}
is determined by joining the C{directory} argument to C{unzip} with the
path within the archive of the file being extracted.
"""
numfiles = 10
contents = ['This is test file %d!' % i for i in range(numfiles)]
contents = [i.encode("ascii") for i in contents]
zpfilename = self.makeZipFile(contents, 'foo')
list(zipstream.unzipIterChunky(zpfilename, self.unzipdir.path))
fileContents = {str(num).encode("ascii") for num in range(numfiles)}
self.assertEqual(
set(self.unzipdir.child(b'foo').listdir()),
fileContents)
for child in self.unzipdir.child(b'foo').children():
num = int(child.basename())
self.assertEqual(child.getContent(), contents[num])
# XXX these tests are kind of gross and old, but I think unzipIterChunky is
# kind of a gross function anyway. We should really write an abstract
# copyTo/moveTo that operates on FilePath and make sure ZipPath can support
# it, then just deprecate / remove this stuff.
def _unzipIterChunkyTest(self, compression, chunksize, lower, upper):
"""
unzipIterChunky should unzip the given number of bytes per iteration.
"""
junk = b''
for n in range(1000):
num = round(random.random(), 12)
numEncoded = str(num).encode("ascii")
junk += b' '+numEncoded
junkmd5 = md5(junk).hexdigest()
tempdir = filepath.FilePath(self.mktemp())
tempdir.makedirs()
zfpath = tempdir.child('bigfile.zip').path
self._makebigfile(zfpath, compression, junk)
uziter = zipstream.unzipIterChunky(zfpath, tempdir.path,
chunksize=chunksize)
r = next(uziter)
# test that the number of chunks is in the right ballpark;
# this could theoretically be any number but statistically it
# should always be in this range
approx = lower < r < upper
self.assertTrue(approx)
for r in uziter:
pass
self.assertEqual(r, 0)
with tempdir.child("zipstreamjunk").open() as f:
newmd5 = md5(f.read()).hexdigest()
self.assertEqual(newmd5, junkmd5)
def test_unzipIterChunkyStored(self):
"""
unzipIterChunky should unzip the given number of bytes per iteration on
a stored archive.
"""
self._unzipIterChunkyTest(zipfile.ZIP_STORED, 500, 35, 45)
def test_chunkyDeflated(self):
"""
unzipIterChunky should unzip the given number of bytes per iteration on
a deflated archive.
"""
self._unzipIterChunkyTest(zipfile.ZIP_DEFLATED, 972, 23, 27)
def _makebigfile(self, filename, compression, junk):
"""
Create a zip file with the given file name and compression scheme.
"""
with zipfile.ZipFile(filename, 'w', compression) as zf:
for i in range(10):
fn = 'zipstream%d' % i
zf.writestr(fn, "")
zf.writestr('zipstreamjunk', junk)
| mit |
trondhindenes/ansible | lib/ansible/modules/cloud/vmware/vmware_cluster.py | 20 | 10016 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2015, Joseph Callen <jcallen () csc.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: vmware_cluster
short_description: Manage VMware vSphere clusters
description:
- Add or remove VMware vSphere clusters.
version_added: '2.0'
author:
- Joseph Callen (@jcpowermac)
requirements:
- Tested on ESXi 5.5
- PyVmomi installed
options:
cluster_name:
description:
- The name of the cluster that will be created.
required: yes
datacenter_name:
description:
- The name of the datacenter the cluster will be created in.
required: yes
enable_drs:
description:
- If set to C(yes) will enable DRS when the cluster is created.
type: bool
default: 'no'
enable_ha:
description:
- If set to C(yes) will enable HA when the cluster is created.
type: bool
default: 'no'
enable_vsan:
description:
- If set to C(yes) will enable vSAN when the cluster is created.
type: bool
default: 'no'
state:
description:
- Create (C(present)) or remove (C(absent)) a VMware vSphere cluster.
choices: [absent, present]
default: present
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = r'''
- name: Create Cluster
vmware_cluster:
hostname: '{{ vcenter_hostname }}'
username: '{{ vcenter_username }}'
password: '{{ vcenter_password }}'
datacenter_name: datacenter
cluster_name: cluster
enable_ha: yes
enable_drs: yes
enable_vsan: yes
delegate_to: localhost
'''
try:
from pyVmomi import vim, vmodl
HAS_PYVMOMI = True
except ImportError:
HAS_PYVMOMI = False
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.vmware import (HAS_PYVMOMI,
TaskError,
connect_to_api,
find_cluster_by_name_datacenter,
find_datacenter_by_name,
vmware_argument_spec,
wait_for_task
)
class VMwareCluster(object):
def __init__(self, module):
self.module = module
self.cluster_name = module.params['cluster_name']
self.datacenter_name = module.params['datacenter_name']
self.enable_drs = module.params['enable_drs']
self.enable_ha = module.params['enable_ha']
self.enable_vsan = module.params['enable_vsan']
self.desired_state = module.params['state']
self.datacenter = None
self.cluster = None
self.content = connect_to_api(module)
def process_state(self):
cluster_states = {
'absent': {
'present': self.state_destroy_cluster,
'absent': self.state_exit_unchanged,
},
'present': {
'update': self.state_update_cluster,
'present': self.state_exit_unchanged,
'absent': self.state_create_cluster,
}
}
current_state = self.check_cluster_configuration()
# Based on the desired_state and the current_state call
# the appropriate method from the dictionary
cluster_states[self.desired_state][current_state]()
def configure_ha(self):
das_config = vim.cluster.DasConfigInfo()
das_config.enabled = self.enable_ha
das_config.admissionControlPolicy = vim.cluster.FailoverLevelAdmissionControlPolicy()
das_config.admissionControlPolicy.failoverLevel = 2
return das_config
def configure_drs(self):
drs_config = vim.cluster.DrsConfigInfo()
drs_config.enabled = self.enable_drs
# Set to partially automated
drs_config.vmotionRate = 3
return drs_config
def configure_vsan(self):
vsan_config = vim.vsan.cluster.ConfigInfo()
vsan_config.enabled = self.enable_vsan
vsan_config.defaultConfig = vim.vsan.cluster.ConfigInfo.HostDefaultInfo()
vsan_config.defaultConfig.autoClaimStorage = False
return vsan_config
def state_create_cluster(self):
try:
cluster_config_spec = vim.cluster.ConfigSpecEx()
cluster_config_spec.dasConfig = self.configure_ha()
cluster_config_spec.drsConfig = self.configure_drs()
if self.enable_vsan:
cluster_config_spec.vsanConfig = self.configure_vsan()
if not self.module.check_mode:
self.datacenter.hostFolder.CreateClusterEx(self.cluster_name, cluster_config_spec)
self.module.exit_json(changed=True)
except vim.fault.DuplicateName:
self.module.fail_json(msg="A cluster with the name %s already exists" % self.cluster_name)
except vmodl.fault.InvalidArgument:
self.module.fail_json(msg="Cluster configuration specification parameter is invalid")
except vim.fault.InvalidName:
self.module.fail_json(msg="%s is an invalid name for a cluster" % self.cluster_name)
except vmodl.fault.NotSupported:
# This should never happen
self.module.fail_json(msg="Trying to create a cluster on an incorrect folder object")
except vmodl.RuntimeFault as runtime_fault:
self.module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
# This should never happen either
self.module.fail_json(msg=method_fault.msg)
def state_destroy_cluster(self):
changed = True
result = None
try:
if not self.module.check_mode:
task = self.cluster.Destroy_Task()
changed, result = wait_for_task(task)
self.module.exit_json(changed=changed, result=result)
except vim.fault.VimFault as vim_fault:
self.module.fail_json(msg=vim_fault.msg)
except vmodl.RuntimeFault as runtime_fault:
self.module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
self.module.fail_json(msg=method_fault.msg)
def state_exit_unchanged(self):
self.module.exit_json(changed=False)
def state_update_cluster(self):
cluster_config_spec = vim.cluster.ConfigSpecEx()
changed = True
result = None
if self.cluster.configurationEx.dasConfig.enabled != self.enable_ha:
cluster_config_spec.dasConfig = self.configure_ha()
if self.cluster.configurationEx.drsConfig.enabled != self.enable_drs:
cluster_config_spec.drsConfig = self.configure_drs()
if self.cluster.configurationEx.vsanConfigInfo.enabled != self.enable_vsan:
cluster_config_spec.vsanConfig = self.configure_vsan()
try:
if not self.module.check_mode:
task = self.cluster.ReconfigureComputeResource_Task(cluster_config_spec, True)
changed, result = wait_for_task(task)
self.module.exit_json(changed=changed, result=result)
except vmodl.RuntimeFault as runtime_fault:
self.module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
self.module.fail_json(msg=method_fault.msg)
except TaskError as task_e:
self.module.fail_json(msg=str(task_e))
def check_cluster_configuration(self):
try:
self.datacenter = find_datacenter_by_name(self.content, self.datacenter_name)
if self.datacenter is None:
self.module.fail_json(msg="Datacenter %s does not exist, "
"please create first with Ansible Module vmware_datacenter or manually."
% self.datacenter_name)
self.cluster = find_cluster_by_name_datacenter(self.datacenter, self.cluster_name)
if self.cluster is None:
return 'absent'
else:
desired_state = (self.enable_ha,
self.enable_drs,
self.enable_vsan)
current_state = (self.cluster.configurationEx.dasConfig.enabled,
self.cluster.configurationEx.drsConfig.enabled,
self.cluster.configurationEx.vsanConfigInfo.enabled)
if desired_state != current_state:
return 'update'
else:
return 'present'
except vmodl.RuntimeFault as runtime_fault:
self.module.fail_json(msg=runtime_fault.msg)
except vmodl.MethodFault as method_fault:
self.module.fail_json(msg=method_fault.msg)
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(dict(
cluster_name=dict(type='str', required=True),
datacenter_name=dict(type='str', required=True),
enable_drs=dict(type='bool', default=False),
enable_ha=dict(type='bool', default=False),
enable_vsan=dict(type='bool', default=False),
state=dict(type='str', default='present', choices=['absent', 'present']),
))
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
)
if not HAS_PYVMOMI:
module.fail_json(msg='pyvmomi is required for this module')
vmware_cluster = VMwareCluster(module)
vmware_cluster.process_state()
if __name__ == '__main__':
main()
| gpl-3.0 |
benosteen/mypaint | gui/preferenceswindow.py | 1 | 13712 | # This file is part of MyPaint.
# Copyright (C) 2008-2009 by Martin Renold <martinxyz@gmx.ch>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
"preferences dialog"
from bisect import bisect_left
from gettext import gettext as _
import gtk, os
gdk = gtk.gdk
from functionwindow import CurveWidget
from lib import mypaintlib
import windowing, filehandling
device_modes = [
('disabled', _("Disabled (no pressure sensitivity)")),
('screen', _("Screen (normal)")),
('window', _("Window (not recommended)")), ]
RESPONSE_REVERT = 1
# Rebindable mouse buttons
mouse_button_actions = [
# These can be names of actions within ActionGroups defined elsewhere,
# or names of actions the handler interprets itself.
# NOTE: The translatable strings for actions are duplicated from
# their action definition. Please keep in sync (or refactor to get the string from there)
# (action_or_whatever, label)
('no_action', _("No action")), #[0] is the default for the comboboxes
('popup_menu', _("Menu")),
('ToggleSubwindows', _("Toggle Subwindows")),
('ColorPickerPopup', _("Pick Color")),
('PickContext', _('Pick Context (layer, brush and color)')),
('PickLayer', _('Select Layer at Cursor')),
('pan_canvas', _("Pan")),
('zoom_canvas', _("Zoom")),
('rotate_canvas', _("Rotate")),
('straight_line', _("Straight Line")),
('straight_line_sequence', _("Sequence of Straight Lines")),
('ColorChangerPopup', _("Color Changer")),
('ColorRingPopup', _("Color Ring")),
('ColorHistoryPopup', _("Color History")),
]
mouse_button_prefs = [
# Used for creating the menus,
# (pref_name, label)
("input.button1_shift_action", _("Button 1 + Shift")),
("input.button1_ctrl_action", _("Button 1 + Ctrl (or Alt)")),
("input.button2_action", _("Button 2")),
("input.button2_shift_action", _("Button 2 + Shift")),
("input.button2_ctrl_action", _("Button 2 + Ctrl (or Alt)")),
("input.button3_action", _("Button 3")),
("input.button3_shift_action", _("Button 3 + Shift")),
("input.button3_ctrl_action", _("Button 3 + Ctrl (or Alt)")),
]
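# Illustration only (hypothetical helper, not part of this module): the
# stored preference value maps back to a combobox index by scanning
# mouse_button_actions, as update_ui() below does inline:
#
#     def action_index(action_name):
#         for i, (name, label) in enumerate(mouse_button_actions):
#             if name == action_name:
#                 return i
#         return 0  # fall back to 'no_action', the first entry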
class Window(windowing.Dialog):
'''Window for manipulating preferences.'''
def __init__(self, app):
flags = gtk.DIALOG_DESTROY_WITH_PARENT
buttons = (gtk.STOCK_REVERT_TO_SAVED, RESPONSE_REVERT,
gtk.STOCK_OK, gtk.RESPONSE_ACCEPT)
windowing.Dialog.__init__(self, app=app, title=_('Preferences'),
parent=app.drawWindow, flags=flags,
buttons=buttons)
self.connect('response', self.on_response)
self.in_update_ui = False
# Set up widgets
nb = gtk.Notebook()
nb.set_border_width(12)
self.vbox.pack_start(nb, expand=True, padding=0)
### Input tab
table = gtk.Table(5, 3)
table.set_border_width(12)
table.set_col_spacing(0, 12)
table.set_col_spacing(1, 12)
table.set_row_spacings(6)
current_row = 0
# TRANSLATORS: Tab label
nb.append_page(table, gtk.Label(_('Pen Input')))
xopt = gtk.FILL | gtk.EXPAND
yopt = gtk.FILL
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup(_('<b>Input Device</b>'))
table.attach(l, 0, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_line_wrap(True)
l.set_markup(_('Scale input pressure to brush pressure. This is applied to all input devices. The mouse button has an input pressure of 0.5 when pressed.'))
table.attach(l, 1, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
t = gtk.Table(4, 4)
self.cv = CurveWidget(self.pressure_curve_changed_cb, magnetic=False)
t.attach(self.cv, 0, 3, 0, 3, gtk.EXPAND | gtk.FILL, gtk.EXPAND | gtk.FILL, 5, 0)
l1 = gtk.Label('1.0')
        if hasattr(l1, 'set_angle'):
# TRANSLATORS: Graph y-axis label
l2 = gtk.Label(_('Brush Pressure'))
l2.set_angle(90)
else:
l2 = gtk.Label('')
l3 = gtk.Label('0.0')
t.attach(l1, 3, 4, 0, 1, 0, 0, 5, 0)
t.attach(l2, 3, 4, 1, 2, 0, gtk.EXPAND, 5, 0)
t.attach(l3, 3, 4, 2, 3, 0, 0, 5, 0)
l4 = gtk.Label('0.0')
# TRANSLATORS: Graph x-axis label
l5 = gtk.Label(_('Input Pressure'))
l5.set_justify(gtk.JUSTIFY_CENTER)
l6 = gtk.Label('1.0')
t.attach(l4, 0, 1, 3, 4, 0, 0, 5, 0)
t.attach(l5, 1, 2, 3, 4, gtk.EXPAND, 0, 5, 0)
t.attach(l6, 2, 3, 3, 4, 0, 0, 5, 0)
table.attach(t, 1, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
l = gtk.Label(_('Mode: '))
l.set_alignment(0.0, 0.5)
table.attach(l, 1, 2, current_row, current_row + 1, xopt, yopt)
combo = self.input_devices_combo = gtk.combo_box_new_text()
for m, s in device_modes:
combo.append_text(s)
combo.connect('changed', self.input_devices_combo_changed_cb)
table.attach(combo, 2, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
### Buttons tab
table = gtk.Table(5, 3)
table.set_border_width(12)
table.set_col_spacing(0, 12)
table.set_col_spacing(1, 12)
table.set_row_spacings(6)
current_row = 0
nb.append_page(table, gtk.Label(_('Buttons')))
xopt = gtk.FILL | gtk.EXPAND
yopt = gtk.FILL
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup(_('<b>Pen and mouse button mappings</b>'))
table.attach(l, 0, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
# Mouse button actions
self.mouse_action_comboboxes = {}
for pref_name, label_str in mouse_button_prefs:
l = gtk.Label(label_str)
l.set_alignment(0.0, 0.5)
table.attach(l, 1, 2, current_row, current_row + 1, xopt, yopt)
action_name = self.app.preferences.get(pref_name, None)
c = gtk.combo_box_new_text()
self.mouse_action_comboboxes[pref_name] = c
for a, s in mouse_button_actions:
c.append_text(s)
c.connect("changed", self.mouse_button_action_changed, pref_name)
table.attach(c, 2, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
### Saving tab
table = gtk.Table(5, 3)
table.set_border_width(12)
table.set_col_spacing(0, 12)
table.set_col_spacing(1, 12)
table.set_row_spacings(6)
current_row = 0
nb.append_page(table, gtk.Label(_('Saving')))
xopt = gtk.FILL | gtk.EXPAND
yopt = gtk.FILL
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup(_('<b>Saving</b>'))
table.attach(l, 0, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
l = gtk.Label(_('Default file format:'))
l.set_alignment(0.0, 0.5)
combo = self.defaultsaveformat_combo = gtk.combo_box_new_text()
self.defaultsaveformat_values = [filehandling.SAVE_FORMAT_ORA,
filehandling.SAVE_FORMAT_PNGSOLID, filehandling.SAVE_FORMAT_JPEG]
for saveformat in self.defaultsaveformat_values:
format_desc = self.app.filehandler.saveformats[saveformat][0]
combo.append_text(format_desc)
combo.connect('changed', self.defaultsaveformat_combo_changed_cb)
table.attach(l, 1, 2, current_row, current_row + 1, xopt, yopt)
table.attach(combo, 2, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup(_('<b>Save Next Scrap</b>'))
table.attach(l, 0, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
l = gtk.Label(_('Path and filename prefix:'))
l.set_alignment(0.0, 0.5)
self.prefix_entry = gtk.Entry()
self.prefix_entry.connect('changed', self.prefix_entry_changed_cb)
table.attach(l, 1, 2, current_row, current_row + 1, xopt, yopt)
table.attach(self.prefix_entry, 2, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
### View tab
table = gtk.Table(2, 4)
table.set_border_width(12)
table.set_col_spacing(0, 12)
table.set_col_spacing(1, 12)
table.set_row_spacings(6)
current_row = 0
nb.append_page(table, gtk.Label(_('View')))
xopt = gtk.FILL | gtk.EXPAND
yopt = gtk.FILL
l = gtk.Label()
l.set_alignment(0.0, 0.5)
l.set_markup(_('<b>Default View</b>'))
table.attach(l, 0, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
l = gtk.Label(_('Default zoom:'))
l.set_alignment(0.0, 0.5)
combo = self.defaultzoom_combo = gtk.combo_box_new_text()
# Different from doc.zoomlevel_values because we only want a subset
# - keep sorted for bisect
self.defaultzoom_values = [0.25, 0.50, 1.0, 2.0]
for val in self.defaultzoom_values:
combo.append_text('%d%%' % (val*100))
combo.connect('changed', self.defaultzoom_combo_changed_cb)
table.attach(l, 1, 2, current_row, current_row + 1, xopt, yopt)
table.attach(combo, 2, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
b = self.highqualityzoom_checkbox = gtk.CheckButton(_('High quality zoom (may result in slow scrolling)'))
b.connect('toggled', self.highqualityzoom_checkbox_changed_cb)
table.attach(b, 0, 3, current_row, current_row + 1, xopt, yopt)
current_row += 1
def on_response(self, dialog, response, *args):
if response == gtk.RESPONSE_ACCEPT:
self.app.save_settings()
self.hide()
elif response == RESPONSE_REVERT:
self.app.load_settings()
self.app.apply_settings()
def update_ui(self):
"""Update the preferences window to reflect the current settings."""
if self.in_update_ui:
return
self.in_update_ui = True
p = self.app.preferences
self.cv.points = p['input.global_pressure_mapping']
self.prefix_entry.set_text(p['saving.scrap_prefix'])
# Device mode
mode_config = p.get("input.device_mode", None)
mode_idx = i = 0
for mode_name, junk in device_modes:
if mode_config == mode_name:
mode_idx = i
break
i += 1
self.input_devices_combo.set_active(mode_idx)
zoom = p['view.default_zoom']
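        # bisect_left on the sorted defaultzoom_values gives the index of the
        # first stored zoom level >= the configured value; min() clamps the
        # index when the value exceeds the largest entry.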
zoomlevel = min(bisect_left(self.defaultzoom_values, zoom),
len(self.defaultzoom_values) - 1)
self.defaultzoom_combo.set_active(zoomlevel)
self.highqualityzoom_checkbox.set_active(p['view.high_quality_zoom'])
saveformat_config = p['saving.default_format']
saveformat_idx = self.app.filehandler.config2saveformat[saveformat_config]
idx = self.defaultsaveformat_values.index(saveformat_idx)
# FIXME: ^^^^^^^^^ try/catch/default may be more tolerant & futureproof
self.defaultsaveformat_combo.set_active(idx)
# Mouse button
for pref_name, junk in mouse_button_prefs:
action_config = p.get(pref_name, None)
action_idx = i = 0
for action_name, junk in mouse_button_actions:
if action_config == action_name:
action_idx = i
break
i += 1
combobox = self.mouse_action_comboboxes[pref_name]
combobox.set_active(action_idx)
self.cv.queue_draw()
self.in_update_ui = False
# Callbacks for widgets that manipulate settings
def input_devices_combo_changed_cb(self, widget):
i = widget.get_property("active")
mode = device_modes[i][0]
self.app.preferences['input.device_mode'] = mode
self.app.apply_settings()
def mouse_button_action_changed(self, widget, pref_name):
i = widget.get_property("active")
action = mouse_button_actions[i][0]
self.app.preferences[pref_name] = action
self.app.apply_settings()
def pressure_curve_changed_cb(self, widget):
self.app.preferences['input.global_pressure_mapping'] = self.cv.points[:]
self.app.apply_settings()
def prefix_entry_changed_cb(self, widget):
self.app.preferences['saving.scrap_prefix'] = widget.get_text()
def defaultzoom_combo_changed_cb(self, widget):
zoomlevel = self.defaultzoom_combo.get_active()
zoom = self.defaultzoom_values[zoomlevel]
self.app.preferences['view.default_zoom'] = zoom
def highqualityzoom_checkbox_changed_cb(self, widget):
self.app.preferences['view.high_quality_zoom'] = bool(widget.get_active())
self.app.doc.tdw.queue_draw()
def defaultsaveformat_combo_changed_cb(self, widget):
idx = self.defaultsaveformat_combo.get_active()
saveformat = self.defaultsaveformat_values[idx]
        # Reverse lookup: find the config key for the chosen save format
        for key, val in self.app.filehandler.config2saveformat.iteritems():
            if val == saveformat:
                formatstr = key
                break
        self.app.preferences['saving.default_format'] = formatstr
| gpl-2.0 |
Acidburn0zzz/servo | tests/wpt/web-platform-tests/tools/wptserve/tests/functional/test_pipes.py | 10 | 9029 | import os
import unittest
import time
import json
from six.moves import urllib
import pytest
wptserve = pytest.importorskip("wptserve")
from .base import TestUsingServer, doc_root
class TestStatus(TestUsingServer):
def test_status(self):
resp = self.request("/document.txt", query="pipe=status(202)")
self.assertEqual(resp.getcode(), 202)
class TestHeader(TestUsingServer):
def test_not_set(self):
resp = self.request("/document.txt", query="pipe=header(X-TEST,PASS)")
self.assertEqual(resp.info()["X-TEST"], "PASS")
def test_set(self):
resp = self.request("/document.txt", query="pipe=header(Content-Type,text/html)")
self.assertEqual(resp.info()["Content-Type"], "text/html")
def test_multiple(self):
resp = self.request("/document.txt", query="pipe=header(X-Test,PASS)|header(Content-Type,text/html)")
self.assertEqual(resp.info()["X-TEST"], "PASS")
self.assertEqual(resp.info()["Content-Type"], "text/html")
def test_multiple_same(self):
resp = self.request("/document.txt", query="pipe=header(Content-Type,FAIL)|header(Content-Type,text/html)")
self.assertEqual(resp.info()["Content-Type"], "text/html")
def test_multiple_append(self):
resp = self.request("/document.txt", query="pipe=header(X-Test,1)|header(X-Test,2,True)")
self.assert_multiple_headers(resp, "X-Test", ["1", "2"])
class TestSlice(TestUsingServer):
def test_both_bounds(self):
resp = self.request("/document.txt", query="pipe=slice(1,10)")
expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
self.assertEqual(resp.read(), expected[1:10])
def test_no_upper(self):
resp = self.request("/document.txt", query="pipe=slice(1)")
expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
self.assertEqual(resp.read(), expected[1:])
def test_no_lower(self):
resp = self.request("/document.txt", query="pipe=slice(null,10)")
expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
self.assertEqual(resp.read(), expected[:10])
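# Summarising the behaviour exercised above: multiple pipes compose
# left-to-right with "|", each transforming the response in turn, e.g.
#
#     /document.txt?pipe=slice(1,10)|header(X-Test,PASS)
#
# slices the body and sets the header on the same response.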
class TestSub(TestUsingServer):
def test_sub_config(self):
resp = self.request("/sub.txt", query="pipe=sub")
expected = b"localhost localhost %i" % self.server.port
self.assertEqual(resp.read().rstrip(), expected)
def test_sub_file_hash(self):
resp = self.request("/sub_file_hash.sub.txt")
expected = b"""
md5: JmI1W8fMHfSfCarYOSxJcw==
sha1: nqpWqEw4IW8NjD6R375gtrQvtTo=
sha224: RqQ6fMmta6n9TuA/vgTZK2EqmidqnrwBAmQLRQ==
sha256: G6Ljg1uPejQxqFmvFOcV/loqnjPTW5GSOePOfM/u0jw=
sha384: lkXHChh1BXHN5nT5BYhi1x67E1CyYbPKRKoF2LTm5GivuEFpVVYtvEBHtPr74N9E
sha512: r8eLGRTc7ZznZkFjeVLyo6/FyQdra9qmlYCwKKxm3kfQAswRS9+3HsYk3thLUhcFmmWhK4dXaICzJwGFonfXwg=="""
self.assertEqual(resp.read().rstrip(), expected.strip())
def test_sub_file_hash_unrecognized(self):
with self.assertRaises(urllib.error.HTTPError):
self.request("/sub_file_hash_unrecognized.sub.txt")
def test_sub_headers(self):
resp = self.request("/sub_headers.txt", query="pipe=sub", headers={"X-Test": "PASS"})
expected = b"PASS"
self.assertEqual(resp.read().rstrip(), expected)
def test_sub_location(self):
resp = self.request("/sub_location.sub.txt?query_string")
expected = """
host: localhost:{0}
hostname: localhost
path: /sub_location.sub.txt
pathname: /sub_location.sub.txt
port: {0}
query: ?query_string
scheme: http
server: http://localhost:{0}""".format(self.server.port).encode("ascii")
self.assertEqual(resp.read().rstrip(), expected.strip())
def test_sub_params(self):
resp = self.request("/sub_params.txt", query="test=PASS&pipe=sub")
expected = b"PASS"
self.assertEqual(resp.read().rstrip(), expected)
def test_sub_url_base(self):
resp = self.request("/sub_url_base.sub.txt")
self.assertEqual(resp.read().rstrip(), b"Before / After")
def test_sub_url_base_via_filename_with_query(self):
resp = self.request("/sub_url_base.sub.txt?pipe=slice(5,10)")
self.assertEqual(resp.read().rstrip(), b"e / A")
def test_sub_uuid(self):
resp = self.request("/sub_uuid.sub.txt")
self.assertRegexpMatches(resp.read().rstrip(), b"Before [a-f0-9-]+ After")
def test_sub_var(self):
resp = self.request("/sub_var.sub.txt")
port = self.server.port
expected = b"localhost %d A %d B localhost C" % (port, port)
self.assertEqual(resp.read().rstrip(), expected)
def test_sub_fs_path(self):
resp = self.request("/subdir/sub_path.sub.txt")
root = os.path.abspath(doc_root)
expected = """%(root)s%(sep)ssubdir%(sep)ssub_path.sub.txt
%(root)s%(sep)ssub_path.sub.txt
%(root)s%(sep)ssub_path.sub.txt
""" % {"root": root, "sep": os.path.sep}
self.assertEqual(resp.read(), expected.encode("utf8"))
def test_sub_header_or_default(self):
resp = self.request("/sub_header_or_default.sub.txt", headers={"X-Present": "OK"})
expected = b"OK\nabsent-default"
self.assertEqual(resp.read().rstrip(), expected)
class TestTrickle(TestUsingServer):
def test_trickle(self):
        # Actually testing that the response trickles in is not that easy
t0 = time.time()
resp = self.request("/document.txt", query="pipe=trickle(1:d2:5:d1:r2)")
t1 = time.time()
expected = open(os.path.join(doc_root, "document.txt"), 'rb').read()
self.assertEqual(resp.read(), expected)
self.assertGreater(6, t1-t0)
def test_headers(self):
resp = self.request("/document.txt", query="pipe=trickle(d0.01)")
self.assertEqual(resp.info()["Cache-Control"], "no-cache, no-store, must-revalidate")
self.assertEqual(resp.info()["Pragma"], "no-cache")
self.assertEqual(resp.info()["Expires"], "0")
class TestPipesWithVariousHandlers(TestUsingServer):
def test_with_python_file_handler(self):
resp = self.request("/test_string.py", query="pipe=slice(null,2)")
self.assertEqual(resp.read(), b"PA")
def test_with_python_func_handler(self):
@wptserve.handlers.handler
def handler(request, response):
return "PASS"
route = ("GET", "/test/test_pipes_1/", handler)
self.server.router.register(*route)
resp = self.request(route[1], query="pipe=slice(null,2)")
self.assertEqual(resp.read(), b"PA")
def test_with_python_func_handler_using_response_writer(self):
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_content("PASS")
route = ("GET", "/test/test_pipes_1/", handler)
self.server.router.register(*route)
resp = self.request(route[1], query="pipe=slice(null,2)")
# slice has not been applied to the response, because response.writer was used.
self.assertEqual(resp.read(), b"PASS")
def test_header_pipe_with_python_func_using_response_writer(self):
@wptserve.handlers.handler
def handler(request, response):
response.writer.write_content("CONTENT")
route = ("GET", "/test/test_pipes_1/", handler)
self.server.router.register(*route)
resp = self.request(route[1], query="pipe=header(X-TEST,FAIL)")
# header pipe was ignored, because response.writer was used.
self.assertFalse(resp.info().get("X-TEST"))
self.assertEqual(resp.read(), b"CONTENT")
def test_with_json_handler(self):
@wptserve.handlers.json_handler
def handler(request, response):
return json.dumps({'data': 'PASS'})
route = ("GET", "/test/test_pipes_2/", handler)
self.server.router.register(*route)
resp = self.request(route[1], query="pipe=slice(null,2)")
self.assertEqual(resp.read(), b'"{')
def test_slice_with_as_is_handler(self):
resp = self.request("/test.asis", query="pipe=slice(null,2)")
self.assertEqual(202, resp.getcode())
self.assertEqual("Giraffe", resp.msg)
self.assertEqual("PASS", resp.info()["X-Test"])
# slice has not been applied to the response, because response.writer was used.
self.assertEqual(b"Content", resp.read())
def test_headers_with_as_is_handler(self):
resp = self.request("/test.asis", query="pipe=header(X-TEST,FAIL)")
self.assertEqual(202, resp.getcode())
self.assertEqual("Giraffe", resp.msg)
# header pipe was ignored.
self.assertEqual("PASS", resp.info()["X-TEST"])
self.assertEqual(b"Content", resp.read())
def test_trickle_with_as_is_handler(self):
t0 = time.time()
resp = self.request("/test.asis", query="pipe=trickle(1:d2:5:d1:r2)")
t1 = time.time()
self.assertTrue(b'Content' in resp.read())
self.assertGreater(6, t1-t0)
if __name__ == '__main__':
unittest.main()
| mpl-2.0 |
blighj/django | tests/forms_tests/field_tests/test_urlfield.py | 23 | 7737 | from django.forms import URLField, ValidationError
from django.test import SimpleTestCase
from . import FormFieldAssertionsMixin
class URLFieldTest(FormFieldAssertionsMixin, SimpleTestCase):
def test_urlfield_1(self):
f = URLField()
self.assertWidgetRendersTo(f, '<input type="url" name="f" id="id_f" required />')
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean('')
with self.assertRaisesMessage(ValidationError, "'This field is required.'"):
f.clean(None)
self.assertEqual('http://localhost', f.clean('http://localhost'))
self.assertEqual('http://example.com', f.clean('http://example.com'))
self.assertEqual('http://example.com.', f.clean('http://example.com.'))
self.assertEqual('http://www.example.com', f.clean('http://www.example.com'))
self.assertEqual('http://www.example.com:8000/test', f.clean('http://www.example.com:8000/test'))
self.assertEqual('http://valid-with-hyphens.com', f.clean('valid-with-hyphens.com'))
self.assertEqual('http://subdomain.domain.com', f.clean('subdomain.domain.com'))
self.assertEqual('http://200.8.9.10', f.clean('http://200.8.9.10'))
self.assertEqual('http://200.8.9.10:8000/test', f.clean('http://200.8.9.10:8000/test'))
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('foo')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://example')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://example.')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('com.')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('.')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://.com')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://invalid-.com')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://-invalid.com')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://inv-.alid-.com')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://inv-.-alid.com')
self.assertEqual('http://valid-----hyphens.com', f.clean('http://valid-----hyphens.com'))
self.assertEqual(
'http://some.idn.xyz\xe4\xf6\xfc\xdfabc.domain.com:123/blah',
f.clean('http://some.idn.xyzäöüßabc.domain.com:123/blah')
)
self.assertEqual(
'http://www.example.com/s/http://code.djangoproject.com/ticket/13804',
f.clean('www.example.com/s/http://code.djangoproject.com/ticket/13804')
)
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('[a')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://[a')
def test_url_regex_ticket11198(self):
f = URLField()
# hangs "forever" if catastrophic backtracking in ticket:#11198 not fixed
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://%s' % ("X" * 200,))
# a second test, to make sure the problem is really addressed, even on
# domains that don't fail the domain label length check in the regex
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://%s' % ("X" * 60,))
def test_urlfield_2(self):
f = URLField(required=False)
self.assertEqual('', f.clean(''))
self.assertEqual('', f.clean(None))
self.assertEqual('http://example.com', f.clean('http://example.com'))
self.assertEqual('http://www.example.com', f.clean('http://www.example.com'))
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('foo')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://example')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://example.')
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean('http://.com')
def test_urlfield_5(self):
f = URLField(min_length=15, max_length=20)
self.assertWidgetRendersTo(f, '<input id="id_f" type="url" name="f" maxlength="20" minlength="15" required />')
with self.assertRaisesMessage(ValidationError, "'Ensure this value has at least 15 characters (it has 12).'"):
f.clean('http://f.com')
self.assertEqual('http://example.com', f.clean('http://example.com'))
with self.assertRaisesMessage(ValidationError, "'Ensure this value has at most 20 characters (it has 37).'"):
f.clean('http://abcdefghijklmnopqrstuvwxyz.com')
def test_urlfield_6(self):
f = URLField(required=False)
self.assertEqual('http://example.com', f.clean('example.com'))
self.assertEqual('', f.clean(''))
self.assertEqual('https://example.com', f.clean('https://example.com'))
def test_urlfield_7(self):
f = URLField()
self.assertEqual('http://example.com', f.clean('http://example.com'))
self.assertEqual('http://example.com/test', f.clean('http://example.com/test'))
self.assertEqual(
'http://example.com?some_param=some_value',
f.clean('http://example.com?some_param=some_value')
)
def test_urlfield_9(self):
f = URLField()
urls = (
'http://עברית.idn.icann.org/',
'http://sãopaulo.com/',
'http://sãopaulo.com.br/',
'http://пример.испытание/',
'http://مثال.إختبار/',
'http://例子.测试/',
'http://例子.測試/',
'http://उदाहरण.परीक्षा/',
'http://例え.テスト/',
'http://مثال.آزمایشی/',
'http://실례.테스트/',
'http://العربية.idn.icann.org/',
)
for url in urls:
# Valid IDN
self.assertEqual(url, f.clean(url))
def test_urlfield_10(self):
"""URLField correctly validates IPv6 (#18779)."""
f = URLField()
urls = (
'http://[12:34::3a53]/',
'http://[a34:9238::]:8080/',
)
for url in urls:
self.assertEqual(url, f.clean(url))
def test_urlfield_not_string(self):
f = URLField(required=False)
with self.assertRaisesMessage(ValidationError, "'Enter a valid URL.'"):
f.clean(23)
def test_urlfield_normalization(self):
f = URLField()
self.assertEqual(f.clean('http://example.com/ '), 'http://example.com/')
def test_urlfield_strip_on_none_value(self):
f = URLField(required=False, empty_value=None)
self.assertIsNone(f.clean(None))
def test_urlfield_unable_to_set_strip_kwarg(self):
msg = "__init__() got multiple values for keyword argument 'strip'"
with self.assertRaisesMessage(TypeError, msg):
URLField(strip=False)
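# Minimal usage sketch mirroring the assertions above (illustrative,
# not an exhaustive summary of URLField behaviour):
#
#     f = URLField(required=False)
#     f.clean('example.com')  # -> 'http://example.com' (scheme is added)
#     f.clean('')             # -> '' when required=False
#     f.clean('foo')          # raises ValidationError: 'Enter a valid URL.'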
| bsd-3-clause |
wreckJ/intellij-community | python/lib/Lib/urllib2.py | 82 | 47582 | """An extensible library for opening URLs using a variety of protocols
The simplest way to use this module is to call the urlopen function,
which accepts a string containing a URL or a Request object (described
below). It opens the URL and returns the results as file-like
object; the returned object has some extra methods described below.
The OpenerDirector manages a collection of Handler objects that do
all the actual work. Each Handler implements a particular protocol or
option. The OpenerDirector is a composite object that invokes the
Handlers needed to open the requested URL. For example, the
HTTPHandler performs HTTP GET and POST requests and deals with
non-error returns. The HTTPRedirectHandler automatically deals with
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
deals with digest authentication.
urlopen(url, data=None) -- basic usage is the same as the original
urllib. Pass the url and optionally data to post to an HTTP URL, and
get a file-like object back. One difference is that you can also pass
a Request instance instead of a URL. Raises a URLError (a subclass of
IOError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response.
build_opener -- function that creates a new OpenerDirector instance;
it will install the default handlers. Accepts one or more Handlers as
arguments, either instances or Handler classes that it will
instantiate. If one of the arguments is a subclass of the default
handler, the argument will be installed instead of the default.
install_opener -- installs a new opener as the default opener.
objects of interest:
OpenerDirector --
Request -- an object that encapsulates the state of a request. The
state can be as simple as the URL. It can also include extra HTTP
headers, e.g. a User-Agent.
BaseHandler --
exceptions:
URLError-- a subclass of IOError, individual protocols have their own
specific subclass
HTTPError-- also a valid HTTP response, so you can treat an HTTP error
as an exceptional event or valid response
internals:
BaseHandler and parent
_call_chain conventions
Example usage:
import urllib2
# set up authentication info
authinfo = urllib2.HTTPBasicAuthHandler()
authinfo.add_password(realm='PDQ Application',
uri='https://mahler:8092/site-updates.py',
user='klem',
passwd='geheim$parole')
proxy_support = urllib2.ProxyHandler({"http" : "http://ahad-haam:3128"})
# build a new opener that adds authentication and caching FTP handlers
opener = urllib2.build_opener(proxy_support, authinfo, urllib2.CacheFTPHandler)
# install it
urllib2.install_opener(opener)
f = urllib2.urlopen('http://www.python.org/')
"""
# XXX issues:
# If an authentication error handler tries to perform authentication
# but fails, how should the error be signalled?  The client needs to
# know the HTTP error code.  But if the handler knows what the problem
# was, e.g., that it didn't know the hash algorithm requested in the
# challenge, it would be good to pass that information along to the
# client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation
# Possible extensions:
# complex proxies XXX not sure what exactly was meant by this
# abstract factory for opener
import base64
import hashlib
import httplib
import mimetools
import os
import posixpath
import random
import re
import socket
import sys
import time
import urlparse
import bisect
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from urllib import (unwrap, unquote, splittype, splithost, quote,
addinfourl, splitport, splitgophertype, splitquery,
splitattr, ftpwrapper, noheaders, splituser, splitpasswd, splitvalue)
# support for FileHandler, proxies via environment variables
from urllib import localhost, url2pathname, getproxies
# used in User-Agent header sent
__version__ = sys.version[:3]
_opener = None
def urlopen(url, data=None):
global _opener
if _opener is None:
_opener = build_opener()
return _opener.open(url, data)
def install_opener(opener):
global _opener
_opener = opener
# do these error classes make sense?
# make sure all of the IOError stuff is overridden. we just want to be
# subtypes.
class URLError(IOError):
# URLError is a sub-type of IOError, but it doesn't share any of
# the implementation. need to override __init__ and __str__.
# It sets self.args for compatibility with other EnvironmentError
# subclasses, but args doesn't have the typical format with errno in
# slot 0 and strerror in slot 1. This may be better than nothing.
def __init__(self, reason):
self.args = reason,
self.reason = reason
def __str__(self):
return '<urlopen error %s>' % self.reason
class HTTPError(URLError, addinfourl):
"""Raised when HTTP error occurs, but also acts like non-error return"""
__super_init = addinfourl.__init__
def __init__(self, url, code, msg, hdrs, fp):
self.code = code
self.msg = msg
self.hdrs = hdrs
self.fp = fp
self.filename = url
# The addinfourl classes depend on fp being a valid file
# object. In some cases, the HTTPError may not have a valid
# file object. If this happens, the simplest workaround is to
# not initialize the base classes.
if fp is not None:
self.__super_init(fp, hdrs, url)
def __str__(self):
return 'HTTP Error %s: %s' % (self.code, self.msg)
class GopherError(URLError):
pass
# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$")
def request_host(request):
"""Return request-host, as defined by RFC 2965.
Variation from RFC: returned value is lowercased, for convenient
comparison.
"""
url = request.get_full_url()
host = urlparse.urlparse(url)[1]
if host == "":
host = request.get_header("Host", "")
# remove port, if present
host = _cut_port_re.sub("", host, 1)
return host.lower()
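# For example (consistent with the regex and docstring above), a request
# for "http://www.EXAMPLE.com:8080/spam" yields the request-host
# "www.example.com": the port is stripped and the name lowercased.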
class Request:
def __init__(self, url, data=None, headers={},
origin_req_host=None, unverifiable=False):
# unwrap('<URL:type://host/path>') --> 'type://host/path'
self.__original = unwrap(url)
self.type = None
# self.__r_type is what's left after doing the splittype
self.host = None
self.port = None
self.data = data
self.headers = {}
for key, value in headers.items():
self.add_header(key, value)
self.unredirected_hdrs = {}
if origin_req_host is None:
origin_req_host = request_host(self)
self.origin_req_host = origin_req_host
self.unverifiable = unverifiable
def __getattr__(self, attr):
# XXX this is a fallback mechanism to guard against these
# methods getting called in a non-standard order. this may be
# too complicated and/or unnecessary.
# XXX should the __r_XXX attributes be public?
if attr[:12] == '_Request__r_':
name = attr[12:]
if hasattr(Request, 'get_' + name):
getattr(self, 'get_' + name)()
return getattr(self, attr)
raise AttributeError, attr
def get_method(self):
if self.has_data():
return "POST"
else:
return "GET"
# XXX these helper methods are lame
def add_data(self, data):
self.data = data
def has_data(self):
return self.data is not None
def get_data(self):
return self.data
def get_full_url(self):
return self.__original
def get_type(self):
if self.type is None:
self.type, self.__r_type = splittype(self.__original)
if self.type is None:
raise ValueError, "unknown url type: %s" % self.__original
return self.type
def get_host(self):
if self.host is None:
self.host, self.__r_host = splithost(self.__r_type)
if self.host:
self.host = unquote(self.host)
return self.host
def get_selector(self):
return self.__r_host
def set_proxy(self, host, type):
self.host, self.type = host, type
self.__r_host = self.__original
def get_origin_req_host(self):
return self.origin_req_host
def is_unverifiable(self):
return self.unverifiable
def add_header(self, key, val):
# useful for something like authentication
self.headers[key.capitalize()] = val
def add_unredirected_header(self, key, val):
# will not be added to a redirected request
self.unredirected_hdrs[key.capitalize()] = val
def has_header(self, header_name):
return (header_name in self.headers or
header_name in self.unredirected_hdrs)
def get_header(self, header_name, default=None):
return self.headers.get(
header_name,
self.unredirected_hdrs.get(header_name, default))
def header_items(self):
hdrs = self.unredirected_hdrs.copy()
hdrs.update(self.headers)
return hdrs.items()
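# Usage sketch for the Request API above (illustrative, not from the
# original module):
#
#     req = Request('http://www.example.com/', data='k=v')
#     req.get_method()          # 'POST', because data is present
#     req.add_header('X-Test', 'yes')
#     req.get_header('X-test')  # 'yes'; add_header() capitalize()s keys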
class OpenerDirector:
def __init__(self):
client_version = "Python-urllib/%s" % __version__
self.addheaders = [('User-agent', client_version)]
# manage the individual handlers
self.handlers = []
self.handle_open = {}
self.handle_error = {}
self.process_response = {}
self.process_request = {}
def add_handler(self, handler):
if not hasattr(handler, "add_parent"):
raise TypeError("expected BaseHandler instance, got %r" %
type(handler))
added = False
for meth in dir(handler):
if meth in ["redirect_request", "do_open", "proxy_open"]:
# oops, coincidental match
continue
i = meth.find("_")
protocol = meth[:i]
condition = meth[i+1:]
if condition.startswith("error"):
j = condition.find("_") + i + 1
kind = meth[j+1:]
try:
kind = int(kind)
except ValueError:
pass
lookup = self.handle_error.get(protocol, {})
self.handle_error[protocol] = lookup
elif condition == "open":
kind = protocol
lookup = self.handle_open
elif condition == "response":
kind = protocol
lookup = self.process_response
elif condition == "request":
kind = protocol
lookup = self.process_request
else:
continue
handlers = lookup.setdefault(kind, [])
if handlers:
bisect.insort(handlers, handler)
else:
handlers.append(handler)
added = True
if added:
# XXX why does self.handlers need to be sorted?
bisect.insort(self.handlers, handler)
handler.add_parent(self)
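    # The method-name convention parsed above, for a handler that defines
    # these (hypothetical) methods:
    #
    #     http_open        -> self.handle_open['http']
    #     http_error_302   -> self.handle_error['http'][302]
    #     http_request     -> self.process_request['http']
    #     http_response    -> self.process_response['http']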
def close(self):
# Only exists for backwards compatibility.
pass
def _call_chain(self, chain, kind, meth_name, *args):
# Handlers raise an exception if no one else should try to handle
# the request, or return None if they can't but another handler
# could. Otherwise, they return the response.
handlers = chain.get(kind, ())
for handler in handlers:
func = getattr(handler, meth_name)
result = func(*args)
if result is not None:
return result
def open(self, fullurl, data=None):
# accept a URL or a Request object
if isinstance(fullurl, basestring):
req = Request(fullurl, data)
else:
req = fullurl
if data is not None:
req.add_data(data)
protocol = req.get_type()
# pre-process request
meth_name = protocol+"_request"
for processor in self.process_request.get(protocol, []):
meth = getattr(processor, meth_name)
req = meth(req)
response = self._open(req, data)
# post-process response
meth_name = protocol+"_response"
for processor in self.process_response.get(protocol, []):
meth = getattr(processor, meth_name)
response = meth(req, response)
return response
def _open(self, req, data=None):
result = self._call_chain(self.handle_open, 'default',
'default_open', req)
if result:
return result
protocol = req.get_type()
result = self._call_chain(self.handle_open, protocol, protocol +
'_open', req)
if result:
return result
return self._call_chain(self.handle_open, 'unknown',
'unknown_open', req)
def error(self, proto, *args):
if proto in ('http', 'https'):
# XXX http[s] protocols are special-cased
dict = self.handle_error['http'] # https is not different than http
proto = args[2] # YUCK!
meth_name = 'http_error_%s' % proto
http_err = 1
orig_args = args
else:
dict = self.handle_error
meth_name = proto + '_error'
http_err = 0
args = (dict, proto, meth_name) + args
result = self._call_chain(*args)
if result:
return result
if http_err:
args = (dict, 'default', 'http_error_default') + orig_args
return self._call_chain(*args)
# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both
def build_opener(*handlers):
"""Create an opener object from a list of handlers.
The opener will use several default handlers, including support
for HTTP and FTP.
If any of the handlers passed as arguments are subclasses of the
default handlers, the default handlers will not be used.
"""
import types
def isclass(obj):
return isinstance(obj, types.ClassType) or hasattr(obj, "__bases__")
opener = OpenerDirector()
default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
HTTPDefaultErrorHandler, HTTPRedirectHandler,
FTPHandler, FileHandler, HTTPErrorProcessor]
if hasattr(httplib, 'HTTPS'):
default_classes.append(HTTPSHandler)
skip = set()
for klass in default_classes:
for check in handlers:
if isclass(check):
if issubclass(check, klass):
skip.add(klass)
elif isinstance(check, klass):
skip.add(klass)
for klass in skip:
default_classes.remove(klass)
for klass in default_classes:
opener.add_handler(klass())
for h in handlers:
if isclass(h):
h = h()
opener.add_handler(h)
return opener
class BaseHandler:
handler_order = 500
def add_parent(self, parent):
self.parent = parent
def close(self):
# Only exists for backwards compatibility
pass
def __lt__(self, other):
if not hasattr(other, "handler_order"):
# Try to preserve the old behavior of having custom classes
# inserted after default ones (works only for custom user
# classes which are not aware of handler_order).
return True
return self.handler_order < other.handler_order
class HTTPErrorProcessor(BaseHandler):
"""Process HTTP error responses."""
handler_order = 1000 # after all other processing
def http_response(self, request, response):
code, msg, hdrs = response.code, response.msg, response.info()
if code not in (200, 206):
response = self.parent.error(
'http', request, response, code, msg, hdrs)
return response
https_response = http_response
class HTTPDefaultErrorHandler(BaseHandler):
def http_error_default(self, req, fp, code, msg, hdrs):
raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
class HTTPRedirectHandler(BaseHandler):
# maximum number of redirections to any single URL
# this is needed because of the state that cookies introduce
max_repeats = 4
# maximum total number of redirections (regardless of URL) before
# assuming we're in a loop
max_redirections = 10
def redirect_request(self, req, fp, code, msg, headers, newurl):
"""Return a Request or None in response to a redirect.
This is called by the http_error_30x methods when a
redirection response is received. If a redirection should
take place, return a new Request to allow http_error_30x to
perform the redirect. Otherwise, raise HTTPError if no-one
else should try to handle this url. Return None if you can't
but another Handler might.
"""
m = req.get_method()
if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
or code in (301, 302, 303) and m == "POST"):
# Strictly (according to RFC 2616), 301 or 302 in response
# to a POST MUST NOT cause a redirection without confirmation
# from the user (of urllib2, in this case). In practice,
# essentially all clients do redirect in this case, so we
# do the same.
# be conciliant with URIs containing a space
newurl = newurl.replace(' ', '%20')
return Request(newurl,
headers=req.headers,
origin_req_host=req.get_origin_req_host(),
unverifiable=True)
else:
raise HTTPError(req.get_full_url(), code, msg, headers, fp)
# Implementation note: To avoid the server sending us into an
# infinite loop, the request object needs to track what URLs we
# have already seen. Do this by adding a handler-specific
# attribute to the Request object.
def http_error_302(self, req, fp, code, msg, headers):
# Some servers (incorrectly) return multiple Location headers
# (so probably same goes for URI). Use first header.
if 'location' in headers:
newurl = headers.getheaders('location')[0]
elif 'uri' in headers:
newurl = headers.getheaders('uri')[0]
else:
return
newurl = urlparse.urljoin(req.get_full_url(), newurl)
# XXX Probably want to forget about the state of the current
# request, although that might interact poorly with other
# handlers that also use handler-specific request attributes
new = self.redirect_request(req, fp, code, msg, headers, newurl)
if new is None:
return
# loop detection
# .redirect_dict has a key url if url was previously visited.
if hasattr(req, 'redirect_dict'):
visited = new.redirect_dict = req.redirect_dict
if (visited.get(newurl, 0) >= self.max_repeats or
len(visited) >= self.max_redirections):
raise HTTPError(req.get_full_url(), code,
self.inf_msg + msg, headers, fp)
else:
visited = new.redirect_dict = req.redirect_dict = {}
visited[newurl] = visited.get(newurl, 0) + 1
# Don't close the fp until we are sure that we won't use it
# with HTTPError.
fp.read()
fp.close()
return self.parent.open(new)
http_error_301 = http_error_303 = http_error_307 = http_error_302
inf_msg = "The HTTP server returned a redirect error that would " \
"lead to an infinite loop.\n" \
"The last 30x error message was:\n"
def _parse_proxy(proxy):
"""Return (scheme, user, password, host/port) given a URL or an authority.
If a URL is supplied, it must have an authority (host:port) component.
According to RFC 3986, having an authority component means the URL must
have two slashes after the scheme:
>>> _parse_proxy('file:/ftp.example.com/')
Traceback (most recent call last):
ValueError: proxy URL with no authority: 'file:/ftp.example.com/'
The first three items of the returned tuple may be None.
Examples of authority parsing:
>>> _parse_proxy('proxy.example.com')
(None, None, None, 'proxy.example.com')
>>> _parse_proxy('proxy.example.com:3128')
(None, None, None, 'proxy.example.com:3128')
The authority component may optionally include userinfo (assumed to be
username:password):
>>> _parse_proxy('joe:password@proxy.example.com')
(None, 'joe', 'password', 'proxy.example.com')
>>> _parse_proxy('joe:password@proxy.example.com:3128')
(None, 'joe', 'password', 'proxy.example.com:3128')
Same examples, but with URLs instead:
>>> _parse_proxy('http://proxy.example.com/')
('http', None, None, 'proxy.example.com')
>>> _parse_proxy('http://proxy.example.com:3128/')
('http', None, None, 'proxy.example.com:3128')
>>> _parse_proxy('http://joe:password@proxy.example.com/')
('http', 'joe', 'password', 'proxy.example.com')
>>> _parse_proxy('http://joe:password@proxy.example.com:3128')
('http', 'joe', 'password', 'proxy.example.com:3128')
Everything after the authority is ignored:
>>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
('ftp', 'joe', 'password', 'proxy.example.com')
Test for no trailing '/' case:
>>> _parse_proxy('http://joe:password@proxy.example.com')
('http', 'joe', 'password', 'proxy.example.com')
"""
scheme, r_scheme = splittype(proxy)
if not r_scheme.startswith("/"):
# authority
scheme = None
authority = proxy
else:
# URL
if not r_scheme.startswith("//"):
raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by
        # sections 3.2 and 3.3), path is empty or starts with '/'
end = r_scheme.find("/", 2)
if end == -1:
end = None
authority = r_scheme[2:end]
userinfo, hostport = splituser(authority)
if userinfo is not None:
user, password = splitpasswd(userinfo)
else:
user = password = None
return scheme, user, password, hostport
class ProxyHandler(BaseHandler):
# Proxies must be in front
handler_order = 100
def __init__(self, proxies=None):
if proxies is None:
proxies = getproxies()
assert hasattr(proxies, 'has_key'), "proxies must be a mapping"
self.proxies = proxies
for type, url in proxies.items():
setattr(self, '%s_open' % type,
lambda r, proxy=url, type=type, meth=self.proxy_open: \
meth(r, proxy, type))
def proxy_open(self, req, proxy, type):
orig_type = req.get_type()
proxy_type, user, password, hostport = _parse_proxy(proxy)
if proxy_type is None:
proxy_type = orig_type
if user and password:
user_pass = '%s:%s' % (unquote(user), unquote(password))
creds = base64.b64encode(user_pass).strip()
req.add_header('Proxy-authorization', 'Basic ' + creds)
hostport = unquote(hostport)
req.set_proxy(hostport, proxy_type)
if orig_type == proxy_type:
# let other handlers take care of it
return None
else:
# need to start over, because the other handlers don't
# grok the proxy's URL type
# e.g. if we have a constructor arg proxies like so:
# {'http': 'ftp://proxy.example.com'}, we may end up turning
# a request for http://acme.example.com/a into one for
# ftp://proxy.example.com/a
return self.parent.open(req)
class HTTPPasswordMgr:
def __init__(self):
self.passwd = {}
def add_password(self, realm, uri, user, passwd):
# uri could be a single URI or a sequence
if isinstance(uri, basestring):
uri = [uri]
if not realm in self.passwd:
self.passwd[realm] = {}
for default_port in True, False:
reduced_uri = tuple(
[self.reduce_uri(u, default_port) for u in uri])
self.passwd[realm][reduced_uri] = (user, passwd)
def find_user_password(self, realm, authuri):
domains = self.passwd.get(realm, {})
for default_port in True, False:
reduced_authuri = self.reduce_uri(authuri, default_port)
for uris, authinfo in domains.iteritems():
for uri in uris:
if self.is_suburi(uri, reduced_authuri):
return authinfo
return None, None
def reduce_uri(self, uri, default_port=True):
"""Accept authority or URI and extract only the authority and path."""
# note HTTP URLs do not have a userinfo component
parts = urlparse.urlsplit(uri)
if parts[1]:
# URI
scheme = parts[0]
authority = parts[1]
path = parts[2] or '/'
else:
# host or host:port
scheme = None
authority = uri
path = '/'
host, port = splitport(authority)
if default_port and port is None and scheme is not None:
dport = {"http": 80,
"https": 443,
}.get(scheme)
if dport is not None:
authority = "%s:%d" % (host, dport)
return authority, path
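    # Examples derived from the logic above:
    #
    #     reduce_uri('http://example.com/path')  ->  ('example.com:80', '/path')
    #     reduce_uri('example.com')              ->  ('example.com', '/')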
def is_suburi(self, base, test):
"""Check if test is below base in a URI tree
Both args must be URIs in reduced form.
"""
if base == test:
return True
if base[0] != test[0]:
return False
common = posixpath.commonprefix((base[1], test[1]))
if len(common) == len(base[1]):
return True
return False
class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):
def find_user_password(self, realm, authuri):
user, password = HTTPPasswordMgr.find_user_password(self, realm,
authuri)
if user is not None:
return user, password
return HTTPPasswordMgr.find_user_password(self, None, authuri)
class AbstractBasicAuthHandler:
# XXX this allows for multiple auth-schemes, but will stupidly pick
# the last one with a realm specified.
rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', re.I)
# XXX could pre-emptively send auth info already accepted (RFC 2617,
# end of section 2, and section 1.2 immediately after "credentials"
# production).
def __init__(self, password_mgr=None):
if password_mgr is None:
password_mgr = HTTPPasswordMgr()
self.passwd = password_mgr
self.add_password = self.passwd.add_password
def http_error_auth_reqed(self, authreq, host, req, headers):
# host may be an authority (without userinfo) or a URL with an
# authority
# XXX could be multiple headers
authreq = headers.get(authreq, None)
if authreq:
mo = AbstractBasicAuthHandler.rx.search(authreq)
if mo:
scheme, realm = mo.groups()
if scheme.lower() == 'basic':
return self.retry_http_basic_auth(host, req, realm)
def retry_http_basic_auth(self, host, req, realm):
user, pw = self.passwd.find_user_password(realm, host)
if pw is not None:
raw = "%s:%s" % (user, pw)
auth = 'Basic %s' % base64.b64encode(raw).strip()
if req.headers.get(self.auth_header, None) == auth:
return None
req.add_header(self.auth_header, auth)
return self.parent.open(req)
else:
return None
class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
auth_header = 'Authorization'
def http_error_401(self, req, fp, code, msg, headers):
url = req.get_full_url()
return self.http_error_auth_reqed('www-authenticate',
url, req, headers)
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
auth_header = 'Proxy-authorization'
def http_error_407(self, req, fp, code, msg, headers):
# http_error_auth_reqed requires that there is no userinfo component in
# authority. Assume there isn't one, since urllib2 does not (and
# should not, RFC 3986 s. 3.2.1) support requests for URLs containing
# userinfo.
authority = req.get_host()
return self.http_error_auth_reqed('proxy-authenticate',
authority, req, headers)
def randombytes(n):
"""Return n random bytes."""
# Use /dev/urandom if it is available. Fall back to random module
# if not. It might be worthwhile to extend this function to use
# other platform-specific mechanisms for getting random bytes.
if os.path.exists("/dev/urandom"):
f = open("/dev/urandom")
s = f.read(n)
f.close()
return s
else:
L = [chr(random.randrange(0, 256)) for i in range(n)]
return "".join(L)
class AbstractDigestAuthHandler:
# Digest authentication is specified in RFC 2617.
# XXX The client does not inspect the Authentication-Info header
# in a successful response.
# XXX It should be possible to test this implementation against
# a mock server that just generates a static set of challenges.
# XXX qop="auth-int" supports is shaky
def __init__(self, passwd=None):
if passwd is None:
passwd = HTTPPasswordMgr()
self.passwd = passwd
self.add_password = self.passwd.add_password
self.retried = 0
self.nonce_count = 0
def reset_retry_count(self):
self.retried = 0
def http_error_auth_reqed(self, auth_header, host, req, headers):
authreq = headers.get(auth_header, None)
if self.retried > 5:
# Don't fail endlessly - if we failed once, we'll probably
# fail a second time. Hm. Unless the Password Manager is
# prompting for the information. Crap. This isn't great
# but it's better than the current 'repeat until recursion
# depth exceeded' approach <wink>
raise HTTPError(req.get_full_url(), 401, "digest auth failed",
headers, None)
else:
self.retried += 1
if authreq:
scheme = authreq.split()[0]
if scheme.lower() == 'digest':
return self.retry_http_digest_auth(req, authreq)
def retry_http_digest_auth(self, req, auth):
token, challenge = auth.split(' ', 1)
chal = parse_keqv_list(parse_http_list(challenge))
auth = self.get_authorization(req, chal)
if auth:
auth_val = 'Digest %s' % auth
if req.headers.get(self.auth_header, None) == auth_val:
return None
req.add_unredirected_header(self.auth_header, auth_val)
resp = self.parent.open(req)
return resp
def get_cnonce(self, nonce):
# The cnonce-value is an opaque
# quoted string value provided by the client and used by both client
# and server to avoid chosen plaintext attacks, to provide mutual
# authentication, and to provide some message integrity protection.
# This isn't a fabulous effort, but it's probably Good Enough.
dig = hashlib.sha1("%s:%s:%s:%s" % (self.nonce_count, nonce, time.ctime(),
randombytes(8))).hexdigest()
return dig[:16]
def get_authorization(self, req, chal):
try:
realm = chal['realm']
nonce = chal['nonce']
qop = chal.get('qop')
algorithm = chal.get('algorithm', 'MD5')
# mod_digest doesn't send an opaque, even though it isn't
# supposed to be optional
opaque = chal.get('opaque', None)
except KeyError:
return None
H, KD = self.get_algorithm_impls(algorithm)
if H is None:
return None
user, pw = self.passwd.find_user_password(realm, req.get_full_url())
if user is None:
return None
# XXX not implemented yet
if req.has_data():
entdig = self.get_entity_digest(req.get_data(), chal)
else:
entdig = None
A1 = "%s:%s:%s" % (user, realm, pw)
A2 = "%s:%s" % (req.get_method(),
# XXX selector: what about proxies and full urls
req.get_selector())
if qop == 'auth':
self.nonce_count += 1
ncvalue = '%08x' % self.nonce_count
cnonce = self.get_cnonce(nonce)
noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
respdig = KD(H(A1), noncebit)
elif qop is None:
respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
else:
# XXX handle auth-int.
raise URLError("qop '%s' is not supported." % qop)
# XXX should the partial digests be encoded too?
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
'response="%s"' % (user, realm, nonce, req.get_selector(),
respdig)
if opaque:
base += ', opaque="%s"' % opaque
if entdig:
base += ', digest="%s"' % entdig
base += ', algorithm="%s"' % algorithm
if qop:
base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
return base
def get_algorithm_impls(self, algorithm):
# lambdas assume digest modules are imported at the top level
if algorithm == 'MD5':
H = lambda x: hashlib.md5(x).hexdigest()
elif algorithm == 'SHA':
H = lambda x: hashlib.sha1(x).hexdigest()
# XXX MD5-sess
KD = lambda s, d: H("%s:%s" % (s, d))
return H, KD
def get_entity_digest(self, data, chal):
# XXX not implemented yet
return None
class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
"""An authentication protocol defined by RFC 2069
Digest authentication improves on basic authentication because it
does not transmit passwords in the clear.
"""
auth_header = 'Authorization'
handler_order = 490 # before Basic auth
def http_error_401(self, req, fp, code, msg, headers):
host = urlparse.urlparse(req.get_full_url())[1]
retry = self.http_error_auth_reqed('www-authenticate',
host, req, headers)
self.reset_retry_count()
return retry
class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
auth_header = 'Proxy-Authorization'
handler_order = 490 # before Basic auth
def http_error_407(self, req, fp, code, msg, headers):
host = req.get_host()
retry = self.http_error_auth_reqed('proxy-authenticate',
host, req, headers)
self.reset_retry_count()
return retry
class AbstractHTTPHandler(BaseHandler):
def __init__(self, debuglevel=0):
self._debuglevel = debuglevel
def set_http_debuglevel(self, level):
self._debuglevel = level
def do_request_(self, request):
host = request.get_host()
if not host:
raise URLError('no host given')
if request.has_data(): # POST
data = request.get_data()
if not request.has_header('Content-type'):
request.add_unredirected_header(
'Content-type',
'application/x-www-form-urlencoded')
if not request.has_header('Content-length'):
request.add_unredirected_header(
'Content-length', '%d' % len(data))
scheme, sel = splittype(request.get_selector())
sel_host, sel_path = splithost(sel)
if not request.has_header('Host'):
request.add_unredirected_header('Host', sel_host or host)
for name, value in self.parent.addheaders:
name = name.capitalize()
if not request.has_header(name):
request.add_unredirected_header(name, value)
return request
def do_open(self, http_class, req):
"""Return an addinfourl object for the request, using http_class.
http_class must implement the HTTPConnection API from httplib.
The addinfourl return value is a file-like object. It also
has methods and attributes including:
- info(): return a mimetools.Message object for the headers
- geturl(): return the original request URL
- code: HTTP status code
"""
host = req.get_host()
if not host:
raise URLError('no host given')
h = http_class(host) # will parse host:port
h.set_debuglevel(self._debuglevel)
headers = dict(req.headers)
headers.update(req.unredirected_hdrs)
# We want to make an HTTP/1.1 request, but the addinfourl
# class isn't prepared to deal with a persistent connection.
# It will try to read all remaining data from the socket,
# which will block while the server waits for the next request.
# So make sure the connection gets closed after the (only)
# request.
headers["Connection"] = "close"
headers = dict(
(name.title(), val) for name, val in headers.items())
try:
h.request(req.get_method(), req.get_selector(), req.data, headers)
r = h.getresponse()
except socket.error, err: # XXX what error?
raise URLError(err)
# Pick apart the HTTPResponse object to get the addinfourl
# object initialized properly.
# Wrap the HTTPResponse object in socket's file object adapter
# for Windows. That adapter calls recv(), so delegate recv()
# to read(). This weird wrapping allows the returned object to
# have readline() and readlines() methods.
# XXX It might be better to extract the read buffering code
# out of socket._fileobject() and into a base class.
r.recv = r.read
fp = socket._fileobject(r, close=True)
resp = addinfourl(fp, r.msg, req.get_full_url())
resp.code = r.status
resp.msg = r.reason
return resp
class HTTPHandler(AbstractHTTPHandler):
def http_open(self, req):
return self.do_open(httplib.HTTPConnection, req)
http_request = AbstractHTTPHandler.do_request_
if hasattr(httplib, 'HTTPS'):
class HTTPSHandler(AbstractHTTPHandler):
def https_open(self, req):
return self.do_open(httplib.HTTPSConnection, req)
https_request = AbstractHTTPHandler.do_request_
class HTTPCookieProcessor(BaseHandler):
def __init__(self, cookiejar=None):
import cookielib
if cookiejar is None:
cookiejar = cookielib.CookieJar()
self.cookiejar = cookiejar
def http_request(self, request):
self.cookiejar.add_cookie_header(request)
return request
def http_response(self, request, response):
self.cookiejar.extract_cookies(response, request)
return response
https_request = http_request
https_response = http_response
class UnknownHandler(BaseHandler):
def unknown_open(self, req):
type = req.get_type()
raise URLError('unknown url type: %s' % type)
def parse_keqv_list(l):
"""Parse list of key=value strings where keys are not duplicated."""
parsed = {}
for elt in l:
k, v = elt.split('=', 1)
if v[0] == '"' and v[-1] == '"':
v = v[1:-1]
parsed[k] = v
return parsed
def parse_http_list(s):
"""Parse lists as described by RFC 2068 Section 2.
In particular, parse comma-separated lists where the elements of
the list may include quoted-strings. A quoted-string could
contain a comma. A non-quoted string could have quotes in the
middle. Neither commas nor quotes count if they are escaped.
Only double-quotes count, not single-quotes.
"""
res = []
part = ''
escape = quote = False
for cur in s:
if escape:
part += cur
escape = False
continue
if quote:
if cur == '\\':
escape = True
continue
elif cur == '"':
quote = False
part += cur
continue
if cur == ',':
res.append(part)
part = ''
continue
if cur == '"':
quote = True
part += cur
# append last part
if part:
res.append(part)
return [part.strip() for part in res]
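# Hedged usage sketch (editorial addition, not part of the original module):
# how the two parsers above cooperate when splitting a Digest-auth style
# challenge; the header value is illustrative only.
#
#   >>> parse_http_list('realm="test", qop="auth,auth-int", nc=00000001')
#   ['realm="test"', 'qop="auth,auth-int"', 'nc=00000001']
#   >>> parse_keqv_list(parse_http_list('realm="test"'))
#   {'realm': 'test'}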
class FileHandler(BaseHandler):
# Use local file or FTP depending on form of URL
def file_open(self, req):
url = req.get_selector()
if url[:2] == '//' and url[2:3] != '/':
req.type = 'ftp'
return self.parent.open(req)
else:
return self.open_local_file(req)
# names for the localhost
names = None
def get_names(self):
if FileHandler.names is None:
try:
FileHandler.names = (socket.gethostbyname('localhost'),
socket.gethostbyname(socket.gethostname()))
except socket.gaierror:
FileHandler.names = (socket.gethostbyname('localhost'),)
return FileHandler.names
# not entirely sure what the rules are here
def open_local_file(self, req):
import email.Utils
import mimetypes
host = req.get_host()
file = req.get_selector()
localfile = url2pathname(file)
stats = os.stat(localfile)
size = stats.st_size
modified = email.Utils.formatdate(stats.st_mtime, usegmt=True)
mtype = mimetypes.guess_type(file)[0]
headers = mimetools.Message(StringIO(
'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
(mtype or 'text/plain', size, modified)))
if host:
host, port = splitport(host)
if not host or \
(not port and socket.gethostbyname(host) in self.get_names()):
return addinfourl(open(localfile, 'rb'),
headers, 'file:'+file)
raise URLError('file not on local host')
class FTPHandler(BaseHandler):
def ftp_open(self, req):
import ftplib
import mimetypes
host = req.get_host()
if not host:
raise IOError, ('ftp error', 'no host given')
host, port = splitport(host)
if port is None:
port = ftplib.FTP_PORT
else:
port = int(port)
# username/password handling
user, host = splituser(host)
if user:
user, passwd = splitpasswd(user)
else:
passwd = None
host = unquote(host)
user = unquote(user or '')
passwd = unquote(passwd or '')
try:
host = socket.gethostbyname(host)
except socket.error, msg:
raise URLError(msg)
path, attrs = splitattr(req.get_selector())
dirs = path.split('/')
dirs = map(unquote, dirs)
dirs, file = dirs[:-1], dirs[-1]
if dirs and not dirs[0]:
dirs = dirs[1:]
try:
fw = self.connect_ftp(user, passwd, host, port, dirs)
type = file and 'I' or 'D'
for attr in attrs:
attr, value = splitvalue(attr)
if attr.lower() == 'type' and \
value in ('a', 'A', 'i', 'I', 'd', 'D'):
type = value.upper()
fp, retrlen = fw.retrfile(file, type)
headers = ""
mtype = mimetypes.guess_type(req.get_full_url())[0]
if mtype:
headers += "Content-type: %s\n" % mtype
if retrlen is not None and retrlen >= 0:
headers += "Content-length: %d\n" % retrlen
sf = StringIO(headers)
headers = mimetools.Message(sf)
return addinfourl(fp, headers, req.get_full_url())
except ftplib.all_errors, msg:
raise IOError, ('ftp error', msg), sys.exc_info()[2]
def connect_ftp(self, user, passwd, host, port, dirs):
fw = ftpwrapper(user, passwd, host, port, dirs)
## fw.ftp.set_debuglevel(1)
return fw
class CacheFTPHandler(FTPHandler):
# XXX would be nice to have pluggable cache strategies
# XXX this stuff is definitely not thread safe
def __init__(self):
self.cache = {}
self.timeout = {}
self.soonest = 0
self.delay = 60
self.max_conns = 16
def setTimeout(self, t):
self.delay = t
def setMaxConns(self, m):
self.max_conns = m
def connect_ftp(self, user, passwd, host, port, dirs):
key = user, host, port, '/'.join(dirs)
if key in self.cache:
self.timeout[key] = time.time() + self.delay
else:
self.cache[key] = ftpwrapper(user, passwd, host, port, dirs)
self.timeout[key] = time.time() + self.delay
self.check_cache()
return self.cache[key]
def check_cache(self):
# first check for old ones
t = time.time()
if self.soonest <= t:
for k, v in self.timeout.items():
if v < t:
self.cache[k].close()
del self.cache[k]
del self.timeout[k]
if self.timeout:  # guard against min() on an emptied timeout map
self.soonest = min(self.timeout.values())
# then check the size
if len(self.cache) == self.max_conns:
for k, v in self.timeout.items():
if v == self.soonest:
del self.cache[k]
del self.timeout[k]
break
if self.timeout:  # guard against min() on an emptied timeout map
self.soonest = min(self.timeout.values())
class GopherHandler(BaseHandler):
def gopher_open(self, req):
# XXX can raise socket.error
import gopherlib # this raises DeprecationWarning in 2.5
host = req.get_host()
if not host:
raise GopherError('no host given')
host = unquote(host)
selector = req.get_selector()
type, selector = splitgophertype(selector)
selector, query = splitquery(selector)
selector = unquote(selector)
if query:
query = unquote(query)
fp = gopherlib.send_query(selector, query, host)
else:
fp = gopherlib.send_selector(selector, host)
return addinfourl(fp, noheaders(), req.get_full_url())
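# Hedged wiring sketch (editorial addition): the handlers above are meant to
# be chained by an OpenerDirector. Assuming a urllib2-style build_opener
# helper exists alongside them (it is not shown here), typical use would be:
#
#   opener = build_opener(HTTPCookieProcessor(), ProxyDigestAuthHandler())
#   resp = opener.open('http://example.com/')
#   print resp.code, resp.geturl()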
| apache-2.0 |
nugget/home-assistant | homeassistant/components/scsgate/__init__.py | 7 | 5410 | """
Support for SCSGate components.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/scsgate/
"""
import logging
from threading import Lock
import voluptuous as vol
from homeassistant.const import (CONF_DEVICE, CONF_NAME)
from homeassistant.core import EVENT_HOMEASSISTANT_STOP
import homeassistant.helpers.config_validation as cv
REQUIREMENTS = ['scsgate==0.1.0']
_LOGGER = logging.getLogger(__name__)
ATTR_STATE = 'state'
CONF_SCS_ID = 'scs_id'
DOMAIN = 'scsgate'
SCSGATE = None
CONFIG_SCHEMA = vol.Schema({
DOMAIN: vol.Schema({
vol.Required(CONF_DEVICE): cv.string,
}),
}, extra=vol.ALLOW_EXTRA)
SCSGATE_SCHEMA = vol.Schema({
vol.Required(CONF_SCS_ID): cv.string,
vol.Optional(CONF_NAME): cv.string,
})
def setup(hass, config):
"""Set up the SCSGate component."""
device = config[DOMAIN][CONF_DEVICE]
global SCSGATE
try:
SCSGATE = SCSGate(device=device, logger=_LOGGER)
SCSGATE.start()
except Exception as exception: # pylint: disable=broad-except
_LOGGER.error("Cannot setup SCSGate component: %s", exception)
return False
def stop_monitor(event):
"""Stop the SCSGate."""
_LOGGER.info("Stopping SCSGate monitor thread")
SCSGATE.stop()
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, stop_monitor)
return True
class SCSGate:
"""The class for dealing with the SCSGate device via scsgate.Reactor."""
def __init__(self, device, logger):
"""Initialize the SCSGate."""
self._logger = logger
self._devices = {}
self._devices_to_register = {}
self._devices_to_register_lock = Lock()
self._device_being_registered = None
self._device_being_registered_lock = Lock()
from scsgate.connection import Connection
connection = Connection(device=device, logger=self._logger)
from scsgate.reactor import Reactor
self._reactor = Reactor(
connection=connection, logger=self._logger,
handle_message=self.handle_message)
def handle_message(self, message):
"""Handle a messages seen on the bus."""
from scsgate.messages import StateMessage, ScenarioTriggeredMessage
self._logger.debug("Received message {}".format(message))
if not isinstance(message, StateMessage) and \
not isinstance(message, ScenarioTriggeredMessage):
msg = "Ignored message {} - not relevant type".format(
message)
self._logger.debug(msg)
return
if message.entity in self._devices:
new_device_activated = False
with self._devices_to_register_lock:
if message.entity == self._device_being_registered:
self._device_being_registered = None
new_device_activated = True
if new_device_activated:
self._activate_next_device()
try:
self._devices[message.entity].process_event(message)
except Exception as exception: # pylint: disable=broad-except
msg = "Exception while processing event: {}".format(exception)
self._logger.error(msg)
else:
self._logger.info(
"Ignoring state message for device {} because unknown".format(
message.entity))
@property
def devices(self):
"""Return a dictionary with known devices.
Key is device ID, value is the device itself.
"""
return self._devices
def add_device(self, device):
"""Add the specified device.
The list contains already registered devices.
Beware: this is not what you usually want to do, take a look at
`add_devices_to_register`
"""
self._devices[device.scs_id] = device
def add_devices_to_register(self, devices):
"""List of devices to be registered."""
with self._devices_to_register_lock:
for device in devices:
self._devices_to_register[device.scs_id] = device
self._activate_next_device()
def _activate_next_device(self):
"""Start the activation of the first device."""
from scsgate.tasks import GetStatusTask
with self._devices_to_register_lock:
while self._devices_to_register:
_, device = self._devices_to_register.popitem()
self._devices[device.scs_id] = device
self._device_being_registered = device.scs_id
self._reactor.append_task(GetStatusTask(target=device.scs_id))
def is_device_registered(self, device_id):
"""Check whether a device is already registered or not."""
with self._devices_to_register_lock:
if device_id in self._devices_to_register.keys():
return False
with self._device_being_registered_lock:
if device_id == self._device_being_registered:
return False
return True
def start(self):
"""Start the scsgate.Reactor."""
self._reactor.start()
def stop(self):
"""Stop the scsgate.Reactor."""
self._reactor.stop()
def append_task(self, task):
"""Register a new task to be executed."""
self._reactor.append_task(task)
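# Hedged illustration (editorial addition, not part of the component): how a
# platform module could hand devices to the shared SCSGATE instance after
# setup() has run; my_device stands for any object exposing an scs_id
# attribute and a process_event(message) method.
#
#   from homeassistant.components import scsgate
#   scsgate.SCSGATE.add_devices_to_register([my_device])
#   if scsgate.SCSGATE.is_device_registered(my_device.scs_id):
#       pass  # safe to forward state updates to this device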
| apache-2.0 |
ftramer/stanford-suns-nips17 | untargeted/nets/inception_resnet_v2.py | 8 | 16151 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Contains the definition of the Inception Resnet V2 architecture.
As described in http://arxiv.org/abs/1602.07261.
Inception-v4, Inception-ResNet and the Impact of Residual Connections
on Learning
Christian Szegedy, Sergey Ioffe, Vincent Vanhoucke, Alex Alemi
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
slim = tf.contrib.slim
def block35(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
"""Builds the 35x35 resnet block."""
with tf.variable_scope(scope, 'Block35', [net], reuse=reuse):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 32, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 32, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 32, 3, scope='Conv2d_0b_3x3')
with tf.variable_scope('Branch_2'):
tower_conv2_0 = slim.conv2d(net, 32, 1, scope='Conv2d_0a_1x1')
tower_conv2_1 = slim.conv2d(tower_conv2_0, 48, 3, scope='Conv2d_0b_3x3')
tower_conv2_2 = slim.conv2d(tower_conv2_1, 64, 3, scope='Conv2d_0c_3x3')
mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_1, tower_conv2_2])
up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
activation_fn=None, scope='Conv2d_1x1')
net += scale * up
if activation_fn:
net = activation_fn(net)
return net
def block17(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
"""Builds the 17x17 resnet block."""
with tf.variable_scope(scope, 'Block17', [net], reuse=reuse):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 192, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 128, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 160, [1, 7],
scope='Conv2d_0b_1x7')
tower_conv1_2 = slim.conv2d(tower_conv1_1, 192, [7, 1],
scope='Conv2d_0c_7x1')
mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_2])
up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
activation_fn=None, scope='Conv2d_1x1')
net += scale * up
if activation_fn:
net = activation_fn(net)
return net
def block8(net, scale=1.0, activation_fn=tf.nn.relu, scope=None, reuse=None):
"""Builds the 8x8 resnet block."""
with tf.variable_scope(scope, 'Block8', [net], reuse=reuse):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 192, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 192, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 224, [1, 3],
scope='Conv2d_0b_1x3')
tower_conv1_2 = slim.conv2d(tower_conv1_1, 256, [3, 1],
scope='Conv2d_0c_3x1')
mixed = tf.concat(axis=3, values=[tower_conv, tower_conv1_2])
up = slim.conv2d(mixed, net.get_shape()[3], 1, normalizer_fn=None,
activation_fn=None, scope='Conv2d_1x1')
net += scale * up
if activation_fn:
net = activation_fn(net)
return net
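# Editorial note (not in the original file): all three blocks above apply the
# same scaled residual update, out = activation(x + scale * branch(x)). A
# minimal standalone illustration of that arithmetic with made-up tensors:
#
#   x = tf.ones([1, 35, 35, 320])
#   branch_out = tf.fill([1, 35, 35, 320], 2.0)
#   out = tf.nn.relu(x + 0.17 * branch_out)  # 0.17 is the block35 scale used below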
def inception_resnet_v2_base(inputs,
final_endpoint='Conv2d_7b_1x1',
output_stride=16,
align_feature_maps=False,
scope=None):
"""Inception model from http://arxiv.org/abs/1602.07261.
Constructs an Inception Resnet v2 network from inputs to the given final
endpoint. This method can construct the network up to the final inception
block Conv2d_7b_1x1.
Args:
inputs: a tensor of size [batch_size, height, width, channels].
final_endpoint: specifies the endpoint to construct the network up to. It
can be one of ['Conv2d_1a_3x3', 'Conv2d_2a_3x3', 'Conv2d_2b_3x3',
'MaxPool_3a_3x3', 'Conv2d_3b_1x1', 'Conv2d_4a_3x3', 'MaxPool_5a_3x3',
'Mixed_5b', 'Mixed_6a', 'PreAuxLogits', 'Mixed_7a', 'Conv2d_7b_1x1']
output_stride: A scalar that specifies the requested ratio of input to
output spatial resolution. Only supports 8 and 16.
align_feature_maps: When true, changes all the VALID paddings in the network
to SAME padding so that the feature maps are aligned.
scope: Optional variable_scope.
Returns:
tensor_out: output tensor corresponding to the final_endpoint.
end_points: a set of activations for external use, for example summaries or
losses.
Raises:
ValueError: if final_endpoint is not set to one of the predefined values,
or if the output_stride is not 8 or 16, or if the output_stride is 8 and
we request an end point after 'PreAuxLogits'.
"""
if output_stride != 8 and output_stride != 16:
raise ValueError('output_stride must be 8 or 16.')
padding = 'SAME' if align_feature_maps else 'VALID'
end_points = {}
def add_and_check_final(name, net):
end_points[name] = net
return name == final_endpoint
with tf.variable_scope(scope, 'InceptionResnetV2', [inputs]):
with slim.arg_scope([slim.conv2d, slim.max_pool2d, slim.avg_pool2d],
stride=1, padding='SAME'):
# 149 x 149 x 32
net = slim.conv2d(inputs, 32, 3, stride=2, padding=padding,
scope='Conv2d_1a_3x3')
if add_and_check_final('Conv2d_1a_3x3', net): return net, end_points
# 147 x 147 x 32
net = slim.conv2d(net, 32, 3, padding=padding,
scope='Conv2d_2a_3x3')
if add_and_check_final('Conv2d_2a_3x3', net): return net, end_points
# 147 x 147 x 64
net = slim.conv2d(net, 64, 3, scope='Conv2d_2b_3x3')
if add_and_check_final('Conv2d_2b_3x3', net): return net, end_points
# 73 x 73 x 64
net = slim.max_pool2d(net, 3, stride=2, padding=padding,
scope='MaxPool_3a_3x3')
if add_and_check_final('MaxPool_3a_3x3', net): return net, end_points
# 73 x 73 x 80
net = slim.conv2d(net, 80, 1, padding=padding,
scope='Conv2d_3b_1x1')
if add_and_check_final('Conv2d_3b_1x1', net): return net, end_points
# 71 x 71 x 192
net = slim.conv2d(net, 192, 3, padding=padding,
scope='Conv2d_4a_3x3')
if add_and_check_final('Conv2d_4a_3x3', net): return net, end_points
# 35 x 35 x 192
net = slim.max_pool2d(net, 3, stride=2, padding=padding,
scope='MaxPool_5a_3x3')
if add_and_check_final('MaxPool_5a_3x3', net): return net, end_points
# 35 x 35 x 320
with tf.variable_scope('Mixed_5b'):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 96, 1, scope='Conv2d_1x1')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 48, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 64, 5,
scope='Conv2d_0b_5x5')
with tf.variable_scope('Branch_2'):
tower_conv2_0 = slim.conv2d(net, 64, 1, scope='Conv2d_0a_1x1')
tower_conv2_1 = slim.conv2d(tower_conv2_0, 96, 3,
scope='Conv2d_0b_3x3')
tower_conv2_2 = slim.conv2d(tower_conv2_1, 96, 3,
scope='Conv2d_0c_3x3')
with tf.variable_scope('Branch_3'):
tower_pool = slim.avg_pool2d(net, 3, stride=1, padding='SAME',
scope='AvgPool_0a_3x3')
tower_pool_1 = slim.conv2d(tower_pool, 64, 1,
scope='Conv2d_0b_1x1')
net = tf.concat(
[tower_conv, tower_conv1_1, tower_conv2_2, tower_pool_1], 3)
if add_and_check_final('Mixed_5b', net): return net, end_points
# TODO(alemi): Register intermediate endpoints
net = slim.repeat(net, 10, block35, scale=0.17)
# 17 x 17 x 1088 if output_stride == 8,
# 33 x 33 x 1088 if output_stride == 16
use_atrous = output_stride == 8
with tf.variable_scope('Mixed_6a'):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 384, 3, stride=1 if use_atrous else 2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
tower_conv1_0 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1_0, 256, 3,
scope='Conv2d_0b_3x3')
tower_conv1_2 = slim.conv2d(tower_conv1_1, 384, 3,
stride=1 if use_atrous else 2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
tower_pool = slim.max_pool2d(net, 3, stride=1 if use_atrous else 2,
padding=padding,
scope='MaxPool_1a_3x3')
net = tf.concat([tower_conv, tower_conv1_2, tower_pool], 3)
if add_and_check_final('Mixed_6a', net): return net, end_points
# TODO(alemi): register intermediate endpoints
with slim.arg_scope([slim.conv2d], rate=2 if use_atrous else 1):
net = slim.repeat(net, 20, block17, scale=0.10)
if add_and_check_final('PreAuxLogits', net): return net, end_points
if output_stride == 8:
# TODO(gpapan): Properly support output_stride for the rest of the net.
raise ValueError('output_stride==8 is only supported up to the '
'PreAuxlogits end_point for now.')
# 8 x 8 x 2080
with tf.variable_scope('Mixed_7a'):
with tf.variable_scope('Branch_0'):
tower_conv = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv_1 = slim.conv2d(tower_conv, 384, 3, stride=2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_1'):
tower_conv1 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv1_1 = slim.conv2d(tower_conv1, 288, 3, stride=2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_2'):
tower_conv2 = slim.conv2d(net, 256, 1, scope='Conv2d_0a_1x1')
tower_conv2_1 = slim.conv2d(tower_conv2, 288, 3,
scope='Conv2d_0b_3x3')
tower_conv2_2 = slim.conv2d(tower_conv2_1, 320, 3, stride=2,
padding=padding,
scope='Conv2d_1a_3x3')
with tf.variable_scope('Branch_3'):
tower_pool = slim.max_pool2d(net, 3, stride=2,
padding=padding,
scope='MaxPool_1a_3x3')
net = tf.concat(
[tower_conv_1, tower_conv1_1, tower_conv2_2, tower_pool], 3)
if add_and_check_final('Mixed_7a', net): return net, end_points
# TODO(alemi): register intermediate endpoints
net = slim.repeat(net, 9, block8, scale=0.20)
net = block8(net, activation_fn=None)
# 8 x 8 x 1536
net = slim.conv2d(net, 1536, 1, scope='Conv2d_7b_1x1')
if add_and_check_final('Conv2d_7b_1x1', net): return net, end_points
raise ValueError('final_endpoint (%s) not recognized' % final_endpoint)
def inception_resnet_v2(inputs, num_classes=1001, is_training=True,
dropout_keep_prob=0.8,
reuse=None,
scope='InceptionResnetV2',
create_aux_logits=True):
"""Creates the Inception Resnet V2 model.
Args:
inputs: a 4-D tensor of size [batch_size, height, width, 3].
num_classes: number of predicted classes.
is_training: whether is training or not.
dropout_keep_prob: float, the fraction to keep before final layer.
reuse: whether or not the network and its variables should be reused. To be
able to reuse 'scope' must be given.
scope: Optional variable_scope.
create_aux_logits: Whether to include the auxiliary logits.
Returns:
logits: the logits outputs of the model.
end_points: the set of end_points from the inception model.
"""
end_points = {}
with tf.variable_scope(scope, 'InceptionResnetV2', [inputs, num_classes],
reuse=reuse) as scope:
with slim.arg_scope([slim.batch_norm, slim.dropout],
is_training=is_training):
net, end_points = inception_resnet_v2_base(inputs, scope=scope)
if create_aux_logits:
with tf.variable_scope('AuxLogits'):
aux = end_points['PreAuxLogits']
aux = slim.avg_pool2d(aux, 5, stride=3, padding='VALID',
scope='Conv2d_1a_3x3')
aux = slim.conv2d(aux, 128, 1, scope='Conv2d_1b_1x1')
aux = slim.conv2d(aux, 768, aux.get_shape()[1:3],
padding='VALID', scope='Conv2d_2a_5x5')
aux = slim.flatten(aux)
aux = slim.fully_connected(aux, num_classes, activation_fn=None,
scope='Logits')
end_points['AuxLogits'] = aux
with tf.variable_scope('Logits'):
net = slim.avg_pool2d(net, net.get_shape()[1:3], padding='VALID',
scope='AvgPool_1a_8x8')
net = slim.flatten(net)
net = slim.dropout(net, dropout_keep_prob, is_training=is_training,
scope='Dropout')
end_points['PreLogitsFlatten'] = net
logits = slim.fully_connected(net, num_classes, activation_fn=None,
scope='Logits')
end_points['Logits'] = logits
end_points['Predictions'] = tf.nn.softmax(logits, name='Predictions')
return logits, end_points
inception_resnet_v2.default_image_size = 299
def inception_resnet_v2_arg_scope(weight_decay=0.00004,
batch_norm_decay=0.9997,
batch_norm_epsilon=0.001):
"""Yields the scope with the default parameters for inception_resnet_v2.
Args:
weight_decay: the weight decay for weights variables.
batch_norm_decay: decay for the moving average of batch_norm momentums.
batch_norm_epsilon: small float added to variance to avoid dividing by zero.
Returns:
an arg_scope with the parameters needed for inception_resnet_v2.
"""
# Set weight_decay for weights in conv2d and fully_connected layers.
with slim.arg_scope([slim.conv2d, slim.fully_connected],
weights_regularizer=slim.l2_regularizer(weight_decay),
biases_regularizer=slim.l2_regularizer(weight_decay)):
batch_norm_params = {
'decay': batch_norm_decay,
'epsilon': batch_norm_epsilon,
}
# Set activation_fn and parameters for batch_norm.
with slim.arg_scope([slim.conv2d], activation_fn=tf.nn.relu,
normalizer_fn=slim.batch_norm,
normalizer_params=batch_norm_params) as scope:
return scope
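# Hedged usage sketch (editorial addition): building the network under its
# arg_scope; the input shape follows default_image_size above and the
# placeholder is illustrative.
#
#   images = tf.placeholder(tf.float32, [None, 299, 299, 3])
#   with slim.arg_scope(inception_resnet_v2_arg_scope()):
#       logits, end_points = inception_resnet_v2(
#           images, num_classes=1001, is_training=False)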
| mit |
rspavel/spack | var/spack/repos/builtin/packages/exasp2/package.py | 3 | 3198 | # Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
import glob
class Exasp2(MakefilePackage):
"""ExaSP2 is a reference implementation of typical linear algebra algorithms
and workloads for a quantum molecular dynamics (QMD) electronic structure
code. The algorithm is based on a recursive second-order Fermi-Operator
expansion method (SP2) and is tailored for density functional based
tight-binding calculations of material systems. The SP2 algorithm variants
are part of the Los Alamos Transferable Tight-binding for Energetics
(LATTE) code, based on a matrix expansion of the Fermi operator in a
recursive series of generalized matrix-matrix multiplications. It is
created and maintained by Co-Design Center for Particle Applications
(CoPA). The code is intended to serve as a vehicle for co-design by
allowing others to extend and/or reimplement as needed to test performance
of new architectures, programming models, etc."""
tags = ['proxy-app']
homepage = "https://github.com/ECP-copa/ExaSP2"
url = "https://github.com/ECP-copa/ExaSP2/tarball/v1.0"
git = "https://github.com/ECP-copa/ExaSP2.git"
version('develop', branch='master')
version('1.0', sha256='59986ea70391a1b382d2ed22d5cf013f46c0c15e44ed95dcd875a917adfc6211')
variant('mpi', default=True, description='Build With MPI Support')
depends_on('bml')
depends_on('blas')
depends_on('lapack')
depends_on('mpi', when='+mpi')
depends_on('bml@1.2.3:+mpi', when='+mpi')
build_directory = 'src'
@property
def build_targets(self):
targets = []
spec = self.spec
if '+mpi' in spec:
targets.append('PARALLEL=MPI')
targets.append('MPICC={0}'.format(spec['mpi'].mpicc))
targets.append('MPI_LIB=-L' + spec['mpi'].prefix.lib + ' -lmpi')
targets.append('MPI_INCLUDE=-I' + spec['mpi'].prefix.include)
else:
targets.append('PARALLEL=NONE')
# NOTE: no BLAS except for MKL has been properly tested. OpenBLAS was
# briefly but not rigorously tested. Using the generic BLAS approach to
# meet Spack requirements.
targets.append('BLAS=GENERIC_SPACKBLAS')
math_libs = str(spec['lapack'].libs)
math_libs += ' ' + str(spec['blas'].libs)
targets.append('SPACKBLASLIBFLAGS=' + math_libs)
math_includes = spec['lapack'].prefix.include
math_includes += " -I" + spec['blas'].prefix.include
targets.append('SPACKBLASINCLUDES=' + math_includes)
# And BML
bml_lib_dirs = spec['bml'].libs.directories[0]
targets.append('BML_PATH=' + bml_lib_dirs)
targets.append('--file=Makefile.vanilla')
return targets
def install(self, spec, prefix):
mkdir(prefix.bin)
mkdir(prefix.doc)
for files in glob.glob('bin/ExaSP2-*'):
install(files, prefix.bin)
install('LICENSE.md', prefix.doc)
install('README.md', prefix.doc)
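# Hedged usage note (editorial addition): typical Spack invocations for this
# package; exact spec syntax depends on the local Spack installation.
#
#   $ spack install exasp2 +mpi ^openmpi
#   $ spack install exasp2 ~mpi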
| lgpl-2.1 |
qPCR4vir/orange | Orange/OrangeWidgets/OWReport.py | 6 | 18916 | # Widgets cannot be reset to the settings they had at the time of reporting.
# The reason lies in the OWGUI callback mechanism: callbacks are triggered only
# when the controls are changed by the user. If the related widget's attribute
# is changed programmatically, the control is updated but the callback is not
# called. This is done intentionally and with a very solid reason: it enables
# us to do multiple changes without, for instance, the widget being redrawn
# every time. Besides, it would probably lead to cycles or at least a great
# number of redundant calls. However, since setting attributes does not trigger
# callbacks, setting the attributes here would have no other effect than
# changing the widget's controls and leaving it in an undefined (possibly
# invalid) state. The reason why we do not have these problems in "normal" use
# of settings is that the context independent settings are loaded only when
# the widget is initialized and the context dependent settings are retrieved
# when the new data is sent and the widget "knows" it has to reconfigure.
# The only solution would be to require that all the widgets have a method for
# updating everything from scratch according to settings. This would require
# a lot of work, which might not even be feasible. For instance, there are
# widgets which get the
# data, compute something and discard the data. This is good since it is
# memory efficient, but it may prohibit the widget from implementing the
# update-from-the-scratch method.
import os
import time
import tempfile
import shutil
import re
import pickle
import binascii
import xml.dom.minidom
from OWWidget import *
from PyQt4.QtWebKit import *
from Orange.utils import environ
def get_instance():
"""
Return the global `ReportWindow` instance.
"""
app = QApplication.instance()
if not hasattr(app, "_reportWindow"):
report = ReportWindow()
app._reportWindow = report
app.sendPostedEvents(report, 0)
app.aboutToQuit.connect(report.removeTemp)
# event loop will still process deferred delete events
# after aboutToQuit is emitted
app.aboutToQuit.connect(report.deleteLater)
return app._reportWindow
def escape(s):
return s.replace("\\", "\\\\").replace("\n", "\\n").replace("'", "\\'")
class MyListWidget(QListWidget):
def __init__(self, parent, widget):
QListWidget.__init__(self, parent)
self.widget = widget
def dropEvent(self, ev):
QListWidget.dropEvent(self, ev)
self.widget.rebuildHtml()
def mousePressEvent(self, ev):
QListWidget.mousePressEvent(self, ev)
node = self.currentItem()
if ev.button() == Qt.RightButton and node:
self.widget.nodePopup.popup(ev.globalPos())
class ReportWindow(OWWidget):
indexfile = os.path.join(environ.widget_install_dir,
"report", "index.html")
def __init__(self):
OWWidget.__init__(self, None, None, "Report")
self.dontScroll = False
self.widgets = []
self.counter = 0
self.tempdir = tempfile.mkdtemp("", "orange-report-")
self.tree = MyListWidget(self.controlArea, self)
self.tree.setDragEnabled(True)
self.tree.setDragDropMode(QAbstractItemView.InternalMove)
self.tree.setFixedWidth(200)
self.controlArea.layout().addWidget(self.tree)
QObject.connect(self.tree, SIGNAL("currentItemChanged(QListWidgetItem *, QListWidgetItem *)"), self.selectionChanged)
QObject.connect(self.tree, SIGNAL("itemActivated ( QListWidgetItem *)"), self.raiseWidget)
QObject.connect(self.tree, SIGNAL("itemDoubleClicked ( QListWidgetItem *)"), self.raiseWidget)
QObject.connect(self.tree, SIGNAL("itemChanged ( QListWidgetItem *)"), self.itemChanged)
self.treeItems = {}
self.reportBrowser = QWebView(self.mainArea)
# self.reportBrowser.setUrl(QUrl.fromLocalFile(self.indexfile))
self.reportBrowser.setHtml(open(self.indexfile, "rb").read())
frame = self.reportBrowser.page().mainFrame()
frame.addToJavaScriptWindowObject("myself", self)
frame.setScrollBarPolicy(Qt.Vertical, Qt.ScrollBarAsNeeded)
self.javascript = frame.evaluateJavaScript
self.mainArea.layout().addWidget(self.reportBrowser)
box = OWGUI.widgetBox(self.controlArea)
saveButton = OWGUI.button(box, self, "&Save", self.saveReport)
printButton = OWGUI.button(box, self, "&Print", self.printReport)
saveButton.setAutoDefault(0)
self.nodePopup = QMenu("Widget")
self.showWidgetAction = self.nodePopup.addAction("Show widget", self.showActiveNodeWidget)
self.nodePopup.addSeparator()
self.deleteAction = self.nodePopup.addAction("Remove", self.removeActiveNode, Qt.Key_Delete)
self.deleteAllAction = self.nodePopup.addAction("Remove All", self.clearReport)
self.nodePopup.setEnabled(1)
self.resize(900, 850)
def removeTemp(self):
try:
shutil.rmtree(self.tempdir)
except:
pass
def __call__(self, name, data, widgetId, icon, wtime=None):
if not self.isVisible():
self.show()
else:
self.raise_()
self.counter += 1
elid = "N%03i" % self.counter
widnode = QListWidgetItem(icon, name, self.tree)
widnode.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable | Qt.ItemIsDragEnabled | Qt.ItemIsEditable)
widnode.elementId = elid
widnode.widgetId = widgetId
widnode.time = wtime or time.strftime("%a %b %d %y, %H:%M:%S")
widnode.data = data
widnode.name = name
self.tree.addItem(widnode)
self.treeItems[elid] = widnode
self.addEntry(widnode)
def appendReport(self, name, report, sender=None):
"""
Append a report section titled `name` and with html contents
`report`. `sender` if specified can be a OWBaseWidget instance
that is sending this report.
"""
if not self.isVisible():
self.show()
else:
self.raise_()
if sender is not None:
icon = sender.windowIcon()
widgetId = sender.widgetId
else:
icon = QIcon()
widgetId = -1
self.counter += 1
elid = "N%03i" % self.counter
widnode = QListWidgetItem(icon, name, self.tree)
widnode.setFlags(Qt.ItemIsEnabled | Qt.ItemIsSelectable | \
Qt.ItemIsDragEnabled | Qt.ItemIsEditable)
widnode.elementId = elid
widnode.widgetId = widgetId
import locale
time_encoding = locale.getlocale(locale.LC_TIME)[1] or locale.getpreferredencoding()
widnode.time = time.strftime("%a %b %d %y, %H:%M:%S").decode(time_encoding)
widnode.data = report
widnode.name = name
self.tree.addItem(widnode)
self.treeItems[elid] = widnode
self.addEntry(widnode)
def addEntry(self, widnode, scrollIntoView=True):
newEntry = (
'<div id="%s" onClick="myself.changeItem(this.id);">'
' <a name="%s">'
' <h1>%s<span class="timestamp">%s</span></h1>'
' <div class="insideh1">'
' %s'
' </div>'
'</div>'
) % (widnode.elementId, widnode.elementId, widnode.name,
widnode.time, widnode.data)
widnode.content = newEntry
body = self.bodyFrame()
body.appendInside(newEntry)
if scrollIntoView:
self.javascript("document.getElementById('%s').scrollIntoView();" % widnode.elementId)
def bodyFrame(self):
main = self.reportBrowser.page().mainFrame()
return main.findFirstElement("body")
def selectionChanged(self, current, previous):
if current:
if self.dontScroll:
self.javascript("document.getElementById('%s').className = 'selected';" % current.elementId)
else:
self.javascript("""
var newsel = document.getElementById('%s');
newsel.className = 'selected';
newsel.scrollIntoView();""" % current.elementId)
self.showWidgetAction.setEnabled(current.widgetId >= 0)
if previous:
self.javascript("document.getElementById('%s').className = '';" % previous.elementId)
def rebuildHtml(self):
self.bodyFrame().setInnerXml("")
for i in range(self.tree.count()):
self.addEntry(self.tree.item(i))
selected = self.tree.selectedItems()
if selected:
self.selectionChanged(selected[0], None)
@pyqtSignature("QString")
def changeItem(self, elid):
self.dontScroll = True
item = self.treeItems[str(elid)]
self.tree.setCurrentItem(item)
self.tree.scrollToItem(item)
self.dontScroll = False
def raiseWidget(self, node):
for widget in self.widgets:
if widget.instance.widgetId == node.widgetId:
break
else:
return
widget.instance.reshow()
def showActiveNodeWidget(self):
node = self.tree.currentItem()
if node:
self.raiseWidget(node)
re_h1 = re.compile(r'<h1>(?P<name>.*?)<span class="timestamp">')
def itemChanged(self, node):
if hasattr(node, "content"):
be, en = self.re_h1.search(node.content).span("name")
node.content = node.content[:be] + str(node.text()) + node.content[en:]
self.rebuildHtml()
def removeActiveNode(self):
node = self.tree.currentItem()
if node:
self.tree.takeItem(self.tree.row(node))
self.rebuildHtml()
def clearReport(self):
self.tree.clear()
self.rebuildHtml()
def printReport(self):
printer = QPrinter()
printDialog = QPrintDialog(printer, self)
printDialog.setWindowTitle("Print report")
if (printDialog.exec_() != QDialog.Accepted):
return
self.reportBrowser.print_(printer)
def getUniqueFileName(self, patt):
for i in xrange(1000000):
fn = os.path.join(self.tempdir, patt % i)
if not os.path.exists(fn):
return "file:///" + fn, fn
img_re = re.compile(r'<IMG.*?\ssrc="(?P<imgname>[^"]*)"',
re.DOTALL + re.IGNORECASE)
browser_re = re.compile(r'<!--browsercode(.*?)-->')
def getSaveDir(self):
"""
Return the initial file system path for the 'Save' dialog.
"""
if hasattr(self, "saveDir"):
# set by orngCanvas.OrangeCanvasDlg
return self.saveDir
else:
settings = QSettings()
savedir = QDesktopServices.storageLocation(
QDesktopServices.DocumentsLocation
)
if PYQT_VERSION < 0x40803:
savedir = settings.value("OWReport/save-directory",
defaultValue=savedir).toString()
else:
savedir = settings.value("OWReport/save-directory",
defaultValue=savedir,
type=unicode)
return savedir
def storeSaveDir(self, savedir):
"""
Store the chosen folder path for subsequent save dialog
initialization.
"""
if hasattr(self, "saveDir"):
self.saveDir = savedir
else:
settings = QSettings()
settings.setValue("OWReport/save-directory", savedir)
def saveReport(self):
"""
Save the report to a html file chosen by the user.
"""
savedir = self.getSaveDir()
filename = QFileDialog.getSaveFileName(self, "Save Report", savedir,
"Web page (*.html *.htm)")
filename = unicode(filename)
if not filename:
return
path, fname = os.path.split(filename)
self.storeSaveDir(path)
if not os.path.exists(path):
try:
os.makedirs(path)
except:
QMessageBox.critical(None, "Error",
"Cannot create directory " + path)
return
tt = file(self.indexfile, "rt").read()
index = "<br/>".join('<a href="#%s">%s</a>' % (self.tree.item(i).elementId, self.re_h1.search(self.tree.item(i).content).group("name"))
for i in range(self.tree.count()))
######## Rewrite this to go through individual tree nodes. For one reason: this code used to work
## when the HTML stored in tree nodes included DIV and H1 tags, which it does not any more,
## so they have to be added here
data = "\n".join(self.tree.item(i).content for i in range(self.tree.count()))
tt = tt.replace("<body>", '<body><table width="100%%"><tr><td valign="top"><p style="padding-top:25px;">Index</p>%s</td><td>%s</td></tr></table>' % (index, data))
tt = self.browser_re.sub("\\1", tt)
filepref = "file:///"+self.tempdir
if filepref[-1] != os.sep:
filepref += os.sep
lfilepref = len(filepref)
imspos = -1
subdir = None
while True:
imspos = tt.find(filepref, imspos+1)
if imspos == -1:
break
if not subdir:
subdir = os.path.splitext(fname)[0]
if subdir == fname:
subdir += "_data"
cnt = 0
osubdir = subdir
while os.path.exists(os.path.join(path, subdir)):
cnt += 1
subdir = "%s%05i" % (osubdir, cnt)
absubdir = os.path.join(path, subdir)
os.mkdir(absubdir)
imname = tt[imspos+lfilepref:tt.find('"', imspos)]
shutil.copy(os.path.join(filepref[8:], imname), os.path.join(absubdir, imname))
if subdir:
tt = tt.replace(filepref, subdir+"/")
file(filename, "wb").write(tt.encode("utf8"))
def getDepth(item, expanded=True):
ccount = item.childCount()
return 1 + (ccount and (not expanded or item.isExpanded()) and max(getDepth(item.child(cc), expanded) for cc in range(ccount)))
# Need to use the tree's columnCount - children may have unattended additional columns
# (this happens, e.g. in the tree viewer)
def printTree(item, level, depthRem, visibleColumns, expanded=True):
res = '<tr>'+'<td width="16px"></td>'*level + \
'<td colspan="%i">%s</td>' % (depthRem, item.text(0) or (not level and "<root>") or "") + \
''.join('<td style="padding-left:10px">%s</td>' % item.text(i) for i in visibleColumns) + \
'</tr>\n'
if not expanded or item.isExpanded():
for i in range(item.childCount()):
res += printTree(item.child(i), level+1, depthRem-1, visibleColumns, expanded)
return res
def reportTree(tree, expanded=True):
tops = tree.topLevelItemCount()
header = tree.headerItem()
visibleColumns = [i for i in range(1, tree.columnCount()) if not tree.isColumnHidden(i)]
depth = tops and max(getDepth(tree.topLevelItem(cc), expanded) for cc in range(tops))
res = "<table>\n"
res += '<tr><th colspan="%i">%s</th>' % (depth, header.text(0))
res += ''.join('<th>%s</th>' % header.text(i) for i in visibleColumns)
res += '</tr>\n'
res += ''.join(printTree(tree.topLevelItem(cc), 0, depth, visibleColumns, expanded) for cc in range(tops))
res += "</table>\n"
return res
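# Hedged usage sketch (editorial addition): rendering a populated QTreeWidget
# into report HTML; tree is any QTreeWidget instance. Passing expanded=False
# emits every row regardless of the widget's current expansion state.
#
#   html = reportTree(tree, expanded=False)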
def reportCell(item, tag, style):
if not item:
return '<%s style="%s"/>' % (tag, style)
if isinstance(item, QTableWidgetItem):
alignment = {Qt.AlignLeft: "left", Qt.AlignRight: "right", Qt.AlignHCenter: "center"}.get(item.textAlignment() & Qt.AlignHorizontal_Mask, "left")
text = item.text().replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
return '<%s style="%s; text-align: %s">%s</%s>' % (tag, style, alignment, text, tag)
elif isinstance(item, QModelIndex):
align = item.data(Qt.TextAlignmentRole)
align, ok = align.toInt() if align.isValid() else Qt.AlignLeft, True
alignment = {Qt.AlignLeft: "left", Qt.AlignRight: "right", Qt.AlignHCenter: "center"}.get(align & Qt.AlignHorizontal_Mask, "left")
value = item.data(Qt.DisplayRole)
if value.type() >= QVariant.UserType:
text = str(value.toPyObject())
else:
text = str(value.toString())
text = text.replace("&", "&").replace("<", "<").replace(">", ">")
return '<%s style="%s; text-align: %s">%s</%s>' % (tag, style, alignment, text, tag)
elif isinstance(item, tuple): #(QAbstractItemModel, headerIndex)
model, index = item
align = model.headerData(index, Qt.Horizontal, Qt.TextAlignmentRole)
align, ok = align.toInt() if align.isValid() else Qt.AlignLeft, True
alignment = {Qt.AlignLeft: "left", Qt.AlignRight: "right", Qt.AlignHCenter: "center"}.get(align & Qt.AlignHorizontal_Mask, "left")
text = str(model.headerData(index, Qt.Horizontal, Qt.DisplayRole).toString()).replace("&", "&amp;").replace("<", "&lt;").replace(">", "&gt;")
return '<%s style="%s; text-align: %s">%s</%s>' % (tag, style, alignment, text, tag)
def reportTable(table):
ncols = table.model().columnCount()
res = '<table style="border-bottom: thin solid black">\n'
vheadVisible = table.verticalHeader().isVisible()
shownColumns = [i for i in range(ncols) if not table.isColumnHidden(i)]
if table.horizontalHeader().isVisible():
res += "<tr>"+'<th></th>'*vheadVisible + "".join(reportCell(table.horizontalHeaderItem(i) if isinstance(table, QTableWidget) else (table.model(), i),
"th", "padding-left: 4px; padding-right: 4px;") for i in shownColumns) + "</tr>\n"
res += '<tr style="height: 2px">'+'<th colspan="%i" style="border-bottom: thin solid black; height: 2px;"></th>' % (ncols+vheadVisible)
for j in range(table.model().rowCount()):
res += "<tr>"
if vheadVisible:
if isinstance(table, QTableWidget):
vhi = table.verticalHeaderItem(j)
text = vhi.text() if vhi else ""
else:
text = str(table.model().headerData(j, Qt.Vertical, Qt.DisplayRole).toString())
res += "<th>%s</th>" % text
res += "".join(reportCell(table.item(j, i) if isinstance(table, QTableWidget) else table.model().index(j, i),
"td", "") for i in shownColumns) + "</tr>\n"
res += "</table>\n"
return res
| gpl-3.0 |
kalahbrown/HueBigSQL | desktop/core/ext-py/kazoo-2.0/kazoo/recipe/counter.py | 47 | 2674 | """Zookeeper Counter
:Maintainer: None
:Status: Unknown
"""
from kazoo.exceptions import BadVersionError
from kazoo.retry import ForceRetryError
class Counter(object):
"""Kazoo Counter
A shared counter of either int or float values. Changes to the
counter are done atomically. The general retry policy is used to
retry operations if concurrent changes are detected.
The data is marshaled using `repr(value)` and converted back using
`type(counter.default)(value)` both using an ascii encoding. As
such other data types might be used for the counter value.
Counter changes can raise
:class:`~kazoo.exceptions.BadVersionError` if the retry policy
wasn't able to apply a change.
Example usage:
.. code-block:: python
zk = KazooClient()
counter = zk.Counter("/int")
counter += 2
counter -= 1
counter.value == 1
counter = zk.Counter("/float", default=1.0)
counter += 2.0
counter.value == 3.0
"""
def __init__(self, client, path, default=0):
"""Create a Kazoo Counter
:param client: A :class:`~kazoo.client.KazooClient` instance.
:param path: The counter path to use.
:param default: The default value.
"""
self.client = client
self.path = path
self.default = default
self.default_type = type(default)
self._ensured_path = False
def _ensure_node(self):
if not self._ensured_path:
# make sure our node exists
self.client.ensure_path(self.path)
self._ensured_path = True
def _value(self):
self._ensure_node()
old, stat = self.client.get(self.path)
old = old.decode('ascii') if old != b'' else self.default
version = stat.version
data = self.default_type(old)
return data, version
@property
def value(self):
return self._value()[0]
def _change(self, value):
if not isinstance(value, self.default_type):
raise TypeError('invalid type for value change')
self.client.retry(self._inner_change, value)
return self
def _inner_change(self, value):
data, version = self._value()
data = repr(data + value).encode('ascii')
try:
self.client.set(self.path, data, version=version)
except BadVersionError: # pragma: nocover
raise ForceRetryError()
def __add__(self, value):
"""Add value to counter."""
return self._change(value)
def __sub__(self, value):
"""Subtract value from counter."""
return self._change(-value)
| apache-2.0 |
pombredanne/django-material | material/templatetags/material_form.py | 10 | 5207 | import os
from collections import defaultdict
from django.forms.forms import BoundField
from django.template.base import (
TemplateSyntaxError, Library,
Node, Variable, token_kwargs)
from django.template.loader import get_template
from django.template.loader_tags import IncludeNode
register = Library()
def _render_parts(context, parts_list):
parts = context['form_parts']
for partnode in parts_list:
part = partnode.resolve_part(context)
if partnode.section not in parts[part]:
value = partnode.render(context)
parts[part][partnode.section] = value
@register.tag('form')
class FormNode(Node):
"""
Template based form rendering
Example::
{% form template='material/form.html' form=form layout=view.layout %}
{% part form.email prepend %}<span class="input-group-addon" id="basic-addon1">@</span>{% endpart %}
{% endform %}
"""
def __init__(self, parser, token):
bits = token.split_contents()
remaining_bits = bits[1:]
self.kwargs = token_kwargs(remaining_bits, parser)
if remaining_bits:
raise TemplateSyntaxError("%r received an invalid token: %r" %
(bits[0], remaining_bits[0]))
for key in self.kwargs:
if key not in ('form', 'layout', 'template'):
raise TemplateSyntaxError("%r received an invalid key: %r" %
(bits[0], key))
self.kwargs[key] = self.kwargs[key]
self.nodelist = parser.parse(('end{}'.format(bits[0]),))
parser.delete_first_token()
def render(self, context):
form = self.kwargs.get('form')
form = form.resolve(context) if form else context.get('form')
if form is None:
return ''
# Take one of view.layout or form.layout
layout = self.kwargs.get('layout')
if layout is not None:
layout = layout.resolve(context)
if layout is None:
if 'view' in context:
view = context['view']
if hasattr(view, 'layout'):
layout = view.layout
if layout is None:
if hasattr(form, 'layout'):
layout = form.layout
template_name = self.kwargs.get('template', 'material/form.html')
template = get_template(template_name)
# Render form and parts
parts = defaultdict(dict) # part -> section -> value
with context.push(
form=form,
layout=layout,
form_template_pack=os.path.dirname(template_name),
form_parts=parts):
# direct children
children = (node for node in self.nodelist if isinstance(node, FormPartNode))
_render_parts(context, children)
# include
children = (node for node in self.nodelist if isinstance(node, IncludeNode))
for included_list in children:
included = included_list.template.resolve(context)
children = (node for node in included.nodelist if isinstance(node, FormPartNode))
_render_parts(context, children)
return template.render(context)
@register.tag('part')
class FormPartNode(Node):
def __init__(self, parser, token):
bits = token.split_contents()
if len(bits) > 5:
raise TemplateSyntaxError(
"%r accepts at most 4 arguments (part_id, section, asvar, varname), got: {}" %
(bits[0], ','.join(bits[1:])))
self.part_id = Variable(bits[1])
self.section = bits[2] if len(bits) >= 3 else None
self.varname = None
if len(bits) > 3:
if bits[3] != 'asvar':
raise TemplateSyntaxError('Fourth argument should be asvar, got {}'.format(bits[3]))
if len(bits) < 5:
raise TemplateSyntaxError('Variable name not provided')
else:
self.varname = Variable(bits[4])
self.nodelist = parser.parse(('end{}'.format(bits[0]),))
parser.delete_first_token()
def resolve_part(self, context):
part = self.part_id.resolve(context)
if isinstance(part, BoundField):
part = part.field
return part
def render(self, context):
part = self.resolve_part(context)
parts = context['form_parts']
if self.section in parts[part]:
# already rendered
if self.varname is not None:
context[self.varname.resolve(context)] = parts[part][self.section]
return ""
else:
return parts[part][self.section]
# child parts
children = (node for node in self.nodelist if isinstance(node, FormPartNode))
_render_parts(context, children)
# render own content
value = self.nodelist.render(context).strip()
if self.varname is not None:
context[self.varname.resolve(context)] = value
return ''
else:
if not value:
return ''
return value
| bsd-3-clause |
slisson/intellij-community | python/lib/Lib/commands.py | 120 | 2279 | """Execute shell commands via os.popen() and return status, output.
Interface summary:
import commands
outtext = commands.getoutput(cmd)
(exitstatus, outtext) = commands.getstatusoutput(cmd)
outtext = commands.getstatus(file) # returns output of "ls -ld file"
A trailing newline is removed from the output string.
Encapsulates the basic operation:
pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
text = pipe.read()
sts = pipe.close()
[Note: it would be nice to add functions to interpret the exit status.]
"""
__all__ = ["getstatusoutput","getoutput","getstatus"]
# Module 'commands'
#
# Various tools for executing commands and looking at their output and status.
#
# NB This only works (and is only relevant) for UNIX.
# Get 'ls -l' status for an object into a string
#
def getstatus(file):
"""Return output of "ls -ld <file>" in a string."""
return getoutput('ls -ld' + mkarg(file))
# Get the output from a shell command into a string.
# The exit status is ignored; a trailing newline is stripped.
# Assume the command will work with '{ ... ; } 2>&1' around it..
#
def getoutput(cmd):
"""Return output (stdout or stderr) of executing cmd in a shell."""
return getstatusoutput(cmd)[1]
# Ditto but preserving the exit status.
# Returns a pair (sts, output)
#
def getstatusoutput(cmd):
"""Return (status, output) of executing cmd in a shell."""
import os
pipe = os.popen('{ ' + cmd + '; } 2>&1', 'r')
text = pipe.read()
sts = pipe.close()
if sts is None: sts = 0
if text[-1:] == '\n': text = text[:-1]
return sts, text
# Make command argument from directory and pathname (prefix space, add quotes).
#
def mk2arg(head, x):
import os
return mkarg(os.path.join(head, x))
# Make a shell command argument from a string.
# Return a string beginning with a space followed by a shell-quoted
# version of the argument.
# Two strategies: enclose in single quotes if it contains none;
# otherwise, enclose in double quotes and prefix quotable characters
# with backslash.
#
def mkarg(x):
if '\'' not in x:
return ' \'' + x + '\''
s = ' "'
for c in x:
if c in '\\$"`':
s = s + '\\'
s = s + c
s = s + '"'
return s
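# Hedged examples (editorial addition; output assumes a Unix shell):
#
#   >>> mkarg('foo bar')
#   " 'foo bar'"
#   >>> mkarg("don't")
#   ' "don\'t"'
#   >>> getstatusoutput('echo hello')
#   (0, 'hello')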
| apache-2.0 |
atsolakid/edx-platform | lms/djangoapps/verify_student/migrations/0001_initial.py | 114 | 7568 | # -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding model 'SoftwareSecurePhotoVerification'
db.create_table('verify_student_softwaresecurephotoverification', (
('id', self.gf('django.db.models.fields.AutoField')(primary_key=True)),
('status', self.gf('model_utils.fields.StatusField')(default='created', max_length=100, no_check_for_status=True)),
('status_changed', self.gf('model_utils.fields.MonitorField')(default=datetime.datetime.now, monitor=u'status')),
('user', self.gf('django.db.models.fields.related.ForeignKey')(to=orm['auth.User'])),
('name', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('face_image_url', self.gf('django.db.models.fields.URLField')(max_length=255, blank=True)),
('photo_id_image_url', self.gf('django.db.models.fields.URLField')(max_length=255, blank=True)),
('receipt_id', self.gf('django.db.models.fields.CharField')(max_length=255, db_index=True)),
('created_at', self.gf('django.db.models.fields.DateTimeField')(auto_now_add=True, db_index=True, blank=True)),
('updated_at', self.gf('django.db.models.fields.DateTimeField')(auto_now=True, db_index=True, blank=True)),
('submitted_at', self.gf('django.db.models.fields.DateTimeField')(null=True, db_index=True)),
('reviewing_user', self.gf('django.db.models.fields.related.ForeignKey')(default=None, related_name='photo_verifications_reviewed', null=True, to=orm['auth.User'])),
('reviewing_service', self.gf('django.db.models.fields.CharField')(max_length=255, blank=True)),
('error_msg', self.gf('django.db.models.fields.TextField')(blank=True)),
('error_code', self.gf('django.db.models.fields.CharField')(max_length=50, blank=True)),
('photo_id_key', self.gf('django.db.models.fields.TextField')(max_length=1024)),
))
db.send_create_signal('verify_student', ['SoftwareSecurePhotoVerification'])
def backwards(self, orm):
# Deleting model 'SoftwareSecurePhotoVerification'
db.delete_table('verify_student_softwaresecurephotoverification')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'verify_student.softwaresecurephotoverification': {
'Meta': {'ordering': "['-created_at']", 'object_name': 'SoftwareSecurePhotoVerification'},
'created_at': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'db_index': 'True', 'blank': 'True'}),
'error_code': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'error_msg': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'face_image_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'photo_id_image_url': ('django.db.models.fields.URLField', [], {'max_length': '255', 'blank': 'True'}),
'photo_id_key': ('django.db.models.fields.TextField', [], {'max_length': '1024'}),
'receipt_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'db_index': 'True'}),
'reviewing_service': ('django.db.models.fields.CharField', [], {'max_length': '255', 'blank': 'True'}),
'reviewing_user': ('django.db.models.fields.related.ForeignKey', [], {'default': 'None', 'related_name': "'photo_verifications_reviewed'", 'null': 'True', 'to': "orm['auth.User']"}),
'status': ('model_utils.fields.StatusField', [], {'default': "'created'", 'max_length': '100', u'no_check_for_status': 'True'}),
'status_changed': ('model_utils.fields.MonitorField', [], {'default': 'datetime.datetime.now', u'monitor': "u'status'"}),
'submitted_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'db_index': 'True'}),
'updated_at': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'db_index': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['verify_student']
| agpl-3.0 |
hatwar/Das_erpnext | erpnext/utilities/doctype/rename_tool/rename_tool.py | 46 | 1709 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
from frappe.model.document import Document
class RenameTool(Document):
pass
@frappe.whitelist()
def get_doctypes():
return frappe.db.sql_list("""select name from tabDocType
where ifnull(allow_rename,0)=1 and module!='Core' order by name""")
@frappe.whitelist()
def upload(select_doctype=None, rows=None):
from frappe.utils.csvutils import read_csv_content_from_attached_file
from frappe.model.rename_doc import rename_doc
if not select_doctype:
select_doctype = frappe.form_dict.select_doctype
if not frappe.has_permission(select_doctype, "write"):
raise frappe.PermissionError
if not rows:
rows = read_csv_content_from_attached_file(frappe.get_doc("Rename Tool", "Rename Tool"))
if not rows:
frappe.throw(_("Please select a valid csv file with data"))
max_rows = 500
if len(rows) > max_rows:
frappe.throw(_("Maximum {0} rows allowed").format(max_rows))
rename_log = []
for row in rows:
# if row has some content
if len(row) > 1 and row[0] and row[1]:
try:
if rename_doc(select_doctype, row[0], row[1]):
rename_log.append(_("Successful: ") + row[0] + " -> " + row[1])
frappe.db.commit()
else:
rename_log.append(_("Ignored: ") + row[0] + " -> " + row[1])
			except Exception as e:
rename_log.append("<span style='color: RED'>" + \
_("Failed: ") + row[0] + " -> " + row[1] + "</span>")
rename_log.append("<span style='margin-left: 20px;'>" + repr(e) + "</span>")
return rename_log
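# Illustrative usage (hypothetical DocType and document names, assuming a
# session with write permission on that DocType; not part of the original
# module):
#
#     rows = [["CUST-0001", "CUST-0001-OLD"], ["CUST-0002", "CUST-0002-OLD"]]
#     log = upload(select_doctype="Customer", rows=rows)
#     # -> ["Successful: CUST-0001 -> CUST-0001-OLD", ...]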
| agpl-3.0 |
DebrahR/memory | server/lib/werkzeug/contrib/cache.py | 306 | 23519 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.cache
~~~~~~~~~~~~~~~~~~~~~~
The main problem with dynamic Web sites is, well, they're dynamic. Each
time a user requests a page, the webserver executes a lot of code, queries
    the database, and renders templates before the visitor gets the page he sees.
This is a lot more expensive than just loading a file from the file system
and sending it to the visitor.
    For most Web applications, this overhead isn't a big deal, but once it
    becomes one, you will be glad to have a cache system in place.
How Caching Works
=================
Caching is pretty simple. Basically you have a cache object lurking around
somewhere that is connected to a remote cache or the file system or
something else. When the request comes in you check if the current page
    is already in the cache and, if so, you return it from the cache.
    Otherwise you generate the page and put it into the cache (or a fragment
    of the page; you don't have to cache the full thing).
Here is a simple example of how to cache a sidebar for a template::
def get_sidebar(user):
identifier = 'sidebar_for/user%d' % user.id
value = cache.get(identifier)
if value is not None:
return value
value = generate_sidebar_for(user=user)
cache.set(identifier, value, timeout=60 * 5)
return value
Creating a Cache Object
=======================
To create a cache object you just import the cache system of your choice
from the cache module and instantiate it. Then you can start working
with that object:
>>> from werkzeug.contrib.cache import SimpleCache
>>> c = SimpleCache()
>>> c.set("foo", "value")
>>> c.get("foo")
'value'
>>> c.get("missing") is None
True
Please keep in mind that you have to create the cache and put it somewhere
    you have access to it (either as a module global you can import or by
    putting it into your WSGI application).
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import os
import re
import tempfile
from hashlib import md5
from time import time
try:
import cPickle as pickle
except ImportError:
import pickle
from werkzeug._compat import iteritems, string_types, text_type, \
integer_types, to_bytes
from werkzeug.posixemulation import rename
def _items(mappingorseq):
"""Wrapper for efficient iteration over mappings represented by dicts
or sequences::
>>> for k, v in _items((i, i*i) for i in xrange(5)):
... assert k*k == v
>>> for k, v in _items(dict((i, i*i) for i in xrange(5))):
... assert k*k == v
"""
if hasattr(mappingorseq, "iteritems"):
return mappingorseq.iteritems()
elif hasattr(mappingorseq, "items"):
return mappingorseq.items()
return mappingorseq
class BaseCache(object):
"""Baseclass for the cache systems. All the cache systems implement this
API or a superset of it.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`set`.
"""
def __init__(self, default_timeout=300):
self.default_timeout = default_timeout
def get(self, key):
"""Looks up key in the cache and returns the value for it.
If the key does not exist `None` is returned instead.
:param key: the key to be looked up.
"""
return None
def delete(self, key):
"""Deletes `key` from the cache. If it does not exist in the cache
nothing happens.
:param key: the key to delete.
"""
pass
def get_many(self, *keys):
"""Returns a list of values for the given keys.
        For each key an item in the list is created.  Example::
foo, bar = cache.get_many("foo", "bar")
If a key can't be looked up `None` is returned for that key
instead.
:param keys: The function accepts multiple keys as positional
arguments.
"""
        return [self.get(k) for k in keys]
def get_dict(self, *keys):
"""Works like :meth:`get_many` but returns a dict::
d = cache.get_dict("foo", "bar")
foo = d["foo"]
bar = d["bar"]
:param keys: The function accepts multiple keys as positional
arguments.
"""
return dict(zip(keys, self.get_many(*keys)))
def set(self, key, value, timeout=None):
"""Adds a new key/value to the cache (overwrites value, if key already
exists in the cache).
:param key: the key to set
:param value: the value for the key
:param timeout: the cache timeout for the key (if not specified,
it uses the default timeout).
"""
pass
def add(self, key, value, timeout=None):
"""Works like :meth:`set` but does not overwrite the values of already
existing keys.
:param key: the key to set
:param value: the value for the key
:param timeout: the cache timeout for the key or the default
timeout if not specified.
"""
pass
def set_many(self, mapping, timeout=None):
"""Sets multiple keys and values from a mapping.
:param mapping: a mapping with the keys/values to set.
:param timeout: the cache timeout for the key (if not specified,
it uses the default timeout).
"""
for key, value in _items(mapping):
self.set(key, value, timeout)
def delete_many(self, *keys):
"""Deletes multiple keys at once.
:param keys: The function accepts multiple keys as positional
arguments.
"""
for key in keys:
self.delete(key)
def clear(self):
"""Clears the cache. Keep in mind that not all caches support
completely clearing the cache.
"""
pass
def inc(self, key, delta=1):
"""Increments the value of a key by `delta`. If the key does
not yet exist it is initialized with `delta`.
For supporting caches this is an atomic operation.
:param key: the key to increment.
:param delta: the delta to add.
"""
self.set(key, (self.get(key) or 0) + delta)
def dec(self, key, delta=1):
"""Decrements the value of a key by `delta`. If the key does
not yet exist it is initialized with `-delta`.
For supporting caches this is an atomic operation.
:param key: the key to increment.
:param delta: the delta to subtract.
"""
self.set(key, (self.get(key) or 0) - delta)
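# A minimal usage sketch of the BaseCache API above, using the SimpleCache
# subclass defined below (values are illustrative):
#
#     cache = SimpleCache(default_timeout=300)
#     cache.set_many({'a': 1, 'b': 2}, timeout=60)
#     cache.inc('hits')                # missing key -> initialized to 1
#     cache.get_dict('a', 'b')         # {'a': 1, 'b': 2}
#     cache.delete_many('a', 'b')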
class NullCache(BaseCache):
"""A cache that doesn't cache. This can be useful for unit testing.
:param default_timeout: a dummy parameter that is ignored but exists
for API compatibility with other caches.
"""
class SimpleCache(BaseCache):
"""Simple memory cache for single process environments. This class exists
mainly for the development server and is not 100% thread safe. It tries
    to use as many atomic operations as possible and no locks for simplicity,
    but under heavy load keys could end up being added multiple times.
:param threshold: the maximum number of items the cache stores before
it starts deleting some.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`.
"""
def __init__(self, threshold=500, default_timeout=300):
BaseCache.__init__(self, default_timeout)
self._cache = {}
self.clear = self._cache.clear
self._threshold = threshold
def _prune(self):
if len(self._cache) > self._threshold:
now = time()
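            # drop entries that have expired and, to get back under the
            # threshold, additionally evict roughly every third entry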
for idx, (key, (expires, _)) in enumerate(self._cache.items()):
if expires <= now or idx % 3 == 0:
self._cache.pop(key, None)
def get(self, key):
expires, value = self._cache.get(key, (0, None))
if expires > time():
return pickle.loads(value)
def set(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
self._prune()
self._cache[key] = (time() + timeout, pickle.dumps(value,
pickle.HIGHEST_PROTOCOL))
def add(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
if len(self._cache) > self._threshold:
self._prune()
item = (time() + timeout, pickle.dumps(value,
pickle.HIGHEST_PROTOCOL))
self._cache.setdefault(key, item)
def delete(self, key):
self._cache.pop(key, None)
_test_memcached_key = re.compile(br'[^\x00-\x21\xff]{1,250}$').match
class MemcachedCache(BaseCache):
"""A cache that uses memcached as backend.
The first argument can either be an object that resembles the API of a
:class:`memcache.Client` or a tuple/list of server addresses. In the
event that a tuple/list is passed, Werkzeug tries to import the best
available memcache library.
Implementation notes: This cache backend works around some limitations in
    memcached to simplify the interface.  For example, unicode keys are
    encoded to utf-8 on the fly.  Methods such as :meth:`~BaseCache.get_dict`
    return the keys in the same format as passed.  Furthermore, all get
    methods silently ignore key errors so that untrusted user data, which is
    often passed to them in web applications, does not cause problems.
:param servers: a list or tuple of server addresses or alternatively
a :class:`memcache.Client` or a compatible client.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`.
:param key_prefix: a prefix that is added before all keys. This makes it
possible to use the same memcached server for different
applications. Keep in mind that
:meth:`~BaseCache.clear` will also clear keys with a
different prefix.
"""
def __init__(self, servers=None, default_timeout=300, key_prefix=None):
BaseCache.__init__(self, default_timeout)
if servers is None or isinstance(servers, (list, tuple)):
if servers is None:
servers = ['127.0.0.1:11211']
self._client = self.import_preferred_memcache_lib(servers)
if self._client is None:
raise RuntimeError('no memcache module found')
else:
# NOTE: servers is actually an already initialized memcache
# client.
self._client = servers
self.key_prefix = to_bytes(key_prefix)
def get(self, key):
if isinstance(key, text_type):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
        # memcached doesn't support keys longer than that.  Because checks
        # for such long keys can often occur when user-submitted data is
        # tested, we fail silently for getting.
if _test_memcached_key(key):
return self._client.get(key)
def get_dict(self, *keys):
key_mapping = {}
have_encoded_keys = False
for key in keys:
            if isinstance(key, text_type):
encoded_key = key.encode('utf-8')
have_encoded_keys = True
else:
encoded_key = key
if self.key_prefix:
encoded_key = self.key_prefix + encoded_key
            if _test_memcached_key(encoded_key):
key_mapping[encoded_key] = key
d = rv = self._client.get_multi(key_mapping.keys())
if have_encoded_keys or self.key_prefix:
rv = {}
for key, value in iteritems(d):
rv[key_mapping[key]] = value
if len(rv) < len(keys):
for key in keys:
if key not in rv:
rv[key] = None
return rv
def add(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
if isinstance(key, text_type):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
self._client.add(key, value, timeout)
def set(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
if isinstance(key, text_type):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
self._client.set(key, value, timeout)
def get_many(self, *keys):
d = self.get_dict(*keys)
return [d[key] for key in keys]
def set_many(self, mapping, timeout=None):
if timeout is None:
timeout = self.default_timeout
new_mapping = {}
for key, value in _items(mapping):
if isinstance(key, text_type):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
new_mapping[key] = value
self._client.set_multi(new_mapping, timeout)
def delete(self, key):
        if isinstance(key, text_type):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
if _test_memcached_key(key):
self._client.delete(key)
def delete_many(self, *keys):
new_keys = []
for key in keys:
            if isinstance(key, text_type):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
if _test_memcached_key(key):
new_keys.append(key)
self._client.delete_multi(new_keys)
def clear(self):
self._client.flush_all()
def inc(self, key, delta=1):
        if isinstance(key, text_type):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
self._client.incr(key, delta)
def dec(self, key, delta=1):
        if isinstance(key, text_type):
key = key.encode('utf-8')
if self.key_prefix:
key = self.key_prefix + key
self._client.decr(key, delta)
def import_preferred_memcache_lib(self, servers):
"""Returns an initialized memcache client. Used by the constructor."""
try:
import pylibmc
except ImportError:
pass
else:
return pylibmc.Client(servers)
try:
from google.appengine.api import memcache
except ImportError:
pass
else:
return memcache.Client()
try:
import memcache
except ImportError:
pass
else:
return memcache.Client(servers)
# backwards compatibility
GAEMemcachedCache = MemcachedCache
class RedisCache(BaseCache):
"""Uses the Redis key-value store as a cache backend.
    The first argument can be either a string denoting the address of the
    Redis server or an object resembling an instance of a redis.Redis class.
Note: Python Redis API already takes care of encoding unicode strings on
the fly.
.. versionadded:: 0.7
.. versionadded:: 0.8
`key_prefix` was added.
.. versionchanged:: 0.8
This cache backend now properly serializes objects.
.. versionchanged:: 0.8.3
This cache backend now supports password authentication.
    :param host: address of the Redis server or an object whose API is
                 compatible with the official Python Redis client (redis-py).
:param port: port number on which Redis server listens for connections.
:param password: password authentication for the Redis server.
:param db: db (zero-based numeric index) on Redis Server to connect.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`.
:param key_prefix: A prefix that should be added to all keys.
"""
def __init__(self, host='localhost', port=6379, password=None,
db=0, default_timeout=300, key_prefix=None):
BaseCache.__init__(self, default_timeout)
if isinstance(host, string_types):
try:
import redis
except ImportError:
raise RuntimeError('no redis module found')
self._client = redis.Redis(host=host, port=port, password=password, db=db)
else:
self._client = host
self.key_prefix = key_prefix or ''
def dump_object(self, value):
"""Dumps an object into a string for redis. By default it serializes
integers as regular string and pickle dumps everything else.
"""
t = type(value)
if t in integer_types:
return str(value).encode('ascii')
return b'!' + pickle.dumps(value)
def load_object(self, value):
"""The reversal of :meth:`dump_object`. This might be callde with
None.
"""
if value is None:
return None
if value.startswith(b'!'):
return pickle.loads(value[1:])
try:
return int(value)
except ValueError:
# before 0.8 we did not have serialization. Still support that.
return value
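    # Round-trip sketch of the serialization scheme above (illustrative):
    #
    #     dump_object(42)      -> b'42'
    #     dump_object('spam')  -> b'!' + pickle.dumps('spam')
    #     load_object(b'42')   -> 42
    #     load_object(None)    -> None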
def get(self, key):
return self.load_object(self._client.get(self.key_prefix + key))
def get_many(self, *keys):
if self.key_prefix:
keys = [self.key_prefix + key for key in keys]
return [self.load_object(x) for x in self._client.mget(keys)]
def set(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
dump = self.dump_object(value)
self._client.setex(self.key_prefix + key, dump, timeout)
def add(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
dump = self.dump_object(value)
added = self._client.setnx(self.key_prefix + key, dump)
if added:
self._client.expire(self.key_prefix + key, timeout)
def set_many(self, mapping, timeout=None):
if timeout is None:
timeout = self.default_timeout
pipe = self._client.pipeline()
for key, value in _items(mapping):
dump = self.dump_object(value)
pipe.setex(self.key_prefix + key, dump, timeout)
pipe.execute()
def delete(self, key):
self._client.delete(self.key_prefix + key)
def delete_many(self, *keys):
if not keys:
return
if self.key_prefix:
keys = [self.key_prefix + key for key in keys]
self._client.delete(*keys)
def clear(self):
if self.key_prefix:
keys = self._client.keys(self.key_prefix + '*')
if keys:
self._client.delete(*keys)
else:
self._client.flushdb()
def inc(self, key, delta=1):
return self._client.incr(self.key_prefix + key, delta)
def dec(self, key, delta=1):
return self._client.decr(self.key_prefix + key, delta)
class FileSystemCache(BaseCache):
"""A cache that stores the items on the file system. This cache depends
on being the only user of the `cache_dir`. Make absolutely sure that
    nobody but this cache stores files there; otherwise the cache may
    delete those files at random.
:param cache_dir: the directory where cache files are stored.
:param threshold: the maximum number of items the cache stores before
it starts deleting some.
:param default_timeout: the default timeout that is used if no timeout is
specified on :meth:`~BaseCache.set`.
:param mode: the file mode wanted for the cache files, default 0600
"""
#: used for temporary files by the FileSystemCache
_fs_transaction_suffix = '.__wz_cache'
def __init__(self, cache_dir, threshold=500, default_timeout=300, mode=0o600):
BaseCache.__init__(self, default_timeout)
self._path = cache_dir
self._threshold = threshold
self._mode = mode
if not os.path.exists(self._path):
os.makedirs(self._path)
def _list_dir(self):
"""return a list of (fully qualified) cache filenames
"""
return [os.path.join(self._path, fn) for fn in os.listdir(self._path)
if not fn.endswith(self._fs_transaction_suffix)]
def _prune(self):
entries = self._list_dir()
if len(entries) > self._threshold:
now = time()
for idx, fname in enumerate(entries):
remove = False
f = None
try:
try:
f = open(fname, 'rb')
expires = pickle.load(f)
remove = expires <= now or idx % 3 == 0
finally:
if f is not None:
f.close()
except Exception:
pass
if remove:
try:
os.remove(fname)
except (IOError, OSError):
pass
def clear(self):
for fname in self._list_dir():
try:
os.remove(fname)
except (IOError, OSError):
pass
def _get_filename(self, key):
if isinstance(key, text_type):
key = key.encode('utf-8') #XXX unicode review
hash = md5(key).hexdigest()
return os.path.join(self._path, hash)
def get(self, key):
filename = self._get_filename(key)
try:
f = open(filename, 'rb')
try:
if pickle.load(f) >= time():
return pickle.load(f)
finally:
f.close()
os.remove(filename)
except Exception:
return None
def add(self, key, value, timeout=None):
filename = self._get_filename(key)
if not os.path.exists(filename):
self.set(key, value, timeout)
def set(self, key, value, timeout=None):
if timeout is None:
timeout = self.default_timeout
filename = self._get_filename(key)
self._prune()
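        # write the value to a temporary file first and atomically rename()
        # it into place, so concurrent readers never see a partial entry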
try:
fd, tmp = tempfile.mkstemp(suffix=self._fs_transaction_suffix,
dir=self._path)
f = os.fdopen(fd, 'wb')
try:
pickle.dump(int(time() + timeout), f, 1)
pickle.dump(value, f, pickle.HIGHEST_PROTOCOL)
finally:
f.close()
rename(tmp, filename)
os.chmod(filename, self._mode)
except (IOError, OSError):
pass
def delete(self, key):
try:
os.remove(self._get_filename(key))
except (IOError, OSError):
pass
| apache-2.0 |
obestwalter/mau-mau | setup.py | 1 | 1341 | import itertools
from setuptools import setup, find_packages
def generate_extras_require():
extras = {
':sys_platform == "win32"': ["win_unicode_console"],
"lint": ["flake8", "black"],
"test": ["pytest"],
"docs": ["mkdocs", "mkdocs-material"],
}
extras.update(dict(all=list(itertools.chain(*extras.values()))))
return extras
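# For illustration only, the mapping above resolves to something like:
#
#     {
#         ':sys_platform == "win32"': ["win_unicode_console"],
#         "lint": ["flake8", "black"],
#         "test": ["pytest"],
#         "docs": ["mkdocs", "mkdocs-material"],
#         "all": [...all of the requirements above combined...],
#     }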
setup(
name="mau-mau",
author="Oliver Bestwalter",
url="https://github.com/obestwalter/mau-mau",
license="MIT",
use_scm_version=True,
python_requires=">=3.6",
setup_requires=["setuptools_scm"],
install_requires=["fire"],
extras_require=generate_extras_require(),
packages=find_packages(),
entry_points={
"console_scripts": [
"mau-mau = mau_mau.play:cli",
"mau-mau-stats = mau_mau.stats:cli",
]
},
classifiers=[
"Programming Language :: Python :: 3.6",
"Development Status :: 5 - Mature",
"Environment :: Console",
"Intended Audience :: Developers",
"Intended Audience :: End Users/Desktop",
"Operating System :: POSIX",
"Operating System :: Microsoft :: Windows",
"License :: OSI Approved :: MIT License",
"Topic :: Education",
"Topic :: Games/Entertainment :: Turn Based Strategy",
],
)
| mit |
redhat-openstack/ironic | ironic/tests/drivers/test_pxe.py | 2 | 44100 | # Copyright 2013 Hewlett-Packard Development Company, L.P.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Test class for PXE driver."""
import os
import shutil
import tempfile
import mock
from oslo_config import cfg
from oslo_serialization import jsonutils as json
from oslo_utils import fileutils
from ironic.common import boot_devices
from ironic.common import dhcp_factory
from ironic.common import exception
from ironic.common.glance_service import base_image_service
from ironic.common import pxe_utils
from ironic.common import states
from ironic.common import utils
from ironic.conductor import task_manager
from ironic.drivers.modules import deploy_utils
from ironic.drivers.modules import pxe
from ironic.tests.conductor import utils as mgr_utils
from ironic.tests.db import base as db_base
from ironic.tests.db import utils as db_utils
from ironic.tests.objects import utils as obj_utils
CONF = cfg.CONF
INST_INFO_DICT = db_utils.get_test_pxe_instance_info()
DRV_INFO_DICT = db_utils.get_test_pxe_driver_info()
DRV_INTERNAL_INFO_DICT = db_utils.get_test_pxe_driver_internal_info()
class PXEValidateParametersTestCase(db_base.DbTestCase):
def _test__parse_instance_info(
self, instance_info=INST_INFO_DICT,
driver_info=DRV_INFO_DICT,
driver_internal_info=DRV_INTERNAL_INFO_DICT):
# make sure we get back the expected things
node = obj_utils.create_test_node(
self.context,
driver='fake_pxe',
instance_info=instance_info,
driver_info=driver_info,
driver_internal_info=DRV_INTERNAL_INFO_DICT,
)
info = pxe._parse_instance_info(node)
self.assertIsNotNone(info.get('image_source'))
return info
def test__parse_instance_info_good(self):
self._test__parse_instance_info()
def test__parse_instance_info_good_non_glance_image(self):
instance_info = INST_INFO_DICT.copy()
instance_info['image_source'] = 'http://image'
instance_info['kernel'] = 'http://kernel'
instance_info['ramdisk'] = 'http://ramdisk'
info = self._test__parse_instance_info(instance_info=instance_info)
self.assertIsNotNone(info.get('ramdisk'))
self.assertIsNotNone(info.get('kernel'))
def test__parse_instance_info_non_glance_image_missing_kernel(self):
instance_info = INST_INFO_DICT.copy()
instance_info['image_source'] = 'http://image'
instance_info['ramdisk'] = 'http://ramdisk'
self.assertRaises(
exception.MissingParameterValue,
self._test__parse_instance_info,
instance_info=instance_info)
def test__parse_instance_info_non_glance_image_missing_ramdisk(self):
instance_info = INST_INFO_DICT.copy()
instance_info['image_source'] = 'http://image'
instance_info['kernel'] = 'http://kernel'
self.assertRaises(
exception.MissingParameterValue,
self._test__parse_instance_info,
instance_info=instance_info)
def test__parse_instance_info_missing_image_source(self):
instance_info = INST_INFO_DICT.copy()
del instance_info['image_source']
self.assertRaises(
exception.MissingParameterValue,
self._test__parse_instance_info,
instance_info=instance_info)
def test__parse_instance_info_whole_disk_image(self):
driver_internal_info = DRV_INTERNAL_INFO_DICT.copy()
driver_internal_info['is_whole_disk_image'] = True
self._test__parse_instance_info(
driver_internal_info=driver_internal_info)
class PXEPrivateMethodsTestCase(db_base.DbTestCase):
def setUp(self):
super(PXEPrivateMethodsTestCase, self).setUp()
n = {
'driver': 'fake_pxe',
'instance_info': INST_INFO_DICT,
'driver_info': DRV_INFO_DICT,
'driver_internal_info': DRV_INTERNAL_INFO_DICT,
}
mgr_utils.mock_the_extension_manager(driver="fake_pxe")
self.node = obj_utils.create_test_node(self.context, **n)
def _test_get_pxe_conf_option(self, driver, expected_value):
mgr_utils.mock_the_extension_manager(driver=driver)
self.node.driver = driver
self.node.save()
with task_manager.acquire(self.context, self.node.uuid) as task:
returned_value = pxe._get_pxe_conf_option(
task, 'pxe_config_template')
self.assertEqual(expected_value, returned_value)
def test_get_pxe_conf_option_iscsi_deploy(self):
self.config(group='pxe', pxe_config_template='my-pxe-config-template')
self._test_get_pxe_conf_option('fake_pxe',
'my-pxe-config-template')
def test_get_pxe_conf_option_agent_deploy_default(self):
self.config(group='pxe', pxe_config_template='my-pxe-config-template')
self._test_get_pxe_conf_option('fake_agent',
'my-pxe-config-template')
def test_get_pxe_conf_option_agent_deploy_not_default(self):
self.config(group='agent',
agent_pxe_config_template='my-agent-config-template')
self.config(group='pxe', pxe_config_template='my-pxe-config-template')
self._test_get_pxe_conf_option('fake_agent',
'my-agent-config-template')
def test__parse_driver_info_missing_deploy_kernel(self):
del self.node.driver_info['deploy_kernel']
self.assertRaises(exception.MissingParameterValue,
pxe._parse_driver_info, self.node)
def test__parse_driver_info_missing_deploy_ramdisk(self):
del self.node.driver_info['deploy_ramdisk']
self.assertRaises(exception.MissingParameterValue,
pxe._parse_driver_info, self.node)
def test__parse_driver_info(self):
expected_info = {'deploy_ramdisk': 'glance://deploy_ramdisk_uuid',
'deploy_kernel': 'glance://deploy_kernel_uuid'}
image_info = pxe._parse_driver_info(self.node)
self.assertEqual(expected_info, image_info)
def test__get_deploy_image_info(self):
expected_info = {'deploy_ramdisk':
(DRV_INFO_DICT['deploy_ramdisk'],
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'deploy_ramdisk')),
'deploy_kernel':
(DRV_INFO_DICT['deploy_kernel'],
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'deploy_kernel'))}
image_info = pxe._get_deploy_image_info(self.node)
self.assertEqual(expected_info, image_info)
def test__get_deploy_image_info_missing_deploy_kernel(self):
del self.node.driver_info['deploy_kernel']
self.assertRaises(exception.MissingParameterValue,
pxe._get_deploy_image_info, self.node)
    def test__get_deploy_image_info_missing_deploy_ramdisk(self):
del self.node.driver_info['deploy_ramdisk']
self.assertRaises(exception.MissingParameterValue,
pxe._get_deploy_image_info, self.node)
@mock.patch.object(base_image_service.BaseImageService, '_show',
autospec=True)
def _test__get_instance_image_info(self, show_mock):
properties = {'properties': {u'kernel_id': u'instance_kernel_uuid',
u'ramdisk_id': u'instance_ramdisk_uuid'}}
expected_info = {'ramdisk':
('instance_ramdisk_uuid',
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'ramdisk')),
'kernel':
('instance_kernel_uuid',
os.path.join(CONF.pxe.tftp_root,
self.node.uuid,
'kernel'))}
show_mock.return_value = properties
self.context.auth_token = 'fake'
image_info = pxe._get_instance_image_info(self.node, self.context)
show_mock.assert_called_once_with(mock.ANY, 'glance://image_uuid',
method='get')
self.assertEqual(expected_info, image_info)
# test with saved info
show_mock.reset_mock()
image_info = pxe._get_instance_image_info(self.node, self.context)
self.assertEqual(expected_info, image_info)
self.assertFalse(show_mock.called)
self.assertEqual('instance_kernel_uuid',
self.node.instance_info.get('kernel'))
self.assertEqual('instance_ramdisk_uuid',
self.node.instance_info.get('ramdisk'))
def test__get_instance_image_info(self):
# Tests when 'is_whole_disk_image' exists in driver_internal_info
self._test__get_instance_image_info()
def test__get_instance_image_info_without_is_whole_disk_image(self):
# Tests when 'is_whole_disk_image' doesn't exists in
# driver_internal_info
del self.node.driver_internal_info['is_whole_disk_image']
self.node.save()
self._test__get_instance_image_info()
@mock.patch.object(base_image_service.BaseImageService, '_show',
autospec=True)
def test__get_instance_image_info_whole_disk_image(self, show_mock):
properties = {'properties': None}
show_mock.return_value = properties
self.node.driver_internal_info['is_whole_disk_image'] = True
image_info = pxe._get_instance_image_info(self.node, self.context)
self.assertEqual({}, image_info)
@mock.patch.object(pxe_utils, '_build_pxe_config', autospec=True)
def _test_build_pxe_config_options(self, build_pxe_mock,
whle_dsk_img=False,
ipxe_enabled=False):
self.config(pxe_append_params='test_param', group='pxe')
        # NOTE: the trailing '/' should be removed from the url string
self.config(api_url='http://192.168.122.184:6385', group='conductor')
self.config(disk_devices='sda', group='pxe')
driver_internal_info = self.node.driver_internal_info
driver_internal_info['is_whole_disk_image'] = whle_dsk_img
self.node.driver_internal_info = driver_internal_info
self.node.save()
tftp_server = CONF.pxe.tftp_server
if ipxe_enabled:
http_url = 'http://192.1.2.3:1234'
self.config(ipxe_enabled=True, group='pxe')
self.config(http_url=http_url, group='deploy')
deploy_kernel = os.path.join(http_url, self.node.uuid,
'deploy_kernel')
deploy_ramdisk = os.path.join(http_url, self.node.uuid,
'deploy_ramdisk')
kernel = os.path.join(http_url, self.node.uuid, 'kernel')
ramdisk = os.path.join(http_url, self.node.uuid, 'ramdisk')
root_dir = CONF.deploy.http_root
else:
deploy_kernel = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'deploy_kernel')
deploy_ramdisk = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'deploy_ramdisk')
kernel = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'kernel')
ramdisk = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'ramdisk')
root_dir = CONF.pxe.tftp_root
if whle_dsk_img:
ramdisk = 'no_ramdisk'
kernel = 'no_kernel'
expected_options = {
'ari_path': ramdisk,
'deployment_ari_path': deploy_ramdisk,
'pxe_append_params': 'test_param',
'aki_path': kernel,
'deployment_aki_path': deploy_kernel,
'tftp_server': tftp_server,
}
image_info = {'deploy_kernel': ('deploy_kernel',
os.path.join(root_dir,
self.node.uuid,
'deploy_kernel')),
'deploy_ramdisk': ('deploy_ramdisk',
os.path.join(root_dir,
self.node.uuid,
'deploy_ramdisk')),
'kernel': ('kernel_id',
os.path.join(root_dir,
self.node.uuid,
'kernel')),
'ramdisk': ('ramdisk_id',
os.path.join(root_dir,
self.node.uuid,
'ramdisk'))}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
options = pxe._build_pxe_config_options(task, image_info)
self.assertEqual(expected_options, options)
def test__build_pxe_config_options(self):
self._test_build_pxe_config_options(whle_dsk_img=True,
ipxe_enabled=False)
def test__build_pxe_config_options_ipxe(self):
self._test_build_pxe_config_options(whle_dsk_img=True,
ipxe_enabled=True)
def test__build_pxe_config_options_without_is_whole_disk_image(self):
del self.node.driver_internal_info['is_whole_disk_image']
self.node.save()
self._test_build_pxe_config_options(whle_dsk_img=False,
ipxe_enabled=False)
@mock.patch.object(pxe_utils, '_build_pxe_config', autospec=True)
def test__build_pxe_config_options_whole_disk_image(self,
build_pxe_mock,
ipxe_enabled=False):
self.config(pxe_append_params='test_param', group='pxe')
        # NOTE: the trailing '/' should be removed from the url string
self.config(api_url='http://192.168.122.184:6385', group='conductor')
self.config(disk_devices='sda', group='pxe')
tftp_server = CONF.pxe.tftp_server
if ipxe_enabled:
http_url = 'http://192.1.2.3:1234'
self.config(ipxe_enabled=True, group='pxe')
self.config(http_url=http_url, group='deploy')
deploy_kernel = os.path.join(http_url, self.node.uuid,
'deploy_kernel')
deploy_ramdisk = os.path.join(http_url, self.node.uuid,
'deploy_ramdisk')
root_dir = CONF.deploy.http_root
else:
deploy_kernel = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'deploy_kernel')
deploy_ramdisk = os.path.join(CONF.pxe.tftp_root, self.node.uuid,
'deploy_ramdisk')
root_dir = CONF.pxe.tftp_root
expected_options = {
'deployment_ari_path': deploy_ramdisk,
'pxe_append_params': 'test_param',
'deployment_aki_path': deploy_kernel,
'tftp_server': tftp_server,
'aki_path': 'no_kernel',
'ari_path': 'no_ramdisk',
}
image_info = {'deploy_kernel': ('deploy_kernel',
os.path.join(root_dir,
self.node.uuid,
'deploy_kernel')),
'deploy_ramdisk': ('deploy_ramdisk',
os.path.join(root_dir,
self.node.uuid,
'deploy_ramdisk')),
}
driver_internal_info = self.node.driver_internal_info
driver_internal_info['is_whole_disk_image'] = True
self.node.driver_internal_info = driver_internal_info
self.node.save()
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
options = pxe._build_pxe_config_options(task, image_info)
self.assertEqual(expected_options, options)
def test__build_pxe_config_options_no_kernel_no_ramdisk(self):
del self.node.driver_internal_info['is_whole_disk_image']
self.node.save()
self.config(group='pxe', tftp_server='my-tftp-server')
self.config(group='pxe', pxe_append_params='my-pxe-append-params')
image_info = {
'deploy_kernel': ('deploy_kernel',
'path-to-deploy_kernel'),
'deploy_ramdisk': ('deploy_ramdisk',
'path-to-deploy_ramdisk')}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
options = pxe._build_pxe_config_options(task, image_info)
expected_options = {
'deployment_aki_path': 'path-to-deploy_kernel',
'deployment_ari_path': 'path-to-deploy_ramdisk',
'pxe_append_params': 'my-pxe-append-params',
'tftp_server': 'my-tftp-server',
'aki_path': 'no_kernel',
'ari_path': 'no_ramdisk'}
self.assertEqual(expected_options, options)
@mock.patch.object(deploy_utils, 'fetch_images', autospec=True)
def test__cache_tftp_images_master_path(self, mock_fetch_image):
temp_dir = tempfile.mkdtemp()
self.config(tftp_root=temp_dir, group='pxe')
self.config(tftp_master_path=os.path.join(temp_dir,
'tftp_master_path'),
group='pxe')
image_path = os.path.join(temp_dir, self.node.uuid,
'deploy_kernel')
image_info = {'deploy_kernel': ('deploy_kernel', image_path)}
fileutils.ensure_tree(CONF.pxe.tftp_master_path)
pxe._cache_ramdisk_kernel(None, self.node, image_info)
mock_fetch_image.assert_called_once_with(None,
mock.ANY,
[('deploy_kernel',
image_path)],
True)
@mock.patch.object(pxe, 'TFTPImageCache', lambda: None)
@mock.patch.object(fileutils, 'ensure_tree', autospec=True)
@mock.patch.object(deploy_utils, 'fetch_images', autospec=True)
def test__cache_ramdisk_kernel(self, mock_fetch_image, mock_ensure_tree):
self.config(ipxe_enabled=False, group='pxe')
fake_pxe_info = {'foo': 'bar'}
expected_path = os.path.join(CONF.pxe.tftp_root, self.node.uuid)
pxe._cache_ramdisk_kernel(self.context, self.node, fake_pxe_info)
mock_ensure_tree.assert_called_with(expected_path)
mock_fetch_image.assert_called_once_with(
self.context, mock.ANY, list(fake_pxe_info.values()), True)
@mock.patch.object(pxe, 'TFTPImageCache', lambda: None)
@mock.patch.object(fileutils, 'ensure_tree', autospec=True)
@mock.patch.object(deploy_utils, 'fetch_images', autospec=True)
def test__cache_ramdisk_kernel_ipxe(self, mock_fetch_image,
mock_ensure_tree):
self.config(ipxe_enabled=True, group='pxe')
fake_pxe_info = {'foo': 'bar'}
expected_path = os.path.join(CONF.deploy.http_root,
self.node.uuid)
pxe._cache_ramdisk_kernel(self.context, self.node, fake_pxe_info)
mock_ensure_tree.assert_called_with(expected_path)
mock_fetch_image.assert_called_once_with(self.context, mock.ANY,
list(fake_pxe_info.values()),
True)
@mock.patch.object(pxe.LOG, 'error', autospec=True)
def test_validate_boot_option_for_uefi_exc(self, mock_log):
properties = {'capabilities': 'boot_mode:uefi'}
instance_info = {"boot_option": "netboot"}
self.node.properties = properties
self.node.instance_info['capabilities'] = instance_info
self.node.driver_internal_info['is_whole_disk_image'] = True
self.assertRaises(exception.InvalidParameterValue,
pxe.validate_boot_option_for_uefi,
self.node)
self.assertTrue(mock_log.called)
@mock.patch.object(pxe.LOG, 'error', autospec=True)
def test_validate_boot_option_for_uefi_noexc_one(self, mock_log):
properties = {'capabilities': 'boot_mode:uefi'}
instance_info = {"boot_option": "local"}
self.node.properties = properties
self.node.instance_info['capabilities'] = instance_info
self.node.driver_internal_info['is_whole_disk_image'] = True
pxe.validate_boot_option_for_uefi(self.node)
self.assertFalse(mock_log.called)
@mock.patch.object(pxe.LOG, 'error', autospec=True)
def test_validate_boot_option_for_uefi_noexc_two(self, mock_log):
properties = {'capabilities': 'boot_mode:bios'}
instance_info = {"boot_option": "local"}
self.node.properties = properties
self.node.instance_info['capabilities'] = instance_info
self.node.driver_internal_info['is_whole_disk_image'] = True
pxe.validate_boot_option_for_uefi(self.node)
self.assertFalse(mock_log.called)
@mock.patch.object(pxe.LOG, 'error', autospec=True)
def test_validate_boot_option_for_uefi_noexc_three(self, mock_log):
properties = {'capabilities': 'boot_mode:uefi'}
instance_info = {"boot_option": "local"}
self.node.properties = properties
self.node.instance_info['capabilities'] = instance_info
self.node.driver_internal_info['is_whole_disk_image'] = False
pxe.validate_boot_option_for_uefi(self.node)
self.assertFalse(mock_log.called)
@mock.patch.object(pxe.LOG, 'error', autospec=True)
def test_validate_boot_parameters_for_trusted_boot_one(self, mock_log):
properties = {'capabilities': 'boot_mode:uefi'}
instance_info = {"boot_option": "netboot"}
self.node.properties = properties
self.node.instance_info['capabilities'] = instance_info
self.node.driver_internal_info['is_whole_disk_image'] = False
self.assertRaises(exception.InvalidParameterValue,
pxe.validate_boot_parameters_for_trusted_boot,
self.node)
self.assertTrue(mock_log.called)
@mock.patch.object(pxe.LOG, 'error', autospec=True)
def test_validate_boot_parameters_for_trusted_boot_two(self, mock_log):
properties = {'capabilities': 'boot_mode:bios'}
instance_info = {"boot_option": "local"}
self.node.properties = properties
self.node.instance_info['capabilities'] = instance_info
self.node.driver_internal_info['is_whole_disk_image'] = False
self.assertRaises(exception.InvalidParameterValue,
pxe.validate_boot_parameters_for_trusted_boot,
self.node)
self.assertTrue(mock_log.called)
@mock.patch.object(pxe.LOG, 'error', autospec=True)
def test_validate_boot_parameters_for_trusted_boot_three(self, mock_log):
properties = {'capabilities': 'boot_mode:bios'}
instance_info = {"boot_option": "netboot"}
self.node.properties = properties
self.node.instance_info['capabilities'] = instance_info
self.node.driver_internal_info['is_whole_disk_image'] = True
self.assertRaises(exception.InvalidParameterValue,
pxe.validate_boot_parameters_for_trusted_boot,
self.node)
self.assertTrue(mock_log.called)
@mock.patch.object(pxe.LOG, 'error', autospec=True)
def test_validate_boot_parameters_for_trusted_boot_pass(self, mock_log):
properties = {'capabilities': 'boot_mode:bios'}
instance_info = {"boot_option": "netboot"}
self.node.properties = properties
self.node.instance_info['capabilities'] = instance_info
self.node.driver_internal_info['is_whole_disk_image'] = False
pxe.validate_boot_parameters_for_trusted_boot(self.node)
self.assertFalse(mock_log.called)
@mock.patch.object(utils, 'unlink_without_raise', autospec=True)
@mock.patch.object(pxe_utils, 'clean_up_pxe_config', autospec=True)
@mock.patch.object(pxe, 'TFTPImageCache', autospec=True)
class CleanUpPxeEnvTestCase(db_base.DbTestCase):
def setUp(self):
super(CleanUpPxeEnvTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver="fake_pxe")
instance_info = INST_INFO_DICT
instance_info['deploy_key'] = 'fake-56789'
self.node = obj_utils.create_test_node(
self.context, driver='fake_pxe',
instance_info=instance_info,
driver_info=DRV_INFO_DICT,
driver_internal_info=DRV_INTERNAL_INFO_DICT,
)
def test__clean_up_pxe_env(self, mock_cache, mock_pxe_clean,
mock_unlink):
image_info = {'label': ['', 'deploy_kernel']}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
pxe._clean_up_pxe_env(task, image_info)
mock_pxe_clean.assert_called_once_with(task)
mock_unlink.assert_any_call('deploy_kernel')
mock_cache.return_value.clean_up.assert_called_once_with()
class PXEBootTestCase(db_base.DbTestCase):
def setUp(self):
super(PXEBootTestCase, self).setUp()
self.context.auth_token = 'fake'
self.temp_dir = tempfile.mkdtemp()
self.config(tftp_root=self.temp_dir, group='pxe')
self.temp_dir = tempfile.mkdtemp()
self.config(images_path=self.temp_dir, group='pxe')
mgr_utils.mock_the_extension_manager(driver="fake_pxe")
instance_info = INST_INFO_DICT
instance_info['deploy_key'] = 'fake-56789'
self.node = obj_utils.create_test_node(
self.context,
driver='fake_pxe',
instance_info=instance_info,
driver_info=DRV_INFO_DICT,
driver_internal_info=DRV_INTERNAL_INFO_DICT)
self.port = obj_utils.create_test_port(self.context,
node_id=self.node.id)
self.config(group='conductor', api_url='http://127.0.0.1:1234/')
def test_get_properties(self):
expected = pxe.COMMON_PROPERTIES
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertEqual(expected, task.driver.get_properties())
@mock.patch.object(base_image_service.BaseImageService, '_show',
autospec=True)
def test_validate_good(self, mock_glance):
mock_glance.return_value = {'properties': {'kernel_id': 'fake-kernel',
'ramdisk_id': 'fake-initr'}}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.boot.validate(task)
@mock.patch.object(base_image_service.BaseImageService, '_show',
autospec=True)
def test_validate_good_whole_disk_image(self, mock_glance):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.driver_internal_info['is_whole_disk_image'] = True
task.driver.boot.validate(task)
def test_validate_fail_missing_deploy_kernel(self):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
del task.node.driver_info['deploy_kernel']
self.assertRaises(exception.MissingParameterValue,
task.driver.boot.validate, task)
def test_validate_fail_missing_deploy_ramdisk(self):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
del task.node.driver_info['deploy_ramdisk']
self.assertRaises(exception.MissingParameterValue,
task.driver.boot.validate, task)
def test_validate_fail_missing_image_source(self):
info = dict(INST_INFO_DICT)
del info['image_source']
self.node.instance_info = json.dumps(info)
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node['instance_info'] = json.dumps(info)
self.assertRaises(exception.MissingParameterValue,
task.driver.boot.validate, task)
@mock.patch.object(base_image_service.BaseImageService, '_show',
autospec=True)
def test_validate_fail_invalid_config_uefi_ipxe(self, mock_glance):
properties = {'capabilities': 'boot_mode:uefi,cap2:value2'}
mock_glance.return_value = {'properties': {'kernel_id': 'fake-kernel',
'ramdisk_id': 'fake-initr'}}
self.config(ipxe_enabled=True, group='pxe')
self.config(http_url='dummy_url', group='deploy')
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.properties = properties
self.assertRaises(exception.InvalidParameterValue,
task.driver.boot.validate, task)
def test_validate_fail_invalid_config_uefi_whole_disk_image(self):
properties = {'capabilities': 'boot_mode:uefi,boot_option:netboot'}
instance_info = {"boot_option": "netboot"}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.properties = properties
task.node.instance_info['capabilities'] = instance_info
task.node.driver_internal_info['is_whole_disk_image'] = True
self.assertRaises(exception.InvalidParameterValue,
task.driver.boot.validate, task)
def test_validate_fail_no_port(self):
new_node = obj_utils.create_test_node(
self.context,
uuid='aaaaaaaa-bbbb-cccc-dddd-eeeeeeeeeeee',
driver='fake_pxe', instance_info=INST_INFO_DICT,
driver_info=DRV_INFO_DICT)
with task_manager.acquire(self.context, new_node.uuid,
shared=True) as task:
self.assertRaises(exception.MissingParameterValue,
task.driver.boot.validate, task)
def test_validate_fail_trusted_boot_with_secure_boot(self):
instance_info = {"boot_option": "netboot",
"secure_boot": "true",
"trusted_boot": "true"}
properties = {'capabilities': 'trusted_boot:true'}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.instance_info['capabilities'] = instance_info
task.node.properties = properties
task.node.driver_internal_info['is_whole_disk_image'] = False
self.assertRaises(exception.InvalidParameterValue,
task.driver.boot.validate, task)
def test_validate_fail_invalid_trusted_boot_value(self):
properties = {'capabilities': 'trusted_boot:value'}
instance_info = {"trusted_boot": "value"}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.node.properties = properties
task.node.instance_info['capabilities'] = instance_info
self.assertRaises(exception.InvalidParameterValue,
task.driver.boot.validate, task)
@mock.patch.object(base_image_service.BaseImageService, '_show',
autospec=True)
def test_validate_fail_no_image_kernel_ramdisk_props(self, mock_glance):
mock_glance.return_value = {'properties': {}}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.MissingParameterValue,
task.driver.boot.validate,
task)
@mock.patch.object(base_image_service.BaseImageService, '_show',
autospec=True)
def test_validate_fail_glance_image_doesnt_exists(self, mock_glance):
mock_glance.side_effect = iter([exception.ImageNotFound('not found')])
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.boot.validate, task)
@mock.patch.object(base_image_service.BaseImageService, '_show',
autospec=True)
def test_validate_fail_glance_conn_problem(self, mock_glance):
exceptions = (exception.GlanceConnectionFailed('connection fail'),
exception.ImageNotAuthorized('not authorized'),
exception.Invalid('invalid'))
mock_glance.side_effect = iter(exceptions)
for exc in exceptions:
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.boot.validate, task)
@mock.patch.object(dhcp_factory, 'DHCPFactory')
@mock.patch.object(pxe, '_get_instance_image_info', autospec=True)
@mock.patch.object(pxe, '_get_deploy_image_info', autospec=True)
@mock.patch.object(pxe, '_cache_ramdisk_kernel', autospec=True)
@mock.patch.object(pxe, '_build_pxe_config_options', autospec=True)
@mock.patch.object(pxe_utils, 'create_pxe_config', autospec=True)
def _test_prepare_ramdisk(self, mock_pxe_config,
mock_build_pxe, mock_cache_r_k,
mock_deploy_img_info,
mock_instance_img_info,
dhcp_factory_mock, uefi=False,
cleaning=False):
mock_build_pxe.return_value = {}
mock_deploy_img_info.return_value = {'deploy_kernel': 'a'}
mock_instance_img_info.return_value = {'kernel': 'b'}
mock_pxe_config.return_value = None
mock_cache_r_k.return_value = None
provider_mock = mock.MagicMock()
dhcp_factory_mock.return_value = provider_mock
with task_manager.acquire(self.context, self.node.uuid) as task:
dhcp_opts = pxe_utils.dhcp_options_for_instance(task)
task.driver.boot.prepare_ramdisk(task, {'foo': 'bar'})
mock_deploy_img_info.assert_called_once_with(task.node)
provider_mock.update_dhcp.assert_called_once_with(task, dhcp_opts)
if cleaning is False:
mock_cache_r_k.assert_called_once_with(
self.context, task.node,
{'deploy_kernel': 'a', 'kernel': 'b'})
mock_instance_img_info.assert_called_once_with(task.node,
self.context)
else:
mock_cache_r_k.assert_called_once_with(
self.context, task.node,
{'deploy_kernel': 'a'})
if uefi:
mock_pxe_config.assert_called_once_with(
task, {'foo': 'bar'}, CONF.pxe.uefi_pxe_config_template)
else:
mock_pxe_config.assert_called_once_with(
task, {'foo': 'bar'}, CONF.pxe.pxe_config_template)
def test_prepare_ramdisk(self):
self.node.provision_state = states.DEPLOYING
self.node.save()
self._test_prepare_ramdisk()
def test_prepare_ramdisk_uefi(self):
self.node.provision_state = states.DEPLOYING
self.node.save()
properties = self.node.properties
properties['capabilities'] = 'boot_mode:uefi'
self.node.properties = properties
self.node.save()
self._test_prepare_ramdisk(uefi=True)
@mock.patch.object(shutil, 'copyfile', autospec=True)
def test_prepare_ramdisk_ipxe(self, copyfile_mock):
self.node.provision_state = states.DEPLOYING
self.node.save()
self.config(group='pxe', ipxe_enabled=True)
self.config(group='deploy', http_url='http://myserver')
self._test_prepare_ramdisk()
copyfile_mock.assert_called_once_with(
CONF.pxe.ipxe_boot_script,
os.path.join(
CONF.deploy.http_root,
os.path.basename(CONF.pxe.ipxe_boot_script)))
def test_prepare_ramdisk_cleaning(self):
self.node.provision_state = states.CLEANING
self.node.save()
self._test_prepare_ramdisk(cleaning=True)
@mock.patch.object(pxe, '_clean_up_pxe_env', autospec=True)
@mock.patch.object(pxe, '_get_deploy_image_info', autospec=True)
def test_clean_up_ramdisk(self, get_deploy_image_info_mock,
clean_up_pxe_env_mock):
with task_manager.acquire(self.context, self.node.uuid) as task:
image_info = {'deploy_kernel': ['', '/path/to/deploy_kernel'],
'deploy_ramdisk': ['', '/path/to/deploy_ramdisk']}
get_deploy_image_info_mock.return_value = image_info
task.driver.boot.clean_up_ramdisk(task)
clean_up_pxe_env_mock.assert_called_once_with(task, image_info)
get_deploy_image_info_mock.assert_called_once_with(task.node)
@mock.patch.object(deploy_utils, 'try_set_boot_device', autospec=True)
@mock.patch.object(deploy_utils, 'switch_pxe_config', autospec=True)
@mock.patch.object(dhcp_factory, 'DHCPFactory', autospec=True)
@mock.patch.object(pxe, '_cache_ramdisk_kernel', autospec=True)
@mock.patch.object(pxe, '_get_instance_image_info', autospec=True)
def test_prepare_instance_netboot(
self, get_image_info_mock, cache_mock,
dhcp_factory_mock, switch_pxe_config_mock,
set_boot_device_mock):
provider_mock = mock.MagicMock()
dhcp_factory_mock.return_value = provider_mock
image_info = {'kernel': ('', '/path/to/kernel'),
'ramdisk': ('', '/path/to/ramdisk')}
get_image_info_mock.return_value = image_info
with task_manager.acquire(self.context, self.node.uuid) as task:
dhcp_opts = pxe_utils.dhcp_options_for_instance(task)
pxe_config_path = pxe_utils.get_pxe_config_file_path(
task.node.uuid)
task.node.properties['capabilities'] = 'boot_mode:bios'
task.node.driver_internal_info['root_uuid_or_disk_id'] = (
"30212642-09d3-467f-8e09-21685826ab50")
task.node.driver_internal_info['is_whole_disk_image'] = False
task.driver.boot.prepare_instance(task)
get_image_info_mock.assert_called_once_with(
task.node, task.context)
cache_mock.assert_called_once_with(
task.context, task.node, image_info)
provider_mock.update_dhcp.assert_called_once_with(task, dhcp_opts)
switch_pxe_config_mock.assert_called_once_with(
pxe_config_path, "30212642-09d3-467f-8e09-21685826ab50",
'bios', False, False)
set_boot_device_mock.assert_called_once_with(task,
boot_devices.PXE)
@mock.patch.object(deploy_utils, 'try_set_boot_device', autospec=True)
@mock.patch.object(deploy_utils, 'switch_pxe_config', autospec=True)
@mock.patch.object(dhcp_factory, 'DHCPFactory')
@mock.patch.object(pxe, '_cache_ramdisk_kernel', autospec=True)
@mock.patch.object(pxe, '_get_instance_image_info', autospec=True)
def test_prepare_instance_netboot_missing_root_uuid(
self, get_image_info_mock, cache_mock,
dhcp_factory_mock, switch_pxe_config_mock,
set_boot_device_mock):
provider_mock = mock.MagicMock()
dhcp_factory_mock.return_value = provider_mock
image_info = {'kernel': ('', '/path/to/kernel'),
'ramdisk': ('', '/path/to/ramdisk')}
get_image_info_mock.return_value = image_info
with task_manager.acquire(self.context, self.node.uuid) as task:
dhcp_opts = pxe_utils.dhcp_options_for_instance(task)
task.node.properties['capabilities'] = 'boot_mode:bios'
task.node.driver_internal_info['is_whole_disk_image'] = False
task.driver.boot.prepare_instance(task)
get_image_info_mock.assert_called_once_with(
task.node, task.context)
cache_mock.assert_called_once_with(
task.context, task.node, image_info)
provider_mock.update_dhcp.assert_called_once_with(task, dhcp_opts)
self.assertFalse(switch_pxe_config_mock.called)
self.assertFalse(set_boot_device_mock.called)
@mock.patch.object(deploy_utils, 'try_set_boot_device', autospec=True)
@mock.patch.object(pxe_utils, 'clean_up_pxe_config', autospec=True)
def test_prepare_instance_localboot(self, clean_up_pxe_config_mock,
set_boot_device_mock):
with task_manager.acquire(self.context, self.node.uuid) as task:
task.node.instance_info['capabilities'] = {'boot_option': 'local'}
task.driver.boot.prepare_instance(task)
clean_up_pxe_config_mock.assert_called_once_with(task)
set_boot_device_mock.assert_called_once_with(task,
boot_devices.DISK)
@mock.patch.object(pxe, '_clean_up_pxe_env', autospec=True)
@mock.patch.object(pxe, '_get_instance_image_info', autospec=True)
def test_clean_up_instance(self, get_image_info_mock,
clean_up_pxe_env_mock):
with task_manager.acquire(self.context, self.node.uuid) as task:
image_info = {'kernel': ['', '/path/to/kernel'],
'ramdisk': ['', '/path/to/ramdisk']}
get_image_info_mock.return_value = image_info
task.driver.boot.clean_up_instance(task)
clean_up_pxe_env_mock.assert_called_once_with(task, image_info)
get_image_info_mock.assert_called_once_with(
task.node, task.context)
| apache-2.0 |
wanghao-xznu/linux-2.6-imx | tools/perf/scripts/python/netdev-times.py | 11271 | 15048 | # Display the processing of packets and the processing time.
# It helps us to investigate networking or network devices.
#
# options
# tx: show only tx chart
# rx: show only rx chart
# dev=: show only thing related to specified device
# debug: run in debug mode. It shows buffer status.
import os
import sys
sys.path.append(os.environ['PERF_EXEC_PATH'] + \
'/scripts/python/Perf-Trace-Util/lib/Perf/Trace')
from perf_trace_context import *
from Core import *
from Util import *
all_event_list = []; # insert all tracepoint events related to this script
irq_dic = {}; # key is cpu and value is a list which stacks irqs
# which raise NET_RX softirq
net_rx_dic = {}; # key is cpu and value includes the entry time of the NET_RX
		 # softirq and a list which stacks receive events
receive_hunk_list = []; # a list which include a sequence of receive events
rx_skb_list = []; # received packet list for matching
# skb_copy_datagram_iovec
buffer_budget = 65536; # the budget of rx_skb_list, tx_queue_list and
# tx_xmit_list
of_count_rx_skb_list = 0; # overflow count
tx_queue_list = []; # list of packets which pass through dev_queue_xmit
of_count_tx_queue_list = 0; # overflow count
tx_xmit_list = []; # list of packets which pass through dev_hard_start_xmit
of_count_tx_xmit_list = 0; # overflow count
tx_free_list = []; # list of packets which is freed
# options
show_tx = 0;
show_rx = 0;
dev = 0; # store a name of device specified by option "dev="
debug = 0;
# indices of event_info tuple
EINFO_IDX_NAME= 0
EINFO_IDX_CONTEXT=1
EINFO_IDX_CPU= 2
EINFO_IDX_TIME= 3
EINFO_IDX_PID= 4
EINFO_IDX_COMM= 5
# Calculate a time interval(msec) from src(nsec) to dst(nsec)
def diff_msec(src, dst):
return (dst - src) / 1000000.0
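# e.g. diff_msec(1000000, 3500000) returns 2.5 (illustrative timestamps)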
# Display the process of transmitting a packet
def print_transmit(hunk):
if dev != 0 and hunk['dev'].find(dev) < 0:
return
print "%7s %5d %6d.%06dsec %12.3fmsec %12.3fmsec" % \
(hunk['dev'], hunk['len'],
nsecs_secs(hunk['queue_t']),
nsecs_nsecs(hunk['queue_t'])/1000,
diff_msec(hunk['queue_t'], hunk['xmit_t']),
diff_msec(hunk['xmit_t'], hunk['free_t']))
# Format for displaying rx packet processing
PF_IRQ_ENTRY= " irq_entry(+%.3fmsec irq=%d:%s)"
PF_SOFT_ENTRY=" softirq_entry(+%.3fmsec)"
PF_NAPI_POLL= " napi_poll_exit(+%.3fmsec %s)"
PF_JOINT= " |"
PF_WJOINT= " | |"
PF_NET_RECV= " |---netif_receive_skb(+%.3fmsec skb=%x len=%d)"
PF_NET_RX= " |---netif_rx(+%.3fmsec skb=%x)"
PF_CPY_DGRAM= " | skb_copy_datagram_iovec(+%.3fmsec %d:%s)"
PF_KFREE_SKB= " | kfree_skb(+%.3fmsec location=%x)"
PF_CONS_SKB= " | consume_skb(+%.3fmsec)"
# Display the processing of received packets and the interrupts associated
# with a NET_RX softirq
def print_receive(hunk):
show_hunk = 0
irq_list = hunk['irq_list']
cpu = irq_list[0]['cpu']
base_t = irq_list[0]['irq_ent_t']
	# check if this hunk should be shown
if dev != 0:
for i in range(len(irq_list)):
if irq_list[i]['name'].find(dev) >= 0:
show_hunk = 1
break
else:
show_hunk = 1
if show_hunk == 0:
return
print "%d.%06dsec cpu=%d" % \
(nsecs_secs(base_t), nsecs_nsecs(base_t)/1000, cpu)
for i in range(len(irq_list)):
print PF_IRQ_ENTRY % \
(diff_msec(base_t, irq_list[i]['irq_ent_t']),
irq_list[i]['irq'], irq_list[i]['name'])
print PF_JOINT
irq_event_list = irq_list[i]['event_list']
for j in range(len(irq_event_list)):
irq_event = irq_event_list[j]
if irq_event['event'] == 'netif_rx':
print PF_NET_RX % \
(diff_msec(base_t, irq_event['time']),
irq_event['skbaddr'])
print PF_JOINT
print PF_SOFT_ENTRY % \
diff_msec(base_t, hunk['sirq_ent_t'])
print PF_JOINT
event_list = hunk['event_list']
for i in range(len(event_list)):
event = event_list[i]
if event['event_name'] == 'napi_poll':
print PF_NAPI_POLL % \
(diff_msec(base_t, event['event_t']), event['dev'])
if i == len(event_list) - 1:
print ""
else:
print PF_JOINT
else:
print PF_NET_RECV % \
(diff_msec(base_t, event['event_t']), event['skbaddr'],
event['len'])
if 'comm' in event.keys():
print PF_WJOINT
print PF_CPY_DGRAM % \
(diff_msec(base_t, event['comm_t']),
event['pid'], event['comm'])
elif 'handle' in event.keys():
print PF_WJOINT
if event['handle'] == "kfree_skb":
print PF_KFREE_SKB % \
(diff_msec(base_t,
event['comm_t']),
event['location'])
elif event['handle'] == "consume_skb":
print PF_CONS_SKB % \
diff_msec(base_t,
event['comm_t'])
print PF_JOINT
def trace_begin():
global show_tx
global show_rx
global dev
global debug
for i in range(len(sys.argv)):
if i == 0:
continue
arg = sys.argv[i]
if arg == 'tx':
show_tx = 1
elif arg =='rx':
show_rx = 1
elif arg.find('dev=',0, 4) >= 0:
dev = arg[4:]
elif arg == 'debug':
debug = 1
if show_tx == 0 and show_rx == 0:
show_tx = 1
show_rx = 1
def trace_end():
# order all events in time
all_event_list.sort(lambda a,b :cmp(a[EINFO_IDX_TIME],
b[EINFO_IDX_TIME]))
# process all events
for i in range(len(all_event_list)):
event_info = all_event_list[i]
name = event_info[EINFO_IDX_NAME]
if name == 'irq__softirq_exit':
handle_irq_softirq_exit(event_info)
elif name == 'irq__softirq_entry':
handle_irq_softirq_entry(event_info)
elif name == 'irq__softirq_raise':
handle_irq_softirq_raise(event_info)
elif name == 'irq__irq_handler_entry':
handle_irq_handler_entry(event_info)
elif name == 'irq__irq_handler_exit':
handle_irq_handler_exit(event_info)
elif name == 'napi__napi_poll':
handle_napi_poll(event_info)
elif name == 'net__netif_receive_skb':
handle_netif_receive_skb(event_info)
elif name == 'net__netif_rx':
handle_netif_rx(event_info)
elif name == 'skb__skb_copy_datagram_iovec':
handle_skb_copy_datagram_iovec(event_info)
elif name == 'net__net_dev_queue':
handle_net_dev_queue(event_info)
elif name == 'net__net_dev_xmit':
handle_net_dev_xmit(event_info)
elif name == 'skb__kfree_skb':
handle_kfree_skb(event_info)
elif name == 'skb__consume_skb':
handle_consume_skb(event_info)
# display receive hunks
if show_rx:
for i in range(len(receive_hunk_list)):
print_receive(receive_hunk_list[i])
# display transmit hunks
if show_tx:
print " dev len Qdisc " \
" netdevice free"
for i in range(len(tx_free_list)):
print_transmit(tx_free_list[i])
if debug:
print "debug buffer status"
print "----------------------------"
print "xmit Qdisc:remain:%d overflow:%d" % \
(len(tx_queue_list), of_count_tx_queue_list)
print "xmit netdevice:remain:%d overflow:%d" % \
(len(tx_xmit_list), of_count_tx_xmit_list)
print "receive:remain:%d overflow:%d" % \
(len(rx_skb_list), of_count_rx_skb_list)
# called from perf, when it finds a corresponding event
def irq__softirq_entry(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_exit(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__softirq_raise(name, context, cpu, sec, nsec, pid, comm, vec):
if symbol_str("irq__softirq_entry", "vec", vec) != "NET_RX":
return
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, vec)
all_event_list.append(event_info)
def irq__irq_handler_entry(name, context, cpu, sec, nsec, pid, comm,
irq, irq_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
irq, irq_name)
all_event_list.append(event_info)
def irq__irq_handler_exit(name, context, cpu, sec, nsec, pid, comm, irq, ret):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm, irq, ret)
all_event_list.append(event_info)
def napi__napi_poll(name, context, cpu, sec, nsec, pid, comm, napi, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
napi, dev_name)
all_event_list.append(event_info)
def net__netif_receive_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__netif_rx(name, context, cpu, sec, nsec, pid, comm, skbaddr,
skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_queue(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, dev_name):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen, dev_name)
all_event_list.append(event_info)
def net__net_dev_xmit(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen, rc, dev_name):
	event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
		skbaddr, skblen, rc, dev_name)
all_event_list.append(event_info)
def skb__kfree_skb(name, context, cpu, sec, nsec, pid, comm,
skbaddr, protocol, location):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, protocol, location)
all_event_list.append(event_info)
def skb__consume_skb(name, context, cpu, sec, nsec, pid, comm, skbaddr):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr)
all_event_list.append(event_info)
def skb__skb_copy_datagram_iovec(name, context, cpu, sec, nsec, pid, comm,
skbaddr, skblen):
event_info = (name, context, cpu, nsecs(sec, nsec), pid, comm,
skbaddr, skblen)
all_event_list.append(event_info)
def handle_irq_handler_entry(event_info):
(name, context, cpu, time, pid, comm, irq, irq_name) = event_info
if cpu not in irq_dic.keys():
irq_dic[cpu] = []
irq_record = {'irq':irq, 'name':irq_name, 'cpu':cpu, 'irq_ent_t':time}
irq_dic[cpu].append(irq_record)
def handle_irq_handler_exit(event_info):
(name, context, cpu, time, pid, comm, irq, ret) = event_info
if cpu not in irq_dic.keys():
return
irq_record = irq_dic[cpu].pop()
if irq != irq_record['irq']:
return
irq_record.update({'irq_ext_t':time})
# if an irq doesn't include NET_RX softirq, drop.
if 'event_list' in irq_record.keys():
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_raise(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'sirq_raise'})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_irq_softirq_entry(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
net_rx_dic[cpu] = {'sirq_ent_t':time, 'event_list':[]}
def handle_irq_softirq_exit(event_info):
(name, context, cpu, time, pid, comm, vec) = event_info
irq_list = []
event_list = 0
if cpu in irq_dic.keys():
irq_list = irq_dic[cpu]
del irq_dic[cpu]
if cpu in net_rx_dic.keys():
sirq_ent_t = net_rx_dic[cpu]['sirq_ent_t']
event_list = net_rx_dic[cpu]['event_list']
del net_rx_dic[cpu]
if irq_list == [] or event_list == 0:
return
rec_data = {'sirq_ent_t':sirq_ent_t, 'sirq_ext_t':time,
'irq_list':irq_list, 'event_list':event_list}
	# merge information related to a NET_RX softirq
receive_hunk_list.append(rec_data)
def handle_napi_poll(event_info):
(name, context, cpu, time, pid, comm, napi, dev_name) = event_info
if cpu in net_rx_dic.keys():
event_list = net_rx_dic[cpu]['event_list']
rec_data = {'event_name':'napi_poll',
'dev':dev_name, 'event_t':time}
event_list.append(rec_data)
def handle_netif_rx(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu not in irq_dic.keys() \
or len(irq_dic[cpu]) == 0:
return
irq_record = irq_dic[cpu].pop()
if 'event_list' in irq_record.keys():
irq_event_list = irq_record['event_list']
else:
irq_event_list = []
irq_event_list.append({'time':time, 'event':'netif_rx',
'skbaddr':skbaddr, 'skblen':skblen, 'dev_name':dev_name})
irq_record.update({'event_list':irq_event_list})
irq_dic[cpu].append(irq_record)
def handle_netif_receive_skb(event_info):
global of_count_rx_skb_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
if cpu in net_rx_dic.keys():
rec_data = {'event_name':'netif_receive_skb',
'event_t':time, 'skbaddr':skbaddr, 'len':skblen}
event_list = net_rx_dic[cpu]['event_list']
event_list.append(rec_data)
rx_skb_list.insert(0, rec_data)
if len(rx_skb_list) > buffer_budget:
rx_skb_list.pop()
of_count_rx_skb_list += 1
def handle_net_dev_queue(event_info):
global of_count_tx_queue_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, dev_name) = event_info
skb = {'dev':dev_name, 'skbaddr':skbaddr, 'len':skblen, 'queue_t':time}
tx_queue_list.insert(0, skb)
if len(tx_queue_list) > buffer_budget:
tx_queue_list.pop()
of_count_tx_queue_list += 1
def handle_net_dev_xmit(event_info):
global of_count_tx_xmit_list
(name, context, cpu, time, pid, comm,
skbaddr, skblen, rc, dev_name) = event_info
if rc == 0: # NETDEV_TX_OK
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
skb['xmit_t'] = time
tx_xmit_list.insert(0, skb)
del tx_queue_list[i]
if len(tx_xmit_list) > buffer_budget:
tx_xmit_list.pop()
of_count_tx_xmit_list += 1
return
def handle_kfree_skb(event_info):
(name, context, cpu, time, pid, comm,
skbaddr, protocol, location) = event_info
for i in range(len(tx_queue_list)):
skb = tx_queue_list[i]
if skb['skbaddr'] == skbaddr:
del tx_queue_list[i]
return
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if rec_data['skbaddr'] == skbaddr:
rec_data.update({'handle':"kfree_skb",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
def handle_consume_skb(event_info):
(name, context, cpu, time, pid, comm, skbaddr) = event_info
for i in range(len(tx_xmit_list)):
skb = tx_xmit_list[i]
if skb['skbaddr'] == skbaddr:
skb['free_t'] = time
tx_free_list.append(skb)
del tx_xmit_list[i]
return
def handle_skb_copy_datagram_iovec(event_info):
(name, context, cpu, time, pid, comm, skbaddr, skblen) = event_info
for i in range(len(rx_skb_list)):
rec_data = rx_skb_list[i]
if skbaddr == rec_data['skbaddr']:
rec_data.update({'handle':"skb_copy_datagram_iovec",
'comm':comm, 'pid':pid, 'comm_t':time})
del rx_skb_list[i]
return
| gpl-2.0 |
andrewcmyers/tensorflow | tensorflow/python/layers/pooling.py | 31 | 26540 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# pylint: disable=unused-import,g-bad-import-order
"""Contains the pooling layer classes and their functional aliases.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from six.moves import xrange # pylint: disable=redefined-builtin
import numpy as np
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import standard_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.framework import tensor_shape
from tensorflow.python.layers import base
from tensorflow.python.layers import utils
from tensorflow.python import framework
class _Pooling1D(base.Layer):
"""Pooling layer for arbitrary pooling functions, for 1D inputs.
This class only exists for code reuse. It will never be an exposed API.
Arguments:
pool_function: The pooling function to apply, e.g. `tf.nn.max_pool`.
pool_size: An integer or tuple/list of a single integer,
representing the size of the pooling window.
strides: An integer or tuple/list of a single integer, specifying the
strides of the pooling operation.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, length, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, length)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_function, pool_size, strides,
padding='valid', data_format='channels_last',
name=None, **kwargs):
super(_Pooling1D, self).__init__(name=name, **kwargs)
self.pool_function = pool_function
self.pool_size = utils.normalize_tuple(pool_size, 1, 'pool_size')
self.strides = utils.normalize_tuple(strides, 1, 'strides')
self.padding = utils.normalize_padding(padding)
self.data_format = utils.normalize_data_format(data_format)
self.input_spec = base.InputSpec(ndim=3)
def call(self, inputs):
# There is no TF op for 1D pooling, hence we make the inputs 4D.
if self.data_format == 'channels_last':
inputs = array_ops.expand_dims(inputs, 2)
pool_shape = (1,) + self.pool_size + (1, 1)
strides = (1,) + self.strides + (1, 1)
data_format = 'NHWC'
else:
inputs = array_ops.expand_dims(inputs, 1)
pool_shape = (1, 1) + self.pool_size + (1,)
strides = (1, 1) + self.strides + (1,)
data_format = 'NCHW'
outputs = self.pool_function(
inputs,
ksize=pool_shape,
strides=strides,
padding=self.padding.upper(),
data_format=data_format)
if self.data_format == 'channels_last':
return array_ops.squeeze(outputs, 2)
else:
return array_ops.squeeze(outputs, 1)
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
length = utils.conv_output_length(input_shape[1], self.pool_size[0],
self.padding, self.strides[0])
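    # Illustrative arithmetic, assuming the usual conv_output_length rule:
    # input length 10, pool_size 3, stride 2 -> 'valid' gives 4, 'same' gives 5.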
return tensor_shape.TensorShape([input_shape[0], length, input_shape[2]])
class AveragePooling1D(_Pooling1D):
"""Average Pooling layer for 1D inputs.
Arguments:
pool_size: An integer or tuple/list of a single integer,
representing the size of the pooling window.
strides: An integer or tuple/list of a single integer, specifying the
strides of the pooling operation.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, length, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, length)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_size, strides,
padding='valid', data_format='channels_last',
name=None, **kwargs):
super(AveragePooling1D, self).__init__(
nn.avg_pool,
pool_size=pool_size,
strides=strides,
padding=padding,
data_format=data_format,
name=name,
**kwargs)
def average_pooling1d(inputs, pool_size, strides,
padding='valid', data_format='channels_last',
name=None):
"""Average Pooling layer for 1D inputs.
Arguments:
inputs: The tensor over which to pool. Must have rank 3.
pool_size: An integer or tuple/list of a single integer,
representing the size of the pooling window.
strides: An integer or tuple/list of a single integer, specifying the
strides of the pooling operation.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, length, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, length)`.
name: A string, the name of the layer.
Returns:
The output tensor, of rank 3.
"""
layer = AveragePooling1D(pool_size=pool_size,
strides=strides,
padding=padding,
data_format=data_format,
name=name)
return layer.apply(inputs)
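# A minimal usage sketch (tensor names and shapes are illustrative):
#   import tensorflow as tf
#   x = tf.placeholder(tf.float32, [None, 100, 8])     # (batch, length, channels)
#   y = average_pooling1d(x, pool_size=2, strides=2)   # -> (batch, 50, 8)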
class MaxPooling1D(_Pooling1D):
"""Max Pooling layer for 1D inputs.
Arguments:
pool_size: An integer or tuple/list of a single integer,
representing the size of the pooling window.
strides: An integer or tuple/list of a single integer, specifying the
strides of the pooling operation.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, length, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, length)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_size, strides,
padding='valid', data_format='channels_last',
name=None, **kwargs):
super(MaxPooling1D, self).__init__(
nn.max_pool,
pool_size=pool_size,
strides=strides,
padding=padding,
data_format=data_format,
name=name,
**kwargs)
def max_pooling1d(inputs, pool_size, strides,
padding='valid', data_format='channels_last',
name=None):
"""Max Pooling layer for 1D inputs.
Arguments:
inputs: The tensor over which to pool. Must have rank 3.
pool_size: An integer or tuple/list of a single integer,
representing the size of the pooling window.
strides: An integer or tuple/list of a single integer, specifying the
strides of the pooling operation.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, length, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, length)`.
name: A string, the name of the layer.
Returns:
The output tensor, of rank 3.
"""
layer = MaxPooling1D(pool_size=pool_size,
strides=strides,
padding=padding,
data_format=data_format,
name=name)
return layer.apply(inputs)
class _Pooling2D(base.Layer):
"""Pooling layer for arbitrary pooling functions, for 2D inputs (e.g. images).
This class only exists for code reuse. It will never be an exposed API.
Arguments:
pool_function: The pooling function to apply, e.g. `tf.nn.max_pool`.
pool_size: An integer or tuple/list of 2 integers: (pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_function, pool_size, strides,
padding='valid', data_format='channels_last',
name=None, **kwargs):
super(_Pooling2D, self).__init__(name=name, **kwargs)
self.pool_function = pool_function
self.pool_size = utils.normalize_tuple(pool_size, 2, 'pool_size')
self.strides = utils.normalize_tuple(strides, 2, 'strides')
self.padding = utils.normalize_padding(padding)
self.data_format = utils.normalize_data_format(data_format)
self.input_spec = base.InputSpec(ndim=4)
def call(self, inputs):
if self.data_format == 'channels_last':
pool_shape = (1,) + self.pool_size + (1,)
strides = (1,) + self.strides + (1,)
else:
pool_shape = (1, 1) + self.pool_size
strides = (1, 1) + self.strides
outputs = self.pool_function(
inputs,
ksize=pool_shape,
strides=strides,
padding=self.padding.upper(),
data_format=utils.convert_data_format(self.data_format, 4))
return outputs
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_first':
rows = input_shape[2]
cols = input_shape[3]
else:
rows = input_shape[1]
cols = input_shape[2]
rows = utils.conv_output_length(rows, self.pool_size[0], self.padding,
self.strides[0])
cols = utils.conv_output_length(cols, self.pool_size[1], self.padding,
self.strides[1])
if self.data_format == 'channels_first':
return tensor_shape.TensorShape(
[input_shape[0], input_shape[1], rows, cols])
else:
return tensor_shape.TensorShape(
[input_shape[0], rows, cols, input_shape[3]])
class AveragePooling2D(_Pooling2D):
"""Average pooling layer for 2D inputs (e.g. images).
Arguments:
pool_size: An integer or tuple/list of 2 integers: (pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string. The ordering of the dimensions in the inputs.
`channels_last` (default) and `channels_first` are supported.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_size, strides,
padding='valid', data_format='channels_last',
name=None, **kwargs):
super(AveragePooling2D, self).__init__(
nn.avg_pool,
pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format, name=name, **kwargs)
def average_pooling2d(inputs,
pool_size, strides,
padding='valid', data_format='channels_last',
name=None):
"""Average pooling layer for 2D inputs (e.g. images).
Arguments:
inputs: The tensor over which to pool. Must have rank 4.
pool_size: An integer or tuple/list of 2 integers: (pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string. The ordering of the dimensions in the inputs.
`channels_last` (default) and `channels_first` are supported.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
name: A string, the name of the layer.
Returns:
Output tensor.
"""
layer = AveragePooling2D(pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format,
name=name)
return layer.apply(inputs)
class MaxPooling2D(_Pooling2D):
"""Max pooling layer for 2D inputs (e.g. images).
Arguments:
pool_size: An integer or tuple/list of 2 integers: (pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string. The ordering of the dimensions in the inputs.
`channels_last` (default) and `channels_first` are supported.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_size, strides,
padding='valid', data_format='channels_last',
name=None, **kwargs):
super(MaxPooling2D, self).__init__(
nn.max_pool,
pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format, name=name, **kwargs)
def max_pooling2d(inputs,
pool_size, strides,
padding='valid', data_format='channels_last',
name=None):
"""Max pooling layer for 2D inputs (e.g. images).
Arguments:
inputs: The tensor over which to pool. Must have rank 4.
pool_size: An integer or tuple/list of 2 integers: (pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 2 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string. The ordering of the dimensions in the inputs.
`channels_last` (default) and `channels_first` are supported.
`channels_last` corresponds to inputs with shape
`(batch, height, width, channels)` while `channels_first` corresponds to
inputs with shape `(batch, channels, height, width)`.
name: A string, the name of the layer.
Returns:
Output tensor.
"""
layer = MaxPooling2D(pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format,
name=name)
return layer.apply(inputs)
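# A minimal usage sketch (tensor names and shapes are illustrative):
#   import tensorflow as tf
#   images = tf.placeholder(tf.float32, [None, 28, 28, 1])   # channels_last
#   pooled = max_pooling2d(images, pool_size=2, strides=2)   # -> (None, 14, 14, 1)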
class _Pooling3D(base.Layer):
"""Pooling layer for arbitrary pooling functions, for 3D inputs.
This class only exists for code reuse. It will never be an exposed API.
Arguments:
pool_function: The pooling function to apply, e.g. `tf.nn.max_pool`.
pool_size: An integer or tuple/list of 3 integers:
(pool_depth, pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 3 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string, one of `channels_last` (default) or `channels_first`.
The ordering of the dimensions in the inputs.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)`
while `channels_first` corresponds to
inputs with shape `(batch, channels, depth, height, width)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_function, pool_size, strides,
padding='valid', data_format='channels_last',
name=None, **kwargs):
super(_Pooling3D, self).__init__(name=name, **kwargs)
self.pool_function = pool_function
self.pool_size = utils.normalize_tuple(pool_size, 3, 'pool_size')
self.strides = utils.normalize_tuple(strides, 3, 'strides')
self.padding = utils.normalize_padding(padding)
self.data_format = utils.normalize_data_format(data_format)
self.input_spec = base.InputSpec(ndim=5)
def call(self, inputs):
pool_shape = (1,) + self.pool_size + (1,)
strides = (1,) + self.strides + (1,)
if self.data_format == 'channels_first':
# TF does not support `channels_first` with 3D pooling operations,
# so we must handle this case manually.
# TODO(fchollet): remove this when TF pooling is feature-complete.
inputs = array_ops.transpose(inputs, (0, 2, 3, 4, 1))
outputs = self.pool_function(
inputs,
ksize=pool_shape,
strides=strides,
padding=self.padding.upper())
if self.data_format == 'channels_first':
outputs = array_ops.transpose(outputs, (0, 4, 1, 2, 3))
return outputs
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape).as_list()
if self.data_format == 'channels_first':
len_dim1 = input_shape[2]
len_dim2 = input_shape[3]
len_dim3 = input_shape[4]
else:
len_dim1 = input_shape[1]
len_dim2 = input_shape[2]
len_dim3 = input_shape[3]
len_dim1 = utils.conv_output_length(len_dim1, self.pool_size[0],
self.padding, self.strides[0])
len_dim2 = utils.conv_output_length(len_dim2, self.pool_size[1],
self.padding, self.strides[1])
len_dim3 = utils.conv_output_length(len_dim3, self.pool_size[2],
self.padding, self.strides[2])
if self.data_format == 'channels_first':
return tensor_shape.TensorShape(
[input_shape[0], input_shape[1], len_dim1, len_dim2, len_dim3])
else:
return tensor_shape.TensorShape(
[input_shape[0], len_dim1, len_dim2, len_dim3, input_shape[4]])
class AveragePooling3D(_Pooling3D):
"""Average pooling layer for 3D inputs (e.g. volumes).
Arguments:
pool_size: An integer or tuple/list of 3 integers:
(pool_depth, pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 3 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string. The ordering of the dimensions in the inputs.
`channels_last` (default) and `channels_first` are supported.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_size, strides,
padding='valid', data_format='channels_last',
name=None, **kwargs):
super(AveragePooling3D, self).__init__(
nn.avg_pool3d,
pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format, name=name, **kwargs)
def average_pooling3d(inputs,
pool_size, strides,
padding='valid', data_format='channels_last',
name=None):
"""Average pooling layer for 3D inputs (e.g. volumes).
Arguments:
inputs: The tensor over which to pool. Must have rank 5.
pool_size: An integer or tuple/list of 3 integers:
(pool_depth, pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 3 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string. The ordering of the dimensions in the inputs.
`channels_last` (default) and `channels_first` are supported.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
name: A string, the name of the layer.
Returns:
Output tensor.
"""
layer = AveragePooling3D(pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format,
name=name)
return layer.apply(inputs)
class MaxPooling3D(_Pooling3D):
"""Max pooling layer for 3D inputs (e.g. volumes).
Arguments:
pool_size: An integer or tuple/list of 3 integers:
(pool_depth, pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 3 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string. The ordering of the dimensions in the inputs.
`channels_last` (default) and `channels_first` are supported.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
name: A string, the name of the layer.
"""
def __init__(self, pool_size, strides,
padding='valid', data_format='channels_last',
name=None, **kwargs):
super(MaxPooling3D, self).__init__(
nn.max_pool3d,
pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format, name=name, **kwargs)
def max_pooling3d(inputs,
pool_size, strides,
padding='valid', data_format='channels_last',
name=None):
"""Max pooling layer for 3D inputs (e.g. volumes).
Arguments:
inputs: The tensor over which to pool. Must have rank 5.
pool_size: An integer or tuple/list of 3 integers:
(pool_depth, pool_height, pool_width)
specifying the size of the pooling window.
Can be a single integer to specify the same value for
all spatial dimensions.
strides: An integer or tuple/list of 3 integers,
specifying the strides of the pooling operation.
Can be a single integer to specify the same value for
all spatial dimensions.
padding: A string. The padding method, either 'valid' or 'same'.
Case-insensitive.
data_format: A string. The ordering of the dimensions in the inputs.
`channels_last` (default) and `channels_first` are supported.
`channels_last` corresponds to inputs with shape
`(batch, depth, height, width, channels)` while `channels_first`
corresponds to inputs with shape
`(batch, channels, depth, height, width)`.
name: A string, the name of the layer.
Returns:
Output tensor.
"""
layer = MaxPooling3D(pool_size=pool_size, strides=strides,
padding=padding, data_format=data_format,
name=name)
return layer.apply(inputs)
# Aliases
AvgPool2D = AveragePooling2D
MaxPool2D = MaxPooling2D
max_pool2d = max_pooling2d
avg_pool2d = average_pooling2d
| apache-2.0 |
lipingxue/docker-volume-vsphere | esx_service/cli/vmdkops_post_update.11.1.py | 9 | 7431 | #!/usr/bin/env python
# Copyright 2017 VMware, Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -------------------------------------------------------------------------------------------
# Updates ESX with vSphere Docker Volume Service 0.11 (and earlier)
# to 0.11.1 and further
# -------------------------------------------------------------------------------------------
import os
import os.path
import sqlite3
import sys
import shutil
import vmdk_ops
# vmdkops python utils are in PY_LOC, so add to path.
sys.path.insert(0, vmdk_ops.PY_LOC)
import vmdk_utils
import auth
import auth_data
# Hard coded (in auth package) UUID for default tenant.
STATIC_UUID = auth.DEFAULT_TENANT_UUID
STATIC_NAME = auth.DEFAULT_TENANT
# CLI return codes
OK = 0
ERROR = 1
# do we need to stop and restart the vmdkops service
STOP_SERVICE = True # 'False' is for debug only - makes it faster
def patch_a_store(ds_path, old_uuid):
"""Renames and moves stuff as needed in a single DS/dockvols"""
print("Working on Datastore '{0}'".format(ds_path))
# move stuff from old_uuid to new_uuid ()
old_dir = os.path.join(ds_path, old_uuid)
new_dir = os.path.join(ds_path, STATIC_UUID)
symlink_name = os.path.join(ds_path, STATIC_NAME)
if not os.path.isdir(old_dir):
print(" Skipping {0} - not found".format(old_dir))
return
if os.path.exists(new_dir):
# target exists , move files and remove oldir
print(" Moving from {0}, to {1}".format(old_dir, new_dir))
for f in os.listdir(old_dir):
src = os.path.join(old_dir, f)
dst = os.path.join(new_dir, f)
if os.path.isfile(dst):
print(" File {0} already exists, skipping the move".format(dst))
continue
shutil.move(src, dst)
if not os.listdir(old_dir):
print(" Deleting empty {0}".format(old_dir))
os.rmdir(old_dir)
else:
print(" *** Warning: {0} is not empty after migration. Please check the content.")
else:
print(" Renaming {0} to {1}".format(old_dir, new_dir))
os.rename(old_dir, new_dir)
print(" Adjusting {0} symlink to pont to {1}".format(symlink_name, STATIC_UUID))
try:
os.remove(symlink_name)
except:
pass
os.symlink(STATIC_UUID, symlink_name)
def main():
"""
This code updates ESX with vSphere Docker Volume Service 0.11 (and earlier)
to 0.11.1 and further, by moving _DEFAULT tenant ID to well known and static UUID,
and then correcting directories layout and auth_db tables to comply with new UUID.
Specifically, it does the following:
- Checks if AUTH_DB exists.
If it does not, exit with a message - it means nothing to patch on this ESX
    - Gets uuid (aka 'old_uuid') for _DEFAULT tenant from DB.
If it already STATIC_UUID , exit with a message - nothing to patch
- Stops the service
- backs up the DB
- scans through all <datastore>/volumes/dockvols and
- mkdir STATIC_UUID, if it does not exist
- move all from old_uuid to STATIC_UUID
- symlinks "_DEFAULT" to STATIC_UUID
    In a single DB transaction:
- replaces old_uuid with STATIC UUID in tenant_id field for all tables:
(privileges, vms, tenants, volumes)
    - Starts the service and, if all went well, removes the backup DB
NOTE: this does not delete any data, so the Docker volumes will stay around
no matter if the code succeeds or fails
"""
dbfile = auth_data.AUTH_DB_PATH
# STEP: check DB presense and fetch old_uuid
if not os.path.isfile(dbfile):
print("Config DB", dbfile, "is not found, nothing to update - exiting.")
sys.exit(0)
cursor = sqlite3.connect(dbfile).cursor()
cursor.execute("select * from tenants where name='{0}'".format(STATIC_NAME))
try:
tenant_id, tenant_name, tenant_desr, tenant_def_ds = cursor.fetchone()
except TypeError:
print("Can't find '{0}' tenant, exiting".format(STATIC_NAME))
sys.exit(ERROR)
print("Found default tenant: {0} {1} {2} {3}".format(tenant_id,
tenant_name, tenant_desr, tenant_def_ds))
old_uuid = tenant_id
if old_uuid == STATIC_UUID:
print("*** DB seems to have been already migrated, exiting ***")
sys.exit(OK)
# STEP: Stop the service and back up the DB
backup = dbfile + ".bck"
if os.path.isfile(backup):
print("Backup file '{0}' already exists - skipping DB backup".format(backup))
else:
print("Backing up Config DB to '{0}'".format(backup))
shutil.copy(dbfile, backup)
if STOP_SERVICE:
print("Stopping vmdk-opsd service")
os.system("/etc/init.d/vmdk-opsd stop")
# STEP : patch a datastore - convert dir names to new UUID if needed and move files
print("Starting conversion of _DEFAULT tenant directory names. old_uid is {0}".format(old_uuid))
stores = vmdk_utils.get_datastores()
if not stores:
print("Docker volume storage is not initialized - skipping directories patching")
else:
for datastore in stores:
ds_path = datastore[2]
patch_a_store(ds_path, old_uuid)
# STEP: patch database
print("Working on DB patch...")
# sql for update the DB
# note that:
# {0} is old_uuid (default tenant uuid pre-upgrade)
# {1} is new_uuid (default tenant uuid post-upgrade)
# {2} is tmp name - we need it to comply with DB constraints
# {3} is default tenant description (from DB)
# {4} is default DB for default tenant (from DB)
# {5} is the name ("_DEFAULT") for default tenant
# TBD - use named params in formatting
sql_query_template = \
"""
-- insert temp record to make foreign key happy
INSERT INTO tenants VALUES ( '{1}', '{2}', '{3}', '{4}' ) ;
-- update the tables
UPDATE vms SET tenant_id = '{1}' WHERE tenant_id = '{0}';
UPDATE volumes SET tenant_id = '{1}' WHERE tenant_id = '{0}';
UPDATE privileges SET tenant_id = '{1}' WHERE tenant_id = '{0}';
-- recover _DEFAULT tenant record
DELETE FROM tenants WHERE id = '{0}';
UPDATE tenants SET name = '{5}' WHERE name = '{2}';
UPDATE versions SET major_ver=1, minor_ver=1;
"""
tmp_tenant_name = "__tmp_name_upgrade_0_11"
sql_query = sql_query_template.format(old_uuid, STATIC_UUID, tmp_tenant_name,
tenant_desr, tenant_def_ds,
STATIC_NAME)
cursor.executescript(sql_query)
# STEP: restart the service
if STOP_SERVICE:
print("Starting vmdk-opsd service")
os.system("/etc/init.d/vmdk-opsd start")
    # TBD: remove backup?
print ("*** ALL DONE ***")
if __name__ == "__main__":
main()
| apache-2.0 |
areski/django | django/contrib/postgres/fields/jsonb.py | 341 | 2994 | import json
from psycopg2.extras import Json
from django.contrib.postgres import forms, lookups
from django.core import exceptions
from django.db.models import Field, Transform
from django.utils.translation import ugettext_lazy as _
__all__ = ['JSONField']
class JSONField(Field):
empty_strings_allowed = False
description = _('A JSON object')
default_error_messages = {
'invalid': _("Value must be valid JSON."),
}
def db_type(self, connection):
return 'jsonb'
def get_transform(self, name):
transform = super(JSONField, self).get_transform(name)
if transform:
return transform
return KeyTransformFactory(name)
def get_prep_value(self, value):
if value is not None:
return Json(value)
return value
def get_prep_lookup(self, lookup_type, value):
if lookup_type in ('has_key', 'has_keys', 'has_any_keys'):
return value
if isinstance(value, (dict, list)):
return Json(value)
return super(JSONField, self).get_prep_lookup(lookup_type, value)
def validate(self, value, model_instance):
super(JSONField, self).validate(value, model_instance)
try:
json.dumps(value)
except TypeError:
raise exceptions.ValidationError(
self.error_messages['invalid'],
code='invalid',
params={'value': value},
)
def value_to_string(self, obj):
value = self.value_from_object(obj)
return value
def formfield(self, **kwargs):
defaults = {'form_class': forms.JSONField}
defaults.update(kwargs)
return super(JSONField, self).formfield(**defaults)
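# A minimal usage sketch (model and data are illustrative; assumes
# `from django.db import models`):
#   class Dog(models.Model):
#       data = JSONField()
#   Dog.objects.filter(data__contains={'breed': 'collie'})   # DataContains lookup
#   Dog.objects.filter(data__owner__name='Bob')              # chained key lookups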
JSONField.register_lookup(lookups.DataContains)
JSONField.register_lookup(lookups.ContainedBy)
JSONField.register_lookup(lookups.HasKey)
JSONField.register_lookup(lookups.HasKeys)
JSONField.register_lookup(lookups.HasAnyKeys)
class KeyTransform(Transform):
def __init__(self, key_name, *args, **kwargs):
super(KeyTransform, self).__init__(*args, **kwargs)
self.key_name = key_name
def as_sql(self, compiler, connection):
key_transforms = [self.key_name]
previous = self.lhs
while isinstance(previous, KeyTransform):
key_transforms.insert(0, previous.key_name)
previous = previous.lhs
lhs, params = compiler.compile(previous)
if len(key_transforms) > 1:
return "{} #> %s".format(lhs), [key_transforms] + params
try:
int(self.key_name)
except ValueError:
lookup = "'%s'" % self.key_name
else:
lookup = "%s" % self.key_name
return "%s -> %s" % (lhs, lookup), params
class KeyTransformFactory(object):
def __init__(self, key_name):
self.key_name = key_name
def __call__(self, *args, **kwargs):
return KeyTransform(self.key_name, *args, **kwargs)
| bsd-3-clause |
sherazkasi/SabreSoftware | Lib/site-packages/numpy/doc/structured_arrays.py | 58 | 7222 | """
=====================================
Structured Arrays (aka Record Arrays)
=====================================
Introduction
============
Numpy provides powerful capabilities to create arrays of structs or records.
These arrays permit one to manipulate the data by the structs or by fields of
the struct. A simple example will show what is meant.: ::
>>> x = np.zeros((2,),dtype=('i4,f4,a10'))
>>> x[:] = [(1,2.,'Hello'),(2,3.,"World")]
>>> x
array([(1, 2.0, 'Hello'), (2, 3.0, 'World')],
dtype=[('f0', '>i4'), ('f1', '>f4'), ('f2', '|S10')])
Here we have created a one-dimensional array of length 2. Each element of
this array is a record that contains three items, a 32-bit integer, a 32-bit
float, and a string of length 10 or less. If we index this array at the second
position we get the second record: ::
>>> x[1]
(2,3.,"World")
Conveniently, one can access any field of the array by indexing using the
string that names that field. In this case the fields have received the
default names 'f0', 'f1' and 'f2'.
>>> y = x['f1']
>>> y
array([ 2., 3.], dtype=float32)
>>> y[:] = 2*y
>>> y
array([ 4., 6.], dtype=float32)
>>> x
array([(1, 4.0, 'Hello'), (2, 6.0, 'World')],
dtype=[('f0', '>i4'), ('f1', '>f4'), ('f2', '|S10')])
In these examples, y is a simple float array consisting of the 2nd field
in the record. But, rather than being a copy of the data in the structured
array, it is a view, i.e., it shares exactly the same memory locations.
Thus, when we updated this array by doubling its values, the structured
array shows the corresponding values as doubled as well. Likewise, if one
changes the record, the field view also changes: ::
>>> x[1] = (-1,-1.,"Master")
>>> x
array([(1, 4.0, 'Hello'), (-1, -1.0, 'Master')],
dtype=[('f0', '>i4'), ('f1', '>f4'), ('f2', '|S10')])
>>> y
array([ 4., -1.], dtype=float32)
Defining Structured Arrays
==========================
One defines a structured array through the dtype object. There are
**several** alternative ways to define the fields of a record. Some of
these variants provide backward compatibility with Numeric, numarray, or
another module, and should not be used except for such purposes. These
will be so noted. One specifies record structure in
one of four alternative ways, using an argument (as supplied to a dtype
function keyword or a dtype object constructor itself). This
argument must be one of the following: 1) string, 2) tuple, 3) list, or
4) dictionary. Each of these is briefly described below.
1) String argument (as used in the above examples).
In this case, the constructor expects a comma-separated list of type
specifiers, optionally with extra shape information.
The type specifiers can take 4 different forms: ::
a) b1, i1, i2, i4, i8, u1, u2, u4, u8, f4, f8, c8, c16, a<n>
(representing bytes, ints, unsigned ints, floats, complex and
fixed length strings of specified byte lengths)
b) int8,...,uint8,...,float32, float64, complex64, complex128
(this time with bit sizes)
c) older Numeric/numarray type specifications (e.g. Float32).
Don't use these in new code!
   d) Single character type specifiers (e.g. H for unsigned short ints).
Avoid using these unless you must. Details can be found in the
Numpy book
These different styles can be mixed within the same string (but why would you
want to do that?). Furthermore, each type specifier can be prefixed
with a repetition number, or a shape. In these cases an array
element is created, i.e., an array within a record. That array
is still referred to as a single field. An example: ::
>>> x = np.zeros(3, dtype='3int8, float32, (2,3)float64')
>>> x
array([([0, 0, 0], 0.0, [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]),
([0, 0, 0], 0.0, [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]),
([0, 0, 0], 0.0, [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]])],
dtype=[('f0', '|i1', 3), ('f1', '>f4'), ('f2', '>f8', (2, 3))])
Using strings to define the record structure precludes naming the
fields in the original definition. The names can be changed later,
however, as shown below.
2) Tuple argument: The only relevant tuple case that applies to record
structures is when a structure is mapped to an existing data type. This
is done by pairing in a tuple, the existing data type with a matching
dtype definition (using any of the variants being described here). As
an example (using a definition using a list, so see 3) for further
details): ::
>>> x = np.zeros(3, dtype=('i4',[('r','u1'), ('g','u1'), ('b','u1'), ('a','u1')]))
>>> x
array([0, 0, 0])
>>> x['r']
array([0, 0, 0], dtype=uint8)
In this case, an array is produced that looks and acts like a simple int32 array,
but also has definitions for fields that use only one byte of the int32 (a bit
like Fortran equivalencing).
3) List argument: In this case the record structure is defined with a list of
tuples. Each tuple has 2 or 3 elements specifying: 1) The name of the field
('' is permitted), 2) the type of the field, and 3) the shape (optional).
For example:
>>> x = np.zeros(3, dtype=[('x','f4'),('y',np.float32),('value','f4',(2,2))])
>>> x
array([(0.0, 0.0, [[0.0, 0.0], [0.0, 0.0]]),
(0.0, 0.0, [[0.0, 0.0], [0.0, 0.0]]),
(0.0, 0.0, [[0.0, 0.0], [0.0, 0.0]])],
dtype=[('x', '>f4'), ('y', '>f4'), ('value', '>f4', (2, 2))])
4) Dictionary argument: two different forms are permitted. The first consists
of a dictionary with two required keys ('names' and 'formats'), each having an
equal sized list of values. The format list contains any type/shape specifier
allowed in other contexts. The names must be strings. There are two optional
keys: 'offsets' and 'titles'. Each must be a correspondingly matching list to
the required two where offsets contain integer offsets for each field, and
titles are objects containing metadata for each field (these do not have
to be strings), where the value of None is permitted. As an example: ::
>>> x = np.zeros(3, dtype={'names':['col1', 'col2'], 'formats':['i4','f4']})
>>> x
array([(0, 0.0), (0, 0.0), (0, 0.0)],
dtype=[('col1', '>i4'), ('col2', '>f4')])
The other dictionary form permitted is a dictionary of name keys with tuple
values specifying type, offset, and an optional title.
>>> x = np.zeros(3, dtype={'col1':('i1',0,'title 1'), 'col2':('f4',1,'title 2')})
>>> x
array([(0, 0.0), (0, 0.0), (0, 0.0)],
dtype=[(('title 1', 'col1'), '|i1'), (('title 2', 'col2'), '>f4')])
Accessing and modifying field names
===================================
The field names are an attribute of the dtype object defining the record structure.
For the last example: ::
>>> x.dtype.names
('col1', 'col2')
>>> x.dtype.names = ('x', 'y')
>>> x
array([(0, 0.0), (0, 0.0), (0, 0.0)],
dtype=[(('title 1', 'x'), '|i1'), (('title 2', 'y'), '>f4')])
>>> x.dtype.names = ('x', 'y', 'z') # wrong number of names
<type 'exceptions.ValueError'>: must replace all names at once with a sequence of length 2
Accessing field titles
====================================
The field titles provide a standard place to put associated info for fields.
They do not have to be strings.
>>> x.dtype.fields['x'][2]
'title 1'
"""
| gpl-3.0 |
UManPychron/pychron | pychron/core/test_helpers.py | 2 | 2382 | # ===============================================================================
# Copyright 2014 Jake Ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
# ============= enthought library imports =======================
# ============= standard library imports ========================
# ============= local library imports ==========================
from __future__ import absolute_import
import os
def get_data_dir(op):
if not os.path.isdir(op):
op = os.path.join('.', 'data')
return op
def dvc_db_factory(path, remove=True, **kw):
from pychron.dvc.dvc_database import DVCDatabase
from pychron.dvc.dvc_orm import Base
return _db_factory(DVCDatabase, Base, path, remove, **kw)
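# Example (path is hypothetical; remove=True drops any pre-existing DB file):
#   db = dvc_db_factory('/tmp/test_dvc.sqlite')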
def _db_factory(klass, base, path, remove, **kw):
db = klass()
# db.verbose_retrieve_query = True
db.trait_set(kind='sqlite', path=path, **kw)
db.connect()
if remove and os.path.isfile(db.path):
os.remove(db.path)
metadata = base.metadata
db.create_all(metadata)
return db
def isotope_db_factory(path, remove=True):
from pychron.database.adapters.isotope_adapter import IsotopeAdapter
from pychron.database.orms.isotope.util import Base
return _db_factory(IsotopeAdapter, Base, path, remove)
def massspec_db_factory(path, remove=True):
from pychron.mass_spec.database.massspec_database_adapter import MassSpecDatabaseAdapter
from pychron.mass_spec.database.massspec_orm import Base
if remove and os.path.isfile(path):
os.remove(path)
db = MassSpecDatabaseAdapter()
# db.verbose_retrieve_query = True
db.trait_set(kind='sqlite', path=path)
db.connect()
metadata = Base.metadata
db.create_all(metadata)
return db
# ============= EOF =============================================
| apache-2.0 |
AaronH92/mProve3.2 | node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/generator/eclipse.py | 1825 | 17014 | # Copyright (c) 2012 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""GYP backend that generates Eclipse CDT settings files.
This backend DOES NOT generate Eclipse CDT projects. Instead, it generates XML
files that can be imported into an Eclipse CDT project. The XML file contains a
list of include paths and symbols (i.e. defines).
Because a full .cproject definition is not created by this generator, it's not
possible to properly define the include dirs and symbols for each file
individually. Instead, one set of includes/symbols is generated for the entire
project. This works fairly well (and is a vast improvement in general), but may
still result in a few indexer issues here and there.
This generator has no automated tests, so expect it to be broken.
"""
from xml.sax.saxutils import escape
import os.path
import subprocess
import gyp
import gyp.common
import gyp.msvs_emulation
import shlex
import xml.etree.cElementTree as ET
generator_wants_static_library_dependencies_adjusted = False
generator_default_variables = {
}
for dirname in ['INTERMEDIATE_DIR', 'PRODUCT_DIR', 'LIB_DIR', 'SHARED_LIB_DIR']:
# Some gyp steps fail if these are empty(!), so we convert them to variables
generator_default_variables[dirname] = '$' + dirname
for unused in ['RULE_INPUT_PATH', 'RULE_INPUT_ROOT', 'RULE_INPUT_NAME',
'RULE_INPUT_DIRNAME', 'RULE_INPUT_EXT',
'EXECUTABLE_PREFIX', 'EXECUTABLE_SUFFIX',
'STATIC_LIB_PREFIX', 'STATIC_LIB_SUFFIX',
'SHARED_LIB_PREFIX', 'SHARED_LIB_SUFFIX',
'CONFIGURATION_NAME']:
generator_default_variables[unused] = ''
# Include dirs will occasionally use the SHARED_INTERMEDIATE_DIR variable as
# part of the path when dealing with generated headers. This value will be
# replaced dynamically for each configuration.
generator_default_variables['SHARED_INTERMEDIATE_DIR'] = \
'$SHARED_INTERMEDIATE_DIR'
def CalculateVariables(default_variables, params):
generator_flags = params.get('generator_flags', {})
for key, val in generator_flags.items():
default_variables.setdefault(key, val)
flavor = gyp.common.GetFlavor(params)
default_variables.setdefault('OS', flavor)
if flavor == 'win':
# Copy additional generator configuration data from VS, which is shared
# by the Eclipse generator.
import gyp.generator.msvs as msvs_generator
generator_additional_non_configuration_keys = getattr(msvs_generator,
'generator_additional_non_configuration_keys', [])
generator_additional_path_sections = getattr(msvs_generator,
'generator_additional_path_sections', [])
gyp.msvs_emulation.CalculateCommonVariables(default_variables, params)
def CalculateGeneratorInputInfo(params):
"""Calculate the generator specific info that gets fed to input (called by
gyp)."""
generator_flags = params.get('generator_flags', {})
if generator_flags.get('adjust_static_libraries', False):
global generator_wants_static_library_dependencies_adjusted
generator_wants_static_library_dependencies_adjusted = True
def GetAllIncludeDirectories(target_list, target_dicts,
shared_intermediate_dirs, config_name, params,
compiler_path):
"""Calculate the set of include directories to be used.
Returns:
    A list including all the include_dirs specified for every target, followed
by any include directories that were added as cflag compiler options.
"""
gyp_includes_set = set()
compiler_includes_list = []
# Find compiler's default include dirs.
if compiler_path:
command = shlex.split(compiler_path)
command.extend(['-E', '-xc++', '-v', '-'])
proc = subprocess.Popen(args=command, stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
output = proc.communicate()[1]
# Extract the list of include dirs from the output, which has this format:
# ...
# #include "..." search starts here:
# #include <...> search starts here:
# /usr/include/c++/4.6
# /usr/local/include
# End of search list.
# ...
in_include_list = False
for line in output.splitlines():
if line.startswith('#include'):
in_include_list = True
continue
if line.startswith('End of search list.'):
break
if in_include_list:
include_dir = line.strip()
if include_dir not in compiler_includes_list:
compiler_includes_list.append(include_dir)
flavor = gyp.common.GetFlavor(params)
if flavor == 'win':
generator_flags = params.get('generator_flags', {})
for target_name in target_list:
target = target_dicts[target_name]
if config_name in target['configurations']:
config = target['configurations'][config_name]
# Look for any include dirs that were explicitly added via cflags. This
# may be done in gyp files to force certain includes to come at the end.
# TODO(jgreenwald): Change the gyp files to not abuse cflags for this, and
# remove this.
if flavor == 'win':
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
cflags = msvs_settings.GetCflags(config_name)
else:
cflags = config['cflags']
for cflag in cflags:
if cflag.startswith('-I'):
include_dir = cflag[2:]
if include_dir not in compiler_includes_list:
compiler_includes_list.append(include_dir)
# Find standard gyp include dirs.
      if 'include_dirs' in config:
include_dirs = config['include_dirs']
for shared_intermediate_dir in shared_intermediate_dirs:
for include_dir in include_dirs:
include_dir = include_dir.replace('$SHARED_INTERMEDIATE_DIR',
shared_intermediate_dir)
if not os.path.isabs(include_dir):
base_dir = os.path.dirname(target_name)
include_dir = base_dir + '/' + include_dir
include_dir = os.path.abspath(include_dir)
gyp_includes_set.add(include_dir)
# Generate a list that has all the include dirs.
all_includes_list = list(gyp_includes_set)
all_includes_list.sort()
for compiler_include in compiler_includes_list:
    if compiler_include not in gyp_includes_set:
all_includes_list.append(compiler_include)
# All done.
return all_includes_list
def GetCompilerPath(target_list, data, options):
"""Determine a command that can be used to invoke the compiler.
Returns:
If this is a gyp project that has explicit make settings, try to determine
the compiler from that. Otherwise, see if a compiler was specified via the
CC_target environment variable.
"""
# First, see if the compiler is configured in make's settings.
build_file, _, _ = gyp.common.ParseQualifiedTarget(target_list[0])
make_global_settings_dict = data[build_file].get('make_global_settings', {})
for key, value in make_global_settings_dict:
if key in ['CC', 'CXX']:
return os.path.join(options.toplevel_dir, value)
# Check to see if the compiler was specified as an environment variable.
for key in ['CC_target', 'CC', 'CXX']:
compiler = os.environ.get(key)
if compiler:
return compiler
return 'gcc'
def GetAllDefines(target_list, target_dicts, data, config_name, params,
compiler_path):
"""Calculate the defines for a project.
Returns:
    A dict that includes explicit defines declared in gyp files along with all of
the default defines that the compiler uses.
"""
# Get defines declared in the gyp files.
all_defines = {}
flavor = gyp.common.GetFlavor(params)
if flavor == 'win':
generator_flags = params.get('generator_flags', {})
for target_name in target_list:
target = target_dicts[target_name]
if flavor == 'win':
msvs_settings = gyp.msvs_emulation.MsvsSettings(target, generator_flags)
extra_defines = msvs_settings.GetComputedDefines(config_name)
else:
extra_defines = []
if config_name in target['configurations']:
config = target['configurations'][config_name]
target_defines = config['defines']
else:
target_defines = []
for define in target_defines + extra_defines:
split_define = define.split('=', 1)
if len(split_define) == 1:
split_define.append('1')
if split_define[0].strip() in all_defines:
# Already defined
continue
all_defines[split_define[0].strip()] = split_define[1].strip()
# Get default compiler defines (if possible).
if flavor == 'win':
return all_defines # Default defines already processed in the loop above.
if compiler_path:
command = shlex.split(compiler_path)
command.extend(['-E', '-dM', '-'])
cpp_proc = subprocess.Popen(args=command, cwd='.',
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
cpp_output = cpp_proc.communicate()[0]
cpp_lines = cpp_output.split('\n')
for cpp_line in cpp_lines:
if not cpp_line.strip():
continue
cpp_line_parts = cpp_line.split(' ', 2)
key = cpp_line_parts[1]
if len(cpp_line_parts) >= 3:
val = cpp_line_parts[2]
else:
val = '1'
all_defines[key] = val
return all_defines
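# Illustrative fragment of the `<compiler> -E -dM -` output parsed above
# (exact defines vary per toolchain):
#
#   #define __GNUC__ 4
#   #define __linux__ 1
#
# Each line is split on ' ' into at most three parts; a define without an
# explicit value is recorded as '1'.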
def WriteIncludePaths(out, eclipse_langs, include_dirs):
"""Write the includes section of a CDT settings export file."""
out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \
'settingswizards.IncludePaths">\n')
out.write(' <language name="holder for library settings"></language>\n')
for lang in eclipse_langs:
out.write(' <language name="%s">\n' % lang)
for include_dir in include_dirs:
out.write(' <includepath workspace_path="false">%s</includepath>\n' %
include_dir)
out.write(' </language>\n')
out.write(' </section>\n')
def WriteMacros(out, eclipse_langs, defines):
"""Write the macros section of a CDT settings export file."""
out.write(' <section name="org.eclipse.cdt.internal.ui.wizards.' \
'settingswizards.Macros">\n')
out.write(' <language name="holder for library settings"></language>\n')
for lang in eclipse_langs:
out.write(' <language name="%s">\n' % lang)
for key in sorted(defines.iterkeys()):
out.write(' <macro><name>%s</name><value>%s</value></macro>\n' %
(escape(key), escape(defines[key])))
out.write(' </language>\n')
out.write(' </section>\n')
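# The two writers above emit sections of roughly this shape (abridged, with
# illustrative values):
#
#   <section name="org.eclipse.cdt.internal.ui.wizards.settingswizards.Macros">
#     <language name="GNU C++">
#       <macro><name>OS_LINUX</name><value>1</value></macro>
#     </language>
#   </section>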
def GenerateOutputForConfig(target_list, target_dicts, data, params,
config_name):
options = params['options']
generator_flags = params.get('generator_flags', {})
# build_dir: relative path from source root to our output files.
# e.g. "out/Debug"
build_dir = os.path.join(generator_flags.get('output_dir', 'out'),
config_name)
toplevel_build = os.path.join(options.toplevel_dir, build_dir)
# Ninja uses out/Debug/gen while make uses out/Debug/obj/gen as the
# SHARED_INTERMEDIATE_DIR. Include both possible locations.
shared_intermediate_dirs = [os.path.join(toplevel_build, 'obj', 'gen'),
os.path.join(toplevel_build, 'gen')]
GenerateCdtSettingsFile(target_list,
target_dicts,
data,
params,
config_name,
os.path.join(toplevel_build,
'eclipse-cdt-settings.xml'),
options,
shared_intermediate_dirs)
GenerateClasspathFile(target_list,
target_dicts,
options.toplevel_dir,
toplevel_build,
os.path.join(toplevel_build,
'eclipse-classpath.xml'))
def GenerateCdtSettingsFile(target_list, target_dicts, data, params,
config_name, out_name, options,
shared_intermediate_dirs):
gyp.common.EnsureDirExists(out_name)
with open(out_name, 'w') as out:
out.write('<?xml version="1.0" encoding="UTF-8"?>\n')
out.write('<cdtprojectproperties>\n')
eclipse_langs = ['C++ Source File', 'C Source File', 'Assembly Source File',
'GNU C++', 'GNU C', 'Assembly']
compiler_path = GetCompilerPath(target_list, data, options)
include_dirs = GetAllIncludeDirectories(target_list, target_dicts,
shared_intermediate_dirs,
config_name, params, compiler_path)
WriteIncludePaths(out, eclipse_langs, include_dirs)
defines = GetAllDefines(target_list, target_dicts, data, config_name,
params, compiler_path)
WriteMacros(out, eclipse_langs, defines)
out.write('</cdtprojectproperties>\n')
def GenerateClasspathFile(target_list, target_dicts, toplevel_dir,
toplevel_build, out_name):
'''Generates a classpath file suitable for symbol navigation and code
completion of Java code (such as in Android projects) by finding all
.java and .jar files used as action inputs.'''
gyp.common.EnsureDirExists(out_name)
result = ET.Element('classpath')
def AddElements(kind, paths):
# First, we need to normalize the paths so they are all relative to the
# toplevel dir.
rel_paths = set()
for path in paths:
if os.path.isabs(path):
rel_paths.add(os.path.relpath(path, toplevel_dir))
else:
rel_paths.add(path)
for path in sorted(rel_paths):
entry_element = ET.SubElement(result, 'classpathentry')
entry_element.set('kind', kind)
entry_element.set('path', path)
AddElements('lib', GetJavaJars(target_list, target_dicts, toplevel_dir))
AddElements('src', GetJavaSourceDirs(target_list, target_dicts, toplevel_dir))
# Include the standard JRE container and a dummy out folder
AddElements('con', ['org.eclipse.jdt.launching.JRE_CONTAINER'])
# Include a dummy out folder so that Eclipse doesn't use the default /bin
# folder in the root of the project.
AddElements('output', [os.path.join(toplevel_build, '.eclipse-java-build')])
ET.ElementTree(result).write(out_name)
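# The emitted classpath file looks roughly like this (abridged; paths are
# illustrative):
#
#   <classpath>
#     <classpathentry kind="src" path="base/android/java/src" />
#     <classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER" />
#     <classpathentry kind="output" path="out/Debug/.eclipse-java-build" />
#   </classpath>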
def GetJavaJars(target_list, target_dicts, toplevel_dir):
'''Generates a sequence of all .jars used as inputs.'''
for target_name in target_list:
target = target_dicts[target_name]
for action in target.get('actions', []):
for input_ in action['inputs']:
if os.path.splitext(input_)[1] == '.jar' and not input_.startswith('$'):
if os.path.isabs(input_):
yield input_
else:
yield os.path.join(os.path.dirname(target_name), input_)
def GetJavaSourceDirs(target_list, target_dicts, toplevel_dir):
'''Generates a sequence of all likely java package root directories.'''
for target_name in target_list:
target = target_dicts[target_name]
for action in target.get('actions', []):
for input_ in action['inputs']:
if (os.path.splitext(input_)[1] == '.java' and
not input_.startswith('$')):
dir_ = os.path.dirname(os.path.join(os.path.dirname(target_name),
input_))
# If there is a parent 'src' or 'java' folder, navigate up to it -
# these are canonical package root names in Chromium. This will
# break if 'src' or 'java' exists in the package structure. This
# could be further improved by inspecting the java file for the
# package name if this proves to be too fragile in practice.
parent_search = dir_
while os.path.basename(parent_search) not in ['src', 'java']:
parent_search, _ = os.path.split(parent_search)
if not parent_search or parent_search == toplevel_dir:
# Didn't find a known root, just return the original path
yield dir_
break
else:
yield parent_search
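# Illustrative walk (hypothetical path): a .java input of
#   base/android/java/src/org/chromium/base/Foo.java
# gives dir_ == 'base/android/java/src/org/chromium/base'; the loop above
# ascends until a component named 'src' or 'java' is reached, yielding
# 'base/android/java/src' as the package root.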
def GenerateOutput(target_list, target_dicts, data, params):
"""Generate an XML settings file that can be imported into a CDT project."""
if params['options'].generator_output:
raise NotImplementedError("--generator_output not implemented for eclipse")
user_config = params.get('generator_flags', {}).get('config', None)
if user_config:
GenerateOutputForConfig(target_list, target_dicts, data, params,
user_config)
else:
config_names = target_dicts[target_list[0]]['configurations'].keys()
for config_name in config_names:
GenerateOutputForConfig(target_list, target_dicts, data, params,
config_name)
| apache-2.0 |
tangfeixiong/nova | nova/tests/unit/api/openstack/compute/contrib/test_flavorextradata.py | 63 | 3996 | # Copyright 2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
from oslo_serialization import jsonutils
import webob
from nova.compute import flavors
from nova import test
from nova.tests.unit.api.openstack import fakes
def fake_get_flavor_by_flavor_id(flavorid, ctxt=None):
return {
'id': flavorid,
'flavorid': str(flavorid),
'root_gb': 1,
'ephemeral_gb': 1,
'name': u'test',
'deleted': False,
'created_at': datetime.datetime(2012, 1, 1, 1, 1, 1, 1),
'updated_at': None,
'memory_mb': 512,
'vcpus': 1,
'extra_specs': {},
'deleted_at': None,
'vcpu_weight': None,
'swap': 0,
'disabled': False,
}
def fake_get_all_flavors_sorted_list(context=None, inactive=False,
filters=None, sort_key='flavorid',
sort_dir='asc', limit=None, marker=None):
return [
fake_get_flavor_by_flavor_id(1),
fake_get_flavor_by_flavor_id(2)
]
class FlavorExtraDataTestV21(test.NoDBTestCase):
base_url = '/v2/fake/flavors'
def setUp(self):
super(FlavorExtraDataTestV21, self).setUp()
ext = ('nova.api.openstack.compute.contrib'
'.flavorextradata.Flavorextradata')
self.flags(osapi_compute_extension=[ext])
self.stubs.Set(flavors, 'get_flavor_by_flavor_id',
fake_get_flavor_by_flavor_id)
self.stubs.Set(flavors, 'get_all_flavors_sorted_list',
fake_get_all_flavors_sorted_list)
self._setup_app()
def _setup_app(self):
        self.app = fakes.wsgi_app_v21(init_only=('flavors',))
def _verify_flavor_response(self, flavor, expected):
for key in expected:
self.assertEqual(flavor[key], expected[key])
def test_show(self):
expected = {
'flavor': {
'id': '1',
'name': 'test',
'ram': 512,
'vcpus': 1,
'disk': 1,
'OS-FLV-EXT-DATA:ephemeral': 1,
}
}
url = self.base_url + '/1'
req = webob.Request.blank(url)
req.headers['Content-Type'] = 'application/json'
res = req.get_response(self.app)
body = jsonutils.loads(res.body)
self._verify_flavor_response(body['flavor'], expected['flavor'])
def test_detail(self):
expected = [
{
'id': '1',
'name': 'test',
'ram': 512,
'vcpus': 1,
'disk': 1,
'OS-FLV-EXT-DATA:ephemeral': 1,
},
{
'id': '2',
'name': 'test',
'ram': 512,
'vcpus': 1,
'disk': 1,
'OS-FLV-EXT-DATA:ephemeral': 1,
},
]
url = self.base_url + '/detail'
req = webob.Request.blank(url)
req.headers['Content-Type'] = 'application/json'
res = req.get_response(self.app)
body = jsonutils.loads(res.body)
for i, flavor in enumerate(body['flavors']):
self._verify_flavor_response(flavor, expected[i])
class FlavorExtraDataTestV2(FlavorExtraDataTestV21):
def _setup_app(self):
self.app = fakes.wsgi_app(init_only=('flavors',))
| apache-2.0 |
viggates/nova | nova/virt/baremetal/db/sqlalchemy/migration.py | 13 | 2924 | # Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import os
from migrate import exceptions as versioning_exceptions
from migrate.versioning import api as versioning_api
from migrate.versioning.repository import Repository
import sqlalchemy
from nova import exception
from nova.i18n import _
from nova.virt.baremetal.db.sqlalchemy import session
INIT_VERSION = 0
_REPOSITORY = None
def db_sync(version=None):
if version is not None:
try:
version = int(version)
except ValueError:
raise exception.NovaException(_("version should be an integer"))
current_version = db_version()
repository = _find_migrate_repo()
if version is None or version > current_version:
return versioning_api.upgrade(session.get_engine(), repository,
version)
else:
return versioning_api.downgrade(session.get_engine(), repository,
version)
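# Usage sketch (the version number below is illustrative):
#
#   db_sync()      # upgrade the baremetal schema to the latest version
#   db_sync(42)    # upgrade or downgrade to schema version 42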
def db_version():
repository = _find_migrate_repo()
try:
return versioning_api.db_version(session.get_engine(), repository)
except versioning_exceptions.DatabaseNotControlledError:
meta = sqlalchemy.MetaData()
engine = session.get_engine()
meta.reflect(bind=engine)
tables = meta.tables
if len(tables) == 0:
db_version_control(INIT_VERSION)
return versioning_api.db_version(session.get_engine(), repository)
else:
# Some pre-Essex DB's may not be version controlled.
# Require them to upgrade using Essex first.
raise exception.NovaException(
_("Upgrade DB using Essex release first."))
def db_initial_version():
return INIT_VERSION
def db_version_control(version=None):
repository = _find_migrate_repo()
versioning_api.version_control(session.get_engine(), repository, version)
return version
def _find_migrate_repo():
"""Get the path for the migrate repository."""
global _REPOSITORY
path = os.path.join(os.path.abspath(os.path.dirname(__file__)),
'migrate_repo')
assert os.path.exists(path)
if _REPOSITORY is None:
_REPOSITORY = Repository(path)
return _REPOSITORY
| apache-2.0 |
svost/bitcoin | qa/rpc-tests/assumevalid.py | 45 | 7764 | #!/usr/bin/env python3
# Copyright (c) 2014-2016 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
assumevalid.py
Test logic for skipping signature validation on blocks which we've assumed
valid (https://github.com/bitcoin/bitcoin/pull/9484)
We build a chain that includes an invalid signature for one of the
transactions:
0: genesis block
1: block 1 with coinbase transaction output.
2-101: bury that block with 100 blocks so the coinbase transaction
output can be spent
102: a block containing a transaction spending the coinbase
transaction output. The transaction has an invalid signature.
103-2202: bury the bad block with just over two weeks' worth of blocks
(2100 blocks)
Start three nodes:
- node0 has no -assumevalid parameter. Try to sync to block 2202. It will
reject block 102 and only sync as far as block 101
- node1 has -assumevalid set to the hash of block 102. Try to sync to
block 2202. node1 will sync all the way to block 2202.
- node2 has -assumevalid set to the hash of block 102. Try to sync to
  block 200. node2 will reject block 102 even though it's assumed valid,
  because it isn't buried by at least two weeks' worth of work.
'''
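# The nodes below are started with (hash value illustrative):
#
#   bitcoind -assumevalid=<hex hash of block 102>
#
# which skips script verification for ancestors of that block, but only when
# the block is buried under sufficient proof-of-work.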
from test_framework.mininode import *
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import *
from test_framework.blocktools import create_block, create_coinbase
from test_framework.key import CECKey
from test_framework.script import *
class BaseNode(SingleNodeConnCB):
def __init__(self):
SingleNodeConnCB.__init__(self)
self.last_inv = None
self.last_headers = None
self.last_block = None
self.last_getdata = None
self.block_announced = False
self.last_getheaders = None
self.disconnected = False
self.last_blockhash_announced = None
def on_close(self, conn):
self.disconnected = True
def wait_for_disconnect(self, timeout=60):
test_function = lambda: self.disconnected
assert(wait_until(test_function, timeout=timeout))
return
def send_header_for_blocks(self, new_blocks):
headers_message = msg_headers()
headers_message.headers = [ CBlockHeader(b) for b in new_blocks ]
self.send_message(headers_message)
class SendHeadersTest(BitcoinTestFramework):
def __init__(self):
super().__init__()
self.setup_clean_chain = True
self.num_nodes = 3
def setup_network(self):
# Start node0. We don't start the other nodes yet since
# we need to pre-mine a block with an invalid transaction
# signature so we can pass in the block hash as assumevalid.
self.nodes = []
self.nodes.append(start_node(0, self.options.tmpdir, ["-debug"]))
def run_test(self):
# Connect to node0
node0 = BaseNode()
connections = []
connections.append(NodeConn('127.0.0.1', p2p_port(0), self.nodes[0], node0))
node0.add_connection(connections[0])
NetworkThread().start() # Start up network handling in another thread
node0.wait_for_verack()
# Build the blockchain
self.tip = int(self.nodes[0].getbestblockhash(), 16)
self.block_time = self.nodes[0].getblock(self.nodes[0].getbestblockhash())['time'] + 1
self.blocks = []
# Get a pubkey for the coinbase TXO
coinbase_key = CECKey()
coinbase_key.set_secretbytes(b"horsebattery")
coinbase_pubkey = coinbase_key.get_pubkey()
# Create the first block with a coinbase output to our key
height = 1
block = create_block(self.tip, create_coinbase(height, coinbase_pubkey), self.block_time)
self.blocks.append(block)
self.block_time += 1
block.solve()
# Save the coinbase for later
self.block1 = block
self.tip = block.sha256
height += 1
# Bury the block 100 deep so the coinbase output is spendable
for i in range(100):
block = create_block(self.tip, create_coinbase(height), self.block_time)
block.solve()
self.blocks.append(block)
self.tip = block.sha256
self.block_time += 1
height += 1
# Create a transaction spending the coinbase output with an invalid (null) signature
tx = CTransaction()
tx.vin.append(CTxIn(COutPoint(self.block1.vtx[0].sha256, 0), scriptSig=b""))
tx.vout.append(CTxOut(49*100000000, CScript([OP_TRUE])))
tx.calc_sha256()
block102 = create_block(self.tip, create_coinbase(height), self.block_time)
self.block_time += 1
block102.vtx.extend([tx])
block102.hashMerkleRoot = block102.calc_merkle_root()
block102.rehash()
block102.solve()
self.blocks.append(block102)
self.tip = block102.sha256
self.block_time += 1
height += 1
# Bury the assumed valid block 2100 deep
for i in range(2100):
block = create_block(self.tip, create_coinbase(height), self.block_time)
block.nVersion = 4
block.solve()
self.blocks.append(block)
self.tip = block.sha256
self.block_time += 1
height += 1
# Start node1 and node2 with assumevalid so they accept a block with a bad signature.
self.nodes.append(start_node(1, self.options.tmpdir,
["-debug", "-assumevalid=" + hex(block102.sha256)]))
node1 = BaseNode() # connects to node1
connections.append(NodeConn('127.0.0.1', p2p_port(1), self.nodes[1], node1))
node1.add_connection(connections[1])
node1.wait_for_verack()
self.nodes.append(start_node(2, self.options.tmpdir,
["-debug", "-assumevalid=" + hex(block102.sha256)]))
node2 = BaseNode() # connects to node2
connections.append(NodeConn('127.0.0.1', p2p_port(2), self.nodes[2], node2))
node2.add_connection(connections[2])
node2.wait_for_verack()
# send header lists to all three nodes
node0.send_header_for_blocks(self.blocks[0:2000])
node0.send_header_for_blocks(self.blocks[2000:])
node1.send_header_for_blocks(self.blocks[0:2000])
node1.send_header_for_blocks(self.blocks[2000:])
node2.send_header_for_blocks(self.blocks[0:200])
# Send 102 blocks to node0. Block 102 will be rejected.
for i in range(101):
node0.send_message(msg_block(self.blocks[i]))
node0.sync_with_ping() # make sure the most recent block is synced
node0.send_message(msg_block(self.blocks[101]))
assert_equal(self.nodes[0].getblock(self.nodes[0].getbestblockhash())['height'], 101)
        # Send 2202 blocks to node1. All blocks will be accepted.
for i in range(2202):
node1.send_message(msg_block(self.blocks[i]))
node1.sync_with_ping() # make sure the most recent block is synced
assert_equal(self.nodes[1].getblock(self.nodes[1].getbestblockhash())['height'], 2202)
# Send 102 blocks to node2. Block 102 will be rejected.
for i in range(101):
node2.send_message(msg_block(self.blocks[i]))
node2.sync_with_ping() # make sure the most recent block is synced
node2.send_message(msg_block(self.blocks[101]))
assert_equal(self.nodes[2].getblock(self.nodes[2].getbestblockhash())['height'], 101)
if __name__ == '__main__':
SendHeadersTest().main()
| mit |
yfried/ansible | lib/ansible/modules/web_infrastructure/ansible_tower/tower_group.py | 27 | 5624 | #!/usr/bin/python
# coding: utf-8 -*-
# (c) 2017, Wayne Witzel III <wayne@riotousliving.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = '''
---
module: tower_group
author: "Wayne Witzel III (@wwitzel3)"
version_added: "2.3"
short_description: create, update, or destroy Ansible Tower group.
description:
- Create, update, or destroy Ansible Tower groups. See
U(https://www.ansible.com/tower) for an overview.
options:
name:
description:
- The name to use for the group.
required: True
description:
description:
- The description to use for the group.
inventory:
description:
- Inventory the group should be made a member of.
required: True
variables:
description:
- Variables to use for the group, use C(@) for a file.
credential:
description:
- Credential to use for the group.
source:
description:
- The source to use for this group.
choices: ["manual", "file", "ec2", "rax", "vmware", "gce", "azure", "azure_rm", "openstack", "satellite6" , "cloudforms", "custom"]
source_regions:
description:
- Regions for cloud provider.
source_vars:
description:
- Override variables from source with variables from this field.
instance_filters:
description:
- Comma-separated list of filter expressions for matching hosts.
group_by:
description:
- Limit groups automatically created from inventory source.
source_script:
description:
- Inventory script to be used when group type is C(custom).
overwrite:
description:
- Delete child groups and hosts not found in source.
type: bool
default: 'no'
overwrite_vars:
description:
- Override vars in child groups and hosts with those from external source.
update_on_launch:
description:
- Refresh inventory data from its source each time a job is run.
type: bool
default: 'no'
state:
description:
- Desired state of the resource.
default: "present"
choices: ["present", "absent"]
extends_documentation_fragment: tower
'''
EXAMPLES = '''
- name: Add tower group
tower_group:
name: localhost
description: "Local Host Group"
inventory: "Local Inventory"
state: present
tower_config_file: "~/tower_cli.cfg"
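# Illustrative only: variables can also be loaded from a file via the C(@)
# syntax documented above (the file name is hypothetical).
- name: Add tower group with variables from a file
  tower_group:
    name: localhost
    inventory: "Local Inventory"
    variables: "@group_vars.yml"
    state: present
    tower_config_file: "~/tower_cli.cfg"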
'''
import os
from ansible.module_utils.ansible_tower import TowerModule, tower_auth_config, tower_check_mode
try:
import tower_cli
import tower_cli.utils.exceptions as exc
from tower_cli.conf import settings
except ImportError:
pass
def main():
argument_spec = dict(
name=dict(required=True),
description=dict(),
inventory=dict(required=True),
variables=dict(),
credential=dict(),
source=dict(choices=["manual", "file", "ec2", "rax", "vmware",
"gce", "azure", "azure_rm", "openstack",
"satellite6", "cloudforms", "custom"], default="manual"),
source_regions=dict(),
source_vars=dict(),
instance_filters=dict(),
group_by=dict(),
source_script=dict(),
overwrite=dict(type='bool', default=False),
overwrite_vars=dict(),
update_on_launch=dict(type='bool', default=False),
state=dict(choices=['present', 'absent'], default='present'),
)
module = TowerModule(argument_spec=argument_spec, supports_check_mode=True)
name = module.params.get('name')
inventory = module.params.get('inventory')
credential = module.params.get('credential')
state = module.params.pop('state')
variables = module.params.get('variables')
if variables:
if variables.startswith('@'):
filename = os.path.expanduser(variables[1:])
with open(filename, 'r') as f:
variables = f.read()
json_output = {'group': name, 'state': state}
tower_auth = tower_auth_config(module)
with settings.runtime_values(**tower_auth):
tower_check_mode(module)
group = tower_cli.get_resource('group')
try:
params = module.params.copy()
params['create_on_missing'] = True
params['variables'] = variables
inv_res = tower_cli.get_resource('inventory')
inv = inv_res.get(name=inventory)
params['inventory'] = inv['id']
if credential:
cred_res = tower_cli.get_resource('credential')
cred = cred_res.get(name=credential)
params['credential'] = cred['id']
if state == 'present':
result = group.modify(**params)
json_output['id'] = result['id']
elif state == 'absent':
result = group.delete(**params)
except (exc.NotFound) as excinfo:
module.fail_json(msg='Failed to update the group, inventory not found: {0}'.format(excinfo), changed=False)
except (exc.ConnectionError, exc.BadRequest, exc.NotFound) as excinfo:
module.fail_json(msg='Failed to update the group: {0}'.format(excinfo), changed=False)
json_output['changed'] = result['changed']
module.exit_json(**json_output)
if __name__ == '__main__':
main()
| gpl-3.0 |
webmasterraj/FogOrNot | flask/lib/python2.7/site-packages/docutils/transforms/misc.py | 183 | 4882 | # $Id: misc.py 6314 2010-04-26 10:04:17Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
Miscellaneous transforms.
"""
__docformat__ = 'reStructuredText'
from docutils import nodes
from docutils.transforms import Transform, TransformError
class CallBack(Transform):
"""
Inserts a callback into a document. The callback is called when the
transform is applied, which is determined by its priority.
For use with `nodes.pending` elements. Requires a ``details['callback']``
entry, a bound method or function which takes one parameter: the pending
node. Other data can be stored in the ``details`` attribute or in the
object hosting the callback method.
"""
default_priority = 990
def apply(self):
pending = self.startnode
pending.details['callback'](pending)
pending.parent.remove(pending)
class ClassAttribute(Transform):
"""
Move the "class" attribute specified in the "pending" node into the
immediately following non-comment element.
"""
default_priority = 210
def apply(self):
pending = self.startnode
parent = pending.parent
child = pending
while parent:
# Check for appropriate following siblings:
for index in range(parent.index(child) + 1, len(parent)):
element = parent[index]
if (isinstance(element, nodes.Invisible) or
isinstance(element, nodes.system_message)):
continue
element['classes'] += pending.details['class']
pending.parent.remove(pending)
return
else:
# At end of section or container; apply to sibling
child = parent
parent = parent.parent
error = self.document.reporter.error(
'No suitable element following "%s" directive'
% pending.details['directive'],
nodes.literal_block(pending.rawsource, pending.rawsource),
line=pending.line)
pending.replace_self(error)
class Transitions(Transform):
"""
Move transitions at the end of sections up the tree. Complain
on transitions after a title, at the beginning or end of the
document, and after another transition.
For example, transform this::
<section>
...
<transition>
<section>
...
into this::
<section>
...
<transition>
<section>
...
"""
default_priority = 830
def apply(self):
for node in self.document.traverse(nodes.transition):
self.visit_transition(node)
def visit_transition(self, node):
index = node.parent.index(node)
error = None
if (index == 0 or
isinstance(node.parent[0], nodes.title) and
(index == 1 or
isinstance(node.parent[1], nodes.subtitle) and
index == 2)):
assert (isinstance(node.parent, nodes.document) or
isinstance(node.parent, nodes.section))
error = self.document.reporter.error(
'Document or section may not begin with a transition.',
source=node.source, line=node.line)
elif isinstance(node.parent[index - 1], nodes.transition):
error = self.document.reporter.error(
'At least one body element must separate transitions; '
'adjacent transitions are not allowed.',
source=node.source, line=node.line)
if error:
# Insert before node and update index.
node.parent.insert(index, error)
index += 1
assert index < len(node.parent)
if index != len(node.parent) - 1:
# No need to move the node.
return
# Node behind which the transition is to be moved.
sibling = node
# While sibling is the last node of its parent.
while index == len(sibling.parent) - 1:
sibling = sibling.parent
# If sibling is the whole document (i.e. it has no parent).
if sibling.parent is None:
# Transition at the end of document. Do not move the
# transition up, and place an error behind.
error = self.document.reporter.error(
'Document may not end with a transition.',
line=node.line)
node.parent.insert(node.parent.index(node) + 1, error)
return
index = sibling.parent.index(sibling)
# Remove the original transition node.
node.parent.remove(node)
# Insert the transition after the sibling.
sibling.parent.insert(index + 1, node)
| gpl-2.0 |
Fusion-Devices/android_kernel_motorola_msm8916 | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/EventClass.py | 4653 | 3596 | # EventClass.py
#
# This is a library defining some event type classes, which could
# be used by other scripts to analyze the perf samples.
#
# Currently there are just a few classes defined as examples:
# PerfEvent is the base class for all perf event samples, PebsEvent
# is a HW-based Intel x86 PEBS event, and users could add more SW/HW
# event classes based on requirements.
import struct
# Event types, user could add more here
EVTYPE_GENERIC = 0
EVTYPE_PEBS = 1 # Basic PEBS event
EVTYPE_PEBS_LL = 2 # PEBS event with load latency info
EVTYPE_IBS = 3
#
# Currently we don't have a good way to tell the event type other than by
# the size of the raw buffer: a raw PEBS event with load-latency data is
# 176 bytes, while a pure PEBS event is 144 bytes.
#
def create_event(name, comm, dso, symbol, raw_buf):
if (len(raw_buf) == 144):
event = PebsEvent(name, comm, dso, symbol, raw_buf)
elif (len(raw_buf) == 176):
event = PebsNHM(name, comm, dso, symbol, raw_buf)
else:
event = PerfEvent(name, comm, dso, symbol, raw_buf)
return event
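# Usage sketch (field values are hypothetical; the buffer length drives the
# size heuristic above):
#
#   ev = create_event("cycles", "bash", "/bin/bash", "main", "\0" * 144)
#   assert isinstance(ev, PebsEvent)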
class PerfEvent(object):
event_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_GENERIC):
self.name = name
self.comm = comm
self.dso = dso
self.symbol = symbol
self.raw_buf = raw_buf
self.ev_type = ev_type
PerfEvent.event_num += 1
def show(self):
print "PMU event: name=%12s, symbol=%24s, comm=%8s, dso=%12s" % (self.name, self.symbol, self.comm, self.dso)
#
# Basic Intel PEBS (Precise Event-based Sampling) event, whose raw buffer
# contains the context info when that event happened: the EFLAGS and
# linear IP info, as well as all the registers.
#
class PebsEvent(PerfEvent):
pebs_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS):
tmp_buf=raw_buf[0:80]
flags, ip, ax, bx, cx, dx, si, di, bp, sp = struct.unpack('QQQQQQQQQQ', tmp_buf)
self.flags = flags
self.ip = ip
self.ax = ax
self.bx = bx
self.cx = cx
self.dx = dx
self.si = si
self.di = di
self.bp = bp
self.sp = sp
PerfEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
PebsEvent.pebs_num += 1
del tmp_buf
#
# Intel Nehalem and Westmere support PEBS plus Load Latency info which lie
# in the four 64 bit words write after the PEBS data:
# Status: records the IA32_PERF_GLOBAL_STATUS register value
# DLA: Data Linear Address (EIP)
# DSE: Data Source Encoding, where the latency happens, hit or miss
# in L1/L2/L3 or IO operations
# LAT: the actual latency in cycles
#
class PebsNHM(PebsEvent):
pebs_nhm_num = 0
def __init__(self, name, comm, dso, symbol, raw_buf, ev_type=EVTYPE_PEBS_LL):
tmp_buf=raw_buf[144:176]
status, dla, dse, lat = struct.unpack('QQQQ', tmp_buf)
self.status = status
self.dla = dla
self.dse = dse
self.lat = lat
PebsEvent.__init__(self, name, comm, dso, symbol, raw_buf, ev_type)
PebsNHM.pebs_nhm_num += 1
del tmp_buf
| gpl-2.0 |
lmtierney/watir-snake | tests/browser/user_editable_tests.py | 1 | 9594 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import pytest
from nerodia.exception import UnknownObjectException, ObjectReadOnlyException, Error
pytestmark = pytest.mark.page('forms_with_input_elements.html')
class TestUserEditableAppend(object):
def test_appends_the_text_to_the_text_field(self, browser):
browser.text_field(name='new_user_occupation').append(' Append This')
assert browser.text_field(name='new_user_occupation').value == 'Developer Append This'
def test_appends_multi_byte_characters(self, browser):
        browser.text_field(name='new_user_occupation').append(' ĳĳ')
        assert browser.text_field(name='new_user_occupation').value == 'Developer ĳĳ'
def test_raises_notimplementederror_if_the_object_is_content_editable_element(self, browser):
with pytest.raises(NotImplementedError) as e:
browser.div(id='contenteditable').append('bar')
assert e.value.args[0] == '#append method is not supported with contenteditable element'
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_append_if_the_object_is_readonly(self, browser):
with pytest.raises(ObjectReadOnlyException):
browser.text_field(id='new_user_code').append('Append This')
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_append_if_the_object_is_disabled(self, browser):
from nerodia.exception import ObjectDisabledException
with pytest.raises(ObjectDisabledException):
browser.text_field(name='new_user_species').append('Append This')
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_append_if_the_object_doesnt_exist(self, browser):
with pytest.raises(UnknownObjectException):
browser.text_field(name='no_such_name').append('Append This')
class TestUserEditableClear(object):
def test_removes_all_text_from_the_text_field(self, browser):
browser.text_field(name='new_user_occupation').clear()
assert browser.text_field(name='new_user_occupation').value == ''
browser.textarea(id='delete_user_comment').clear()
assert browser.textarea(id='delete_user_comment').value == ''
def test_removes_all_text_from_the_content_editable_element(self, browser):
el = browser.div(id='contenteditable')
el.clear()
assert el.text == ''
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_clear_if_the_object_doesnt_exist(self, browser):
with pytest.raises(UnknownObjectException):
browser.text_field(id='no_such_id').clear()
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_clear_if_the_object_is_readonly(self, browser):
with pytest.raises(ObjectReadOnlyException):
browser.text_field(id='new_user_code').clear()
class TestUserEditableValue(object):
def test_sets_the_value_of_the_element(self, browser):
browser.text_field(id='new_user_email').value = 'Hello Cruel World'
assert browser.text_field(id='new_user_email').value == 'Hello Cruel World'
def test_is_able_to_set_multi_byte_characters(self, browser):
        browser.text_field(name='new_user_occupation').value = 'ĳĳ'
        assert browser.text_field(name='new_user_occupation').value == 'ĳĳ'
def test_sets_the_value_of_a_textarea_element(self, browser):
browser.textarea(id='delete_user_comment').value = 'Hello Cruel World'
assert browser.textarea(id='delete_user_comment').value == 'Hello Cruel World'
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_clear_if_the_element_doesnt_exist(self, browser):
with pytest.raises(UnknownObjectException):
browser.text_field(name='no_such_name').clear()
class TestUserEditableSet(object):
def test_sets_the_value_of_the_element(self, browser):
browser.text_field(id='new_user_email').set('Hello Cruel World')
assert browser.text_field(id='new_user_email').value == 'Hello Cruel World'
def test_sets_the_value_of_a_textarea_element(self, browser):
browser.textarea(id='delete_user_comment').set('Hello Cruel World')
assert browser.textarea(id='delete_user_comment').value == 'Hello Cruel World'
def test_sets_the_value_of_a_content_editable_element(self, browser):
el = browser.div(id='contenteditable')
el.set('Bar')
assert el.text == 'Bar'
def test_fires_events(self, browser):
browser.text_field(id='new_user_username').set('Hello World')
assert browser.span(id='current_length').text == '11'
def test_sets_the_value_of_a_password_field(self, browser):
browser.text_field(name='new_user_password').set('secret')
assert browser.text_field(name='new_user_password').value == 'secret'
def test_sets_the_value_when_accessed_through_the_enclosing_form(self, browser):
browser.form(id='new_user').text_field(name='new_user_password').set('secret')
assert browser.form(id='new_user').text_field(name='new_user_password').value == 'secret'
def test_is_able_to_set_multi_byte_characters(self, browser):
        browser.text_field(name='new_user_occupation').set('ĳĳ')
        assert browser.text_field(name='new_user_occupation').value == 'ĳĳ'
def test_sets_the_value_to_a_concatenation_of_multiple_arguments(self, browser):
browser.text_field(id='new_user_email').set('Bye', 'Cruel', 'World')
assert browser.text_field(id='new_user_email').value == 'ByeCruelWorld'
def test_sets_the_value_to_blank_when_no_arguments_are_provided(self, browser):
browser.text_field(id='new_user_email').set()
assert browser.text_field(id='new_user_email').value == ''
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_set_if_the_object_doesnt_exist(self, browser):
with pytest.raises(UnknownObjectException):
browser.text_field(id='no_such_id').set('secret')
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_if_the_object_is_read_only(self, browser):
with pytest.raises(ObjectReadOnlyException):
browser.text_field(id='new_user_code').set('Foo')
class TestUserEditableJsSet(object):
def test_sets_the_value_of_the_element(self, browser):
browser.text_field(id='new_user_email').js_set('Bye Cruel World')
assert browser.text_field(id='new_user_email').value == 'Bye Cruel World'
def test_sets_the_value_of_a_textarea_element(self, browser):
browser.textarea(id='delete_user_comment').js_set('Hello Cruel World')
assert browser.textarea(id='delete_user_comment').value == 'Hello Cruel World'
def test_sets_the_value_of_a_content_editable_element(self, browser):
el = browser.div(id='contenteditable')
el.js_set('foo')
assert el.text == 'foo'
def test_fires_events(self, browser):
browser.text_field(id='new_user_username').js_set('Hello World')
assert browser.span(id='current_length').text == '11'
def test_sets_the_value_of_a_password_field(self, browser):
browser.text_field(name='new_user_password').js_set('secret')
assert browser.text_field(name='new_user_password').value == 'secret'
def test_sets_the_value_when_accessed_through_the_enclosing_form(self, browser):
browser.form(id='new_user').text_field(name='new_user_password').js_set('secret')
assert browser.form(id='new_user').text_field(name='new_user_password').value == 'secret'
def test_is_able_to_set_multi_byte_characters(self, browser):
        browser.text_field(name='new_user_occupation').js_set('ĳĳ')
        assert browser.text_field(name='new_user_occupation').value == 'ĳĳ'
def test_sets_the_value_to_a_concatenation_of_multiple_arguments(self, browser):
browser.text_field(id='new_user_email').js_set('Bye', 'Cruel', 'World')
assert browser.text_field(id='new_user_email').value == 'ByeCruelWorld'
def test_sets_the_value_to_blank_when_no_arguments_are_provided(self, browser):
browser.text_field(id='new_user_email').js_set()
assert browser.text_field(id='new_user_email').value == ''
@pytest.mark.usefixtures('quick_timeout')
def test_raises_correct_exception_for_set_if_the_object_doesnt_exist(self, browser):
with pytest.raises(UnknownObjectException):
browser.text_field(id='no_such_id').js_set('secret')
def test_raises_exception_if_the_value_of_text_field_doesnt_match(self, browser, mocker):
mock = mocker.patch('nerodia.user_editable.UserEditable.value',
new_callable=mocker.PropertyMock)
mock.return_value = 'wrong'
el = browser.text_field(id='new_user_password')
with pytest.raises(Error) as e:
el.js_set('secret')
assert e.value.args[0] == "#js_set value: 'wrong' does not match expected input: 'secret'"
def test_raises_exception_if_the_text_of_content_editable_doesnt_match(self, browser, mocker):
mock = mocker.patch('nerodia.elements.element.Element.text',
new_callable=mocker.PropertyMock)
mock.return_value = 'wrong'
el = browser.div(id='contenteditable')
with pytest.raises(Error) as e:
el.js_set('secret')
assert e.value.args[0] == "#js_set text: 'wrong' does not match expected input: 'secret'"
| mit |
trishnaguha/ansible | lib/ansible/modules/network/aci/aci_domain_to_encap_pool.py | 12 | 11442 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2017, Dag Wieers <dag@wieers.com>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: aci_domain_to_encap_pool
short_description: Bind Domain to Encap Pools (infra:RsVlanNs)
description:
- Bind Domain to Encap Pools on Cisco ACI fabrics.
notes:
- The C(domain) and C(encap_pool) parameters should exist before using this module.
The M(aci_domain) and M(aci_encap_pool) can be used for these.
seealso:
- module: aci_domain
- module: aci_encap_pool
- name: APIC Management Information Model reference
description: More information about the internal APIC class B(infra:RsVlanNs).
link: https://developer.cisco.com/docs/apic-mim-ref/
author:
- Dag Wieers (@dagwieers)
version_added: '2.5'
options:
domain:
description:
- Name of the domain being associated with the Encap Pool.
type: str
aliases: [ domain_name, domain_profile ]
domain_type:
description:
- Determines if the Domain is physical (phys) or virtual (vmm).
type: str
choices: [ fc, l2dom, l3dom, phys, vmm ]
pool:
description:
- The name of the pool.
type: str
aliases: [ pool_name ]
pool_allocation_mode:
description:
- The method used for allocating encaps to resources.
- Only vlan and vsan support allocation modes.
type: str
    choices: [ dynamic, static ]
aliases: [ allocation_mode, mode ]
pool_type:
description:
- The encap type of C(pool).
type: str
required: yes
choices: [ vlan, vsan, vxlan ]
state:
description:
- Use C(present) or C(absent) for adding or removing.
- Use C(query) for listing an object or multiple objects.
type: str
choices: [ absent, present, query ]
default: present
vm_provider:
description:
- The VM platform for VMM Domains.
- Support for Kubernetes was added in ACI v3.0.
- Support for CloudFoundry, OpenShift and Red Hat was added in ACI v3.1.
type: str
choices: [ cloudfoundry, kubernetes, microsoft, openshift, openstack, redhat, vmware ]
extends_documentation_fragment: aci
'''
EXAMPLES = r'''
- name: Add domain to VLAN pool binding
aci_domain_to_encap_pool:
host: apic
username: admin
password: SomeSecretPassword
domain: phys_dom
domain_type: phys
pool: test_pool
pool_type: vlan
pool_allocation_mode: dynamic
state: present
delegate_to: localhost
- name: Remove domain to VLAN pool binding
aci_domain_to_encap_pool:
host: apic
username: admin
password: SomeSecretPassword
domain: phys_dom
domain_type: phys
pool: test_pool
pool_type: vlan
pool_allocation_mode: dynamic
state: absent
delegate_to: localhost
- name: Query our domain to VLAN pool binding
aci_domain_to_encap_pool:
host: apic
username: admin
password: SomeSecretPassword
domain: phys_dom
pool: test_pool
pool_type: vlan
pool_allocation_mode: dynamic
state: query
delegate_to: localhost
register: query_result
- name: Query all domain to VLAN pool bindings
aci_domain_to_encap_pool:
host: apic
username: admin
password: SomeSecretPassword
domain_type: phys
pool_type: vlan
pool_allocation_mode: dynamic
state: query
delegate_to: localhost
register: query_result
'''
RETURN = r'''
current:
description: The existing configuration from the APIC after the module has finished
returned: success
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
error:
description: The error information as returned from the APIC
returned: failure
type: dict
sample:
{
"code": "122",
"text": "unknown managed object class foo"
}
raw:
description: The raw output returned by the APIC REST API (xml or json)
returned: parse error
type: str
sample: '<?xml version="1.0" encoding="UTF-8"?><imdata totalCount="1"><error code="122" text="unknown managed object class foo"/></imdata>'
sent:
description: The actual/minimal configuration pushed to the APIC
returned: info
type: list
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment"
}
}
}
previous:
description: The original configuration from the APIC before the module has started
returned: info
type: list
sample:
[
{
"fvTenant": {
"attributes": {
"descr": "Production",
"dn": "uni/tn-production",
"name": "production",
"nameAlias": "",
"ownerKey": "",
"ownerTag": ""
}
}
}
]
proposed:
description: The assembled configuration from the user-provided parameters
returned: info
type: dict
sample:
{
"fvTenant": {
"attributes": {
"descr": "Production environment",
"name": "production"
}
}
}
filter_string:
description: The filter string used for the request
returned: failure or debug
type: str
sample: ?rsp-prop-include=config-only
method:
description: The HTTP method used for the request to the APIC
returned: failure or debug
type: str
sample: POST
response:
description: The HTTP response from the APIC
returned: failure or debug
type: str
sample: OK (30 bytes)
status:
description: The HTTP status from the APIC
returned: failure or debug
type: int
sample: 200
url:
description: The HTTP url used for the request to the APIC
returned: failure or debug
type: str
sample: https://10.11.12.13/api/mo/uni/tn-production.json
'''
from ansible.module_utils.network.aci.aci import ACIModule, aci_argument_spec
from ansible.module_utils.basic import AnsibleModule
VM_PROVIDER_MAPPING = dict(
cloudfoundry='CloudFoundry',
kubernetes='Kubernetes',
microsoft='Microsoft',
openshift='OpenShift',
openstack='OpenStack',
redhat='Redhat',
vmware='VMware',
)
POOL_MAPPING = dict(
vlan=dict(
aci_mo='uni/infra/vlanns-{0}',
child_class='infraRsVlanNs',
),
vxlan=dict(
aci_mo='uni/infra/vxlanns-{0}',
child_class='vmmRsVxlanNs',
),
vsan=dict(
aci_mo='uni/infra/vsanns-{0}',
child_class='fcRsVsanNs',
),
)
def main():
argument_spec = aci_argument_spec()
argument_spec.update(
domain=dict(type='str', aliases=['domain_name', 'domain_profile']),
domain_type=dict(type='str', choices=['fc', 'l2dom', 'l3dom', 'phys', 'vmm']),
pool=dict(type='str', aliases=['pool_name']),
pool_allocation_mode=dict(type='str', aliases=['allocation_mode', 'mode'], choices=['dynamic', 'static']),
pool_type=dict(type='str', required=True, choices=['vlan', 'vsan', 'vxlan']),
state=dict(type='str', default='present', choices=['absent', 'present', 'query']),
vm_provider=dict(type='str', choices=['cloudfoundry', 'kubernetes', 'microsoft', 'openshift', 'openstack', 'redhat', 'vmware']),
)
module = AnsibleModule(
argument_spec=argument_spec,
supports_check_mode=True,
required_if=[
['domain_type', 'vmm', ['vm_provider']],
['state', 'absent', ['domain', 'domain_type', 'pool', 'pool_type']],
['state', 'present', ['domain', 'domain_type', 'pool', 'pool_type']],
],
)
domain = module.params['domain']
domain_type = module.params['domain_type']
pool = module.params['pool']
pool_allocation_mode = module.params['pool_allocation_mode']
pool_type = module.params['pool_type']
vm_provider = module.params['vm_provider']
state = module.params['state']
# Report when vm_provider is set when type is not virtual
if domain_type != 'vmm' and vm_provider is not None:
module.fail_json(msg="Domain type '{0}' cannot have a 'vm_provider'".format(domain_type))
# ACI Pool URL requires the allocation mode for vlan and vsan pools (ex: uni/infra/vlanns-[poolname]-static)
pool_name = pool
if pool_type != 'vxlan' and pool is not None:
if pool_allocation_mode is not None:
pool_name = '[{0}]-{1}'.format(pool, pool_allocation_mode)
else:
module.fail_json(msg="ACI requires the 'pool_allocation_mode' for 'pool_type' of 'vlan' and 'vsan' when 'pool' is provided")
# Vxlan pools do not support allocation modes
if pool_type == 'vxlan' and pool_allocation_mode is not None:
module.fail_json(msg='vxlan pools do not support setting the allocation_mode; please remove this parameter from the task')
# Compile the full domain for URL building
if domain_type == 'fc':
domain_class = 'fcDomP'
domain_mo = 'uni/fc-{0}'.format(domain)
domain_rn = 'fc-{0}'.format(domain)
    elif domain_type == 'l2dom':
domain_class = 'l2extDomP'
domain_mo = 'uni/l2dom-{0}'.format(domain)
domain_rn = 'l2dom-{0}'.format(domain)
    elif domain_type == 'l3dom':
domain_class = 'l3extDomP'
domain_mo = 'uni/l3dom-{0}'.format(domain)
domain_rn = 'l3dom-{0}'.format(domain)
elif domain_type == 'phys':
domain_class = 'physDomP'
domain_mo = 'uni/phys-{0}'.format(domain)
domain_rn = 'phys-{0}'.format(domain)
elif domain_type == 'vmm':
domain_class = 'vmmDomP'
domain_mo = 'uni/vmmp-{0}/dom-{1}'.format(VM_PROVIDER_MAPPING[vm_provider], domain)
domain_rn = 'vmmp-{0}/dom-{1}'.format(VM_PROVIDER_MAPPING[vm_provider], domain)
# Ensure that querying all objects works when only domain_type is provided
if domain is None:
domain_mo = None
pool_mo = POOL_MAPPING[pool_type]['aci_mo'].format(pool_name)
child_class = POOL_MAPPING[pool_type]['child_class']
aci = ACIModule(module)
aci.construct_url(
root_class=dict(
aci_class=domain_class,
aci_rn=domain_rn,
module_object=domain_mo,
target_filter={'name': domain},
),
child_classes=[child_class],
)
aci.get_existing()
if state == 'present':
# Filter out module params with null values
aci.payload(
aci_class=domain_class,
class_config=dict(name=domain),
child_configs=[
{child_class: {'attributes': {'tDn': pool_mo}}},
]
)
# Generate config diff which will be used as POST request body
aci.get_diff(aci_class=domain_class)
# Submit changes if module not in check_mode and the proposed is different than existing
aci.post_config()
elif state == 'absent':
aci.delete_config()
aci.exit_json()
if __name__ == "__main__":
main()
| gpl-3.0 |
nickpandolfi/Cyther | cyther/project.py | 1 | 2068 |
"""
This module deals with operations regarding individual cyther projects
"""
import os
from .tools import get_input
from .pathway import path, ISDIR
from .definitions import CACHE_NAME
def assure_cache(project_path=None):
"""
Assure that a project directory has a cache folder.
If not, it will create it.
"""
project_path = path(project_path, ISDIR)
cache_path = os.path.join(project_path, CACHE_NAME)
if not os.path.isdir(cache_path):
os.mkdir(cache_path)
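# Example (sketch): guarantee the cache folder exists for the current working
# directory before a build step.
#
#   assure_cache(os.getcwd())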
def clean_project():
"""
    Clean a project of anything cyther-related that is not essential to a build
"""
pass
def purge_project():
"""
    Purge a directory of anything cyther-related
"""
print('Current Directory: {}'.format(os.getcwd()))
directories = os.listdir(os.getcwd())
if CACHE_NAME in directories:
response = get_input("Would you like to delete the cache and"
"everything in it? [y/n]: ", ('y', 'n'))
if response == 'y':
print("Listing local '__cythercache__':")
cache_dir = os.path.join(os.getcwd(), "__cythercache__")
to_delete = []
contents = os.listdir(cache_dir)
if contents:
for filename in contents:
print('\t' + filename)
filepath = os.path.join(cache_dir, filename)
to_delete.append(filepath)
else:
print("\tNothing was found in the cache")
check_response = get_input("Delete all these files? (^)"
"[y/n]: ", ('y', 'n'))
if check_response == 'y':
for filepath in to_delete:
os.remove(filepath)
os.rmdir(cache_dir)
else:
print("Skipping the deletion... all files are fine!")
else:
print("Skipping deletion of the cache")
else:
print("Couldn't find a cache file ('{}') in this "
"directory".format(CACHE_NAME))
| mit |
noba3/KoTos | addons/script.module.youtube.dl/lib/youtube_dl/extractor/firstpost.py | 124 | 1785 | from __future__ import unicode_literals
from .common import InfoExtractor
class FirstpostIE(InfoExtractor):
_VALID_URL = r'http://(?:www\.)?firstpost\.com/[^/]+/.*-(?P<id>[0-9]+)\.html'
_TEST = {
'url': 'http://www.firstpost.com/india/india-to-launch-indigenous-aircraft-carrier-monday-1025403.html',
'md5': 'ee9114957692f01fb1263ed87039112a',
'info_dict': {
'id': '1025403',
'ext': 'mp4',
'title': 'India to launch indigenous aircraft carrier INS Vikrant today',
'description': 'md5:feef3041cb09724e0bdc02843348f5f4',
}
}
def _real_extract(self, url):
video_id = self._match_id(url)
page = self._download_webpage(url, video_id)
title = self._html_search_meta('twitter:title', page, 'title', fatal=True)
description = self._html_search_meta('twitter:description', page, 'title')
data = self._download_xml(
'http://www.firstpost.com/getvideoxml-%s.xml' % video_id, video_id,
'Downloading video XML')
item = data.find('./playlist/item')
thumbnail = item.find('./image').text
formats = [
{
'url': details.find('./file').text,
'format_id': details.find('./label').text.strip(),
'width': int(details.find('./width').text.strip()),
'height': int(details.find('./height').text.strip()),
} for details in item.findall('./source/file_details') if details.find('./file').text
]
self._sort_formats(formats)
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': thumbnail,
'formats': formats,
}
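# A hedged usage sketch: youtube-dl picks this extractor automatically for
# matching URLs, so exercising it looks roughly like (no download performed):
#
#     from youtube_dl import YoutubeDL
#     ydl = YoutubeDL({'skip_download': True})
#     info = ydl.extract_info(
#         'http://www.firstpost.com/india/india-to-launch-indigenous-'
#         'aircraft-carrier-monday-1025403.html', download=False)
#     print(info['title'])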
| gpl-2.0 |
zetaops/ulakbus | ulakbus/services/personel/hitap/hizmet_istisnai_ilgi_ekle.py | 1 | 1237 | # -*- coding: utf-8 -*-
# Copyright (C) 2015 ZetaOps Inc.
#
# This file is licensed under the GNU General Public License v3
# (GPLv3). See LICENSE.txt for details.
"""HITAP Istisnai Ilgi Ekle
Hitap'a personelin Istisnai Ilgi bilgilerinin eklemesini yapar.
"""
from ulakbus.services.personel.hitap.hitap_service import ZatoHitapService
class HizmetIstisnaiIlgiEkle(ZatoHitapService):
"""
    Exceptional service record insertion service, derived from the HITAP insert service
"""
HAS_CHANNEL = True
service_dict = {
'service_name': 'hizmetIstisnaiIlgiInsert',
'fields': {
'kayitNo': 'kayit_no',
'tckn': 'tckn',
'istisnaiIlgiNevi': 'istisnai_ilgi_nevi',
'baslamaTarihi': 'baslama_tarihi',
'bitisTarihi': 'bitis_tarihi',
'gunSayisi': 'gun_sayisi',
'khaDurum': 'kha_durum',
'kurumOnayTarihi': 'kurum_onay_tarihi'
},
'date_filter': ['baslama_tarihi', 'bitis_tarihi', 'kurum_onay_tarihi'],
'long_to_string': ['kayit_no'],
'required_fields': ['tckn', 'istisnaiIlgiNevi', 'baslamaTarihi',
'bitisTarihi', 'gunSayisi', 'khaDurum', 'kurumOnayTarihi']
}
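    # For illustration, the 'fields' mapping above translates HITAP's
    # camelCase element names into the snake_case keys used internally,
    # e.g. a response fragment {'istisnaiIlgiNevi': 1, 'gunSayisi': 30}
    # would surface as {'istisnai_ilgi_nevi': 1, 'gun_sayisi': 30}
    # (the exact mechanics live in ZatoHitapService; this is an assumption).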
| gpl-3.0 |
cernops/rally | tests/unit/plugins/openstack/scenarios/ec2/test_servers.py | 15 | 1232 | # All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from rally.plugins.openstack.scenarios.ec2 import servers
from tests.unit import test
class EC2ServersTestCase(test.ScenarioTestCase):
def test_list_servers(self):
scenario = servers.EC2Servers()
scenario._list_servers = mock.MagicMock()
scenario.list_servers()
scenario._list_servers.assert_called_once_with()
def test_boot_server(self):
scenario = servers.EC2Servers(self.context)
scenario._boot_servers = mock.Mock()
scenario.boot_server("foo_image", "foo_flavor", foo="bar")
scenario._boot_servers.assert_called_once_with(
"foo_image", "foo_flavor", foo="bar")
| apache-2.0 |
callmeonlyashu/MEANJS | node_modules/accessibility-developer-tools/scripts/parse_aria_schemas.py | 381 | 3069 | import json
import re
import urllib
import xml.etree.ElementTree as ET
def parse_attributes():
schema = urllib.urlopen('http://www.w3.org/MarkUp/SCHEMA/aria-attributes-1.xsd')
tree = ET.parse(schema)
for node in tree.iter():
node.tag = re.sub(r'{.*}', r'', node.tag)
type_map = {
'states': 'state',
'props': 'property'
}
properties = {}
groups = tree.getroot().findall('attributeGroup')
print groups
for group in groups:
print(group.get('name'))
name_match = re.match(r'ARIA\.(\w+)\.attrib', group.get('name'))
if not name_match:
continue
group_type = name_match.group(1)
print group_type
if group_type not in type_map:
continue
type = type_map[group_type]
for child in group:
name = re.sub(r'aria-', r'', child.attrib['name'])
property = {}
property['type'] = type
if 'type' in child.attrib:
valueType = re.sub(r'xs:', r'', child.attrib['type'])
if valueType == 'IDREF':
property['valueType'] = 'idref'
elif valueType == 'IDREFS':
property['valueType'] = 'idref_list'
else:
property['valueType'] = valueType
else:
type_spec = child.findall('simpleType')[0]
restriction_spec = type_spec.findall('restriction')[0]
base = restriction_spec.attrib['base']
if base == 'xs:NMTOKENS':
property['valueType'] = 'token_list'
elif base == 'xs:NMTOKEN':
property['valueType'] = 'token'
else:
raise Exception('Unknown value type: %s' % base)
values = []
for value_type in restriction_spec:
values.append(value_type.get('value'))
property['values'] = values
if 'default' in child.attrib:
property['defaultValue'] = child.attrib['default']
properties[name] = property
return json.dumps(properties, sort_keys=True, indent=4, separators=(',', ': '))
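# For illustration, a single parsed attribute comes out shaped roughly like
# (values paraphrased from the ARIA schema, not byte-exact):
#
#     "expanded": {
#         "type": "state",
#         "valueType": "token",
#         "values": ["true", "false", "undefined"],
#         "defaultValue": "undefined"
#     }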
if __name__ == "__main__":
attributes_json = parse_attributes()
constants_file = open('src/js/Constants.js', 'r')
new_constants_file = open('src/js/Constants.new.js', 'w')
in_autogen_block = False
for line in constants_file:
if not in_autogen_block:
new_constants_file.write('%s' % line)
if re.match(r'// BEGIN ARIA_PROPERTIES_AUTOGENERATED', line):
in_autogen_block = True
if re.match(r'// END ARIA_PROPERTIES_AUTOGENERATED', line):
break
new_constants_file.write('/** @type {Object.<string, Object>} */\n')
new_constants_file.write('axs.constants.ARIA_PROPERTIES = %s;\n' % attributes_json)
new_constants_file.write('// END ARIA_PROPERTIES_AUTOGENERATED\n')
for line in constants_file:
new_constants_file.write('%s' % line)
| mit |
Teagan42/home-assistant | homeassistant/components/modem_callerid/sensor.py | 5 | 3580 | """A sensor for incoming calls using a USB modem that supports caller ID."""
import logging
from basicmodem.basicmodem import BasicModem as bm
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
CONF_DEVICE,
CONF_NAME,
EVENT_HOMEASSISTANT_STOP,
STATE_IDLE,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "Modem CallerID"
ICON = "mdi:phone-classic"
DEFAULT_DEVICE = "/dev/ttyACM0"
STATE_RING = "ring"
STATE_CALLERID = "callerid"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_DEVICE, default=DEFAULT_DEVICE): cv.string,
}
)
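# A minimal example configuration.yaml entry for this platform (the device
# path is an assumption; both keys are optional and default as above):
#
#     sensor:
#       - platform: modem_callerid
#         name: Home Phone
#         device: /dev/ttyACM0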
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up modem caller ID sensor platform."""
name = config.get(CONF_NAME)
port = config.get(CONF_DEVICE)
modem = bm(port)
if modem.state == modem.STATE_FAILED:
_LOGGER.error("Unable to initialize modem.")
return
add_entities([ModemCalleridSensor(hass, name, port, modem)])
class ModemCalleridSensor(Entity):
"""Implementation of USB modem caller ID sensor."""
def __init__(self, hass, name, port, modem):
"""Initialize the sensor."""
self._attributes = {"cid_time": 0, "cid_number": "", "cid_name": ""}
self._name = name
self.port = port
self.modem = modem
self._state = STATE_IDLE
modem.registercallback(self._incomingcallcallback)
hass.bus.listen_once(EVENT_HOMEASSISTANT_STOP, self._stop_modem)
def set_state(self, state):
"""Set the state."""
self._state = state
def set_attributes(self, attributes):
"""Set the state attributes."""
self._attributes = attributes
@property
def should_poll(self):
"""No polling needed."""
return False
@property
def icon(self):
"""Return icon."""
return ICON
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def device_state_attributes(self):
"""Return the state attributes."""
return self._attributes
def _stop_modem(self, event):
"""HA is shutting down, close modem port."""
if self.modem:
self.modem.close()
self.modem = None
def _incomingcallcallback(self, newstate):
"""Handle new states."""
if newstate == self.modem.STATE_RING:
if self.state == self.modem.STATE_IDLE:
att = {
"cid_time": self.modem.get_cidtime,
"cid_number": "",
"cid_name": "",
}
self.set_attributes(att)
self._state = STATE_RING
self.schedule_update_ha_state()
elif newstate == self.modem.STATE_CALLERID:
att = {
"cid_time": self.modem.get_cidtime,
"cid_number": self.modem.get_cidnumber,
"cid_name": self.modem.get_cidname,
}
self.set_attributes(att)
self._state = STATE_CALLERID
self.schedule_update_ha_state()
elif newstate == self.modem.STATE_IDLE:
self._state = STATE_IDLE
self.schedule_update_ha_state()
| apache-2.0 |
telefonicaid/fiware-sdc | test/acceptance/commons/provisioning_steps.py | 2 | 8547 | __author__ = 'jfernandez'
from lettuce import step, world
from commons.rest_utils import RestUtils
from commons.utils import body_model_to_body_request, response_body_to_dict, generate_product_instance_id, \
wait_for_task_finished
from commons.constants import *
from commons.configuration import CONFIG_VM_HOSTNAME, CONFIG_VM_IP, CONFIG_VM_FQN
from nose.tools import assert_equals, assert_true, assert_in
from commons.installation_body import simple_installation_body, installation_body_with_attributes
import re
class ProvisioningSteps():
api_utils = RestUtils()
    def __init__(self):
        pass
@staticmethod
def and_a_vm_with_this_parameters(step):
"""
        Given step. Configures the VM parameters, loading data from the config file when a value is "CONFIG_FILE".
        Sets values from the step hash into world:
        world.vm_hostname
        world.vm_ip
        world.vm_fqn
        world.vm_ostype
:param step: Lettuce step data
:return: Values in lettuce world.
"""
if "hostname" in step.hashes[0]:
hostname = step.hashes[0]['hostname']
world.vm_hostname = CONFIG_VM_HOSTNAME if CONFIG_FILE == hostname else hostname
if "ip" in step.hashes[0]:
ip = step.hashes[0]['ip']
world.vm_ip = CONFIG_VM_IP if CONFIG_FILE == ip else ip
if "fqn" in step.hashes[0]:
fqn = step.hashes[0]['fqn']
world.vm_fqn = CONFIG_VM_FQN if CONFIG_FILE == fqn else fqn
if "ostype" in step.hashes[0]:
            world.vm_ostype = step.hashes[0]['ostype']
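        # For illustration, a matching feature-file table might look like
        # (values are hypothetical; CONFIG_FILE pulls from configuration):
        #
        #     | hostname | ip          | fqn                  | ostype |
        #     | testvm   | CONFIG_FILE | fqn.customer.testvm  | Debian |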
def i_install_the_product_in_the_vm(self, step):
"""
Install a product in a virtual machine.
:param step: Lettuce step data
:return: The response is set into world.response
"""
if world.instance_attributes is None:
body_model = simple_installation_body(product_name=world.product_name, product_version=world.product_version,
hostname=world.vm_hostname, ip=world.vm_ip, fqn=world.vm_fqn,
ostype=world.vm_ostype)
else:
body_model = installation_body_with_attributes(product_name=world.product_name,
product_version=world.product_version,
hostname=world.vm_hostname, ip=world.vm_ip,
fqn=world.vm_fqn, ostype=world.vm_ostype,
attributes=world.instance_attributes)
body = body_model_to_body_request(body_model, world.headers[CONTENT_TYPE],
body_model_root_element=PRODUCT_INSTANCE)
world.response = self.api_utils.install_product(headers=world.headers, vdc_id=world.tenant_id, body=body)
@staticmethod
def task_is_created(step):
"""
Checks if the task is created with the correct info
:param step: Lettuce step
:return: In world.task_id, the task id created
"""
assert_true(world.response.ok, 'RESPONSE: {}'.format(world.response.content))
response_headers = world.response.headers
assert_in(response_headers[CONTENT_TYPE], world.headers[ACCEPT_HEADER],
'RESPONSE HEADERS: {}'.format(world.response.headers))
response_body = response_body_to_dict(world.response, world.headers[ACCEPT_HEADER], with_attributes=True,
xml_root_element_name=TASK)
assert_equals(response_body[TASK_STATUS], TASK_STATUS_VALUE_RUNNING)
assert_in(world.product_name, response_body[DESCRIPTION])
assert_in(world.vm_hostname, response_body[DESCRIPTION])
assert_equals(world.tenant_id, response_body[TASK_VDC])
m = re.search('/task/(.*)$', response_body[TASK_HREF])
world.task_id = m.group(1)
def the_product_is_instantiated(self, step):
"""
Checks if the product is instantiated with the correct data (values from world)
:param step: Lettuce step data
:return: world.instance_status with the installation status
"""
world.instance_id = generate_product_instance_id(world.vm_fqn, world.product_name, world.product_version)
response = self.api_utils.retrieve_product_instance(headers=world.headers, vdc_id=world.tenant_id,
product_instance_id=world.instance_id)
assert_true(response.ok, 'RESPONSE: {}'.format(response))
response_body = response_body_to_dict(response, world.headers[ACCEPT_HEADER],
xml_root_element_name=PRODUCT_INSTANCE)
assert_equals(response_body[PRODUCT_INSTANCE_NAME], world.instance_id)
assert_true(response_body[PRODUCT_INSTANCE_STATUS] != "")
assert_equals(response_body[PRODUCT_INSTANCE_VM][PRODUCT_INSTANCE_VM_FQN], world.vm_fqn)
assert_equals(response_body[PRODUCT_INSTANCE_VM][PRODUCT_INSTANCE_VM_HOSTNAME], world.vm_hostname)
ip_aux = "" if world.vm_ip is None else world.vm_ip
assert_equals(response_body[PRODUCT_INSTANCE_VM][PRODUCT_INSTANCE_VM_IP], ip_aux)
assert_equals(response_body[PRODUCT][VERSION], world.product_version)
assert_equals(response_body[PRODUCT][PRODUCT_NAME], world.product_name)
# If the instance has been created with attributes, check it.
if world.instance_attributes is not None:
            # Check whether each attribute has a type; if not, add the
            # default 'plain' type before comparing.
for attribute in world.instance_attributes:
if ATTRIBUTE_TYPE not in attribute:
attribute.update({ATTRIBUTE_TYPE: ATTRIBUTE_TYPE_PLAIN})
world.instance_attributes = world.instance_attributes[0] \
if len(world.instance_attributes) == 1 else world.instance_attributes
assert_equals(response_body[PRODUCT_INSTANCE_ATTRIBUTES], world.instance_attributes)
world.instance_status = response_body[PRODUCT_INSTANCE_STATUS]
def the_product_installation_status_is(self, step, status):
"""
        Checks the product installation status.
:param step: Lettuce data
:param status: Expected status to check
:return:
"""
if world.instance_status is None:
world.instance_id = "{}_{}_{}".format(world.vm_fqn, world.product_name, world.product_version)
response = self.api_utils.retrieve_product_instance(headers=world.headers, vdc_id=world.tenant_id,
product_instance_id=world.instance_id)
assert_true(response.ok, 'RESPONSE: {}'.format(response))
response_body = response_body_to_dict(response, world.headers[ACCEPT_HEADER],
xml_root_element_name=PRODUCT_INSTANCE)
world.instance_status = response_body[PRODUCT_INSTANCE_STATUS]
assert_equals(world.instance_status, status)
def i_uninstall_a_installed_product_and_release(self, step):
"""
Uninstal a product. Use data from world
world.vm_fqn, world.product_name, world.product_version, world.tenant_id
:return: Response in world.response and instance_id in world.instance_id
"""
world.instance_id = generate_product_instance_id(world.vm_fqn, world.product_name, world.product_version)
world.response = self.api_utils.uninstall_product_by_product_instance_id(headers=world.headers,
vdc_id=world.tenant_id,
product_instance_id=world.instance_id)
@staticmethod
def the_task_has_finished_with_status_group1(step, status):
"""
Wait for task's result and check that this one is the expected status
:param step: Lettuce steps
:param status: Expected status
:return:
"""
finished = wait_for_task_finished(vdc_id=world.tenant_id, task_id=world.task_id,
status_to_be_finished=status, headers=world.headers)
assert_true(finished, 'Task is not in the correct status. Expected: {}'.format(status))
| apache-2.0 |
Neurosim-lab/netpyne | doc/source/conf.py | 1 | 8550 | # -*- coding: utf-8 -*-
#
# netpyne_doc documentation build configuration file, created by
# sphinx-quickstart on Tue Dec 29 13:37:18 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.viewcode',
'sphinx.ext.napoleon',
'sphinx.ext.autosummary',
'autodocsumm',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'NetPyNE Documentation'
copyright = '' #2019, Salvador Dura-Bernal (Neurosim lab)'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '1.0.0.2'
# The full version, including alpha/beta/rc tags.
release = '1.0.0.2'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
html_theme_options = {'body_max_width': None,
'display_version': False}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = 'NetPyNE documentation'
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
html_logo = 'figs/logo.png'
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
html_show_sphinx = False
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
html_show_copyright = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'netpyne_docdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'netpyne_doc.tex', 'netpyne\\_doc Documentation',
'Salvador Dura-Bernal (Neurosim lab)', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'netpyne_doc', 'netpyne_doc Documentation',
['Salvador Dura-Bernal (Neurosim lab)'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'netpyne_doc', 'netpyne_doc Documentation',
'Salvador Dura-Bernal (Neurosim lab)', 'netpyne_doc', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| mit |
saurabhbajaj207/CarpeDiem | venv/Lib/site-packages/pip/_vendor/lockfile/mkdirlockfile.py | 536 | 3096 | from __future__ import absolute_import, division
import time
import os
import sys
import errno
from . import (LockBase, LockFailed, NotLocked, NotMyLock, LockTimeout,
AlreadyLocked)
class MkdirLockFile(LockBase):
"""Lock file by creating a directory."""
def __init__(self, path, threaded=True, timeout=None):
"""
>>> lock = MkdirLockFile('somefile')
>>> lock = MkdirLockFile('somefile', threaded=False)
"""
LockBase.__init__(self, path, threaded, timeout)
# Lock file itself is a directory. Place the unique file name into
# it.
self.unique_name = os.path.join(self.lock_file,
"%s.%s%s" % (self.hostname,
self.tname,
self.pid))
def acquire(self, timeout=None):
timeout = timeout if timeout is not None else self.timeout
end_time = time.time()
if timeout is not None and timeout > 0:
end_time += timeout
if timeout is None:
wait = 0.1
else:
wait = max(0, timeout / 10)
while True:
try:
os.mkdir(self.lock_file)
except OSError:
err = sys.exc_info()[1]
if err.errno == errno.EEXIST:
# Already locked.
if os.path.exists(self.unique_name):
# Already locked by me.
return
if timeout is not None and time.time() > end_time:
if timeout > 0:
raise LockTimeout("Timeout waiting to acquire"
" lock for %s" %
self.path)
else:
# Someone else has the lock.
raise AlreadyLocked("%s is already locked" %
self.path)
time.sleep(wait)
else:
# Couldn't create the lock for some other reason
raise LockFailed("failed to create %s" % self.lock_file)
else:
open(self.unique_name, "wb").close()
return
def release(self):
if not self.is_locked():
raise NotLocked("%s is not locked" % self.path)
elif not os.path.exists(self.unique_name):
raise NotMyLock("%s is locked, but not by me" % self.path)
os.unlink(self.unique_name)
os.rmdir(self.lock_file)
def is_locked(self):
return os.path.exists(self.lock_file)
def i_am_locking(self):
return (self.is_locked() and
os.path.exists(self.unique_name))
def break_lock(self):
if os.path.exists(self.lock_file):
for name in os.listdir(self.lock_file):
os.unlink(os.path.join(self.lock_file, name))
os.rmdir(self.lock_file)
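# A short usage sketch (path is hypothetical): acquire() waits up to
# `timeout` seconds for the lock directory, release() removes it again:
#
#     lock = MkdirLockFile('/tmp/somefile', timeout=5)
#     lock.acquire()
#     try:
#         pass  # critical section
#     finally:
#         lock.release()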
| mit |
oy-vey/algorithms-and-data-structures | 6-GenomeAssemblyProgrammingChallenge/Week2/universal_bstring.py | 1 | 3197 | # python3
class Edge:
def __init__(self, u, v, value):
self.u = u
self.v = v
self.value = value
class Graph:
def __init__(self):
self.edges = []
#self.incoming_edges = []
        self.graph = dict()  # keyed by vertex label; was: [[] for _ in range(n)]
# self.incoming_graph = [[] for _ in range(n)]
def add_edge(self, from_, to, value):
edge = Edge(from_, to, value)
if self.graph.get(from_) is not None:
self.graph[from_].append(len(self.edges))
else:
self.graph[from_] = [len(self.edges)]
self.edges.append(edge)
#self.incoming_graph[to].append(len(self.incoming_edges))
#self.incoming_edges.append(edge)
def size(self):
return len(self.graph)
def get_ids(self, from_):
return self.graph[from_]
# def get_incoming_ids(self, to):
# return self.incoming_graph[to]
def get_edge(self, id):
return self.edges[id]
# def get_incoming_edge(self, id):
# return self.incoming_edges[id]
def read_data():
k = int(input())
frmt = '{0:0' + str(k) + 'b}'
graph = Graph()
for i in range(2 ** k):
kmer = frmt.format(i)
u, v, value = kmer[:-1], kmer[1:], kmer
graph.add_edge(u,v, value)
return graph, 2 ** k, graph.size(), k
def find_next_start(used_vertices):
for vertex, is_used in used_vertices.items():
if is_used and find_unused_edge(vertex) is not None:
return vertex
def find_unused_edge(vertex):
global used_edges
for o_id in graph.get_ids(vertex):
if not used_edges[o_id]:
return o_id
def find_a_cycle(start):
global used_edges
global used_vertices
v = '-1'
s = start
used_vertices[s] = True
cycle = [s]
while v != start:
o_id = find_unused_edge(s)
v = graph.get_edge(o_id).v
used_edges[o_id] = True
used_vertices[v] = True
s = v
cycle.append(s)
cycle = cycle[:-1]
return cycle
def get_result(order_dict, start):
res = []
res.append(start)
for v in order_dict[start][1:]:
if order_dict.get(v) is None or v in res:
res.append(v)
else:
res.extend(get_result(order_dict, v))
res.append(start)
return res
def eulerian_cycle(graph, k):
global used_edges
global used_vertices
global order
#zero_kmer = '0' * (k - 1)
start = '0' * (k - 1)
#order.append(str(start))
while False in used_edges:
cycle = find_a_cycle(start)
if order.get(start) is None:
order[start] = cycle
else:
order[start] = order[start] + cycle
start = find_next_start(used_vertices)
if start is None:
break
result = get_result(order, '0' * (k-1))[:-1]
for idx, r in enumerate(result):
if idx != 0:
result[idx] = result[idx][-1]
print(''.join(result))
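# The driver below builds a de Bruijn graph: vertices are the 2**(k-1)
# binary (k-1)-mers, edges are the 2**k k-mers, and an Eulerian cycle
# through it spells out a k-universal (de Bruijn) binary string of
# length 2**k.  For k = 2, for example, a valid output is "0011"
# (any string containing 00, 01, 10 and 11 cyclically would do).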
graph, edge_count, vertex_count, k = read_data()
used_edges = [False] * edge_count
used_vertices = {}
for key in graph.graph.keys():
used_vertices[key] = False
order = {}
eulerian_cycle(graph, k) | mit |
Azure/azure-sdk-for-python | sdk/resources/azure-mgmt-resource/azure/mgmt/resource/resources/v2021_01_01/aio/_resource_management_client.py | 1 | 6210 | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Optional, TYPE_CHECKING
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core import AsyncARMPipelineClient
from msrest import Deserializer, Serializer
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
from ._configuration import ResourceManagementClientConfiguration
from .operations import Operations
from .operations import DeploymentsOperations
from .operations import ProvidersOperations
from .operations import ProviderResourceTypesOperations
from .operations import ResourcesOperations
from .operations import ResourceGroupsOperations
from .operations import TagsOperations
from .operations import DeploymentOperationsOperations
from .. import models
class ResourceManagementClient(object):
"""Provides operations for working with resources and resource groups.
:ivar operations: Operations operations
:vartype operations: azure.mgmt.resource.resources.v2021_01_01.aio.operations.Operations
:ivar deployments: DeploymentsOperations operations
:vartype deployments: azure.mgmt.resource.resources.v2021_01_01.aio.operations.DeploymentsOperations
:ivar providers: ProvidersOperations operations
:vartype providers: azure.mgmt.resource.resources.v2021_01_01.aio.operations.ProvidersOperations
:ivar provider_resource_types: ProviderResourceTypesOperations operations
:vartype provider_resource_types: azure.mgmt.resource.resources.v2021_01_01.aio.operations.ProviderResourceTypesOperations
:ivar resources: ResourcesOperations operations
:vartype resources: azure.mgmt.resource.resources.v2021_01_01.aio.operations.ResourcesOperations
:ivar resource_groups: ResourceGroupsOperations operations
:vartype resource_groups: azure.mgmt.resource.resources.v2021_01_01.aio.operations.ResourceGroupsOperations
:ivar tags: TagsOperations operations
:vartype tags: azure.mgmt.resource.resources.v2021_01_01.aio.operations.TagsOperations
:ivar deployment_operations: DeploymentOperationsOperations operations
:vartype deployment_operations: azure.mgmt.resource.resources.v2021_01_01.aio.operations.DeploymentOperationsOperations
:param credential: Credential needed for the client to connect to Azure.
:type credential: ~azure.core.credentials_async.AsyncTokenCredential
:param subscription_id: The ID of the target subscription.
:type subscription_id: str
:param str base_url: Service URL
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
"""
def __init__(
self,
credential: "AsyncTokenCredential",
subscription_id: str,
base_url: Optional[str] = None,
**kwargs: Any
) -> None:
if not base_url:
base_url = 'https://management.azure.com'
self._config = ResourceManagementClientConfiguration(credential, subscription_id, **kwargs)
self._client = AsyncARMPipelineClient(base_url=base_url, config=self._config, **kwargs)
client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)}
self._serialize = Serializer(client_models)
self._serialize.client_side_validation = False
self._deserialize = Deserializer(client_models)
self.operations = Operations(
self._client, self._config, self._serialize, self._deserialize)
self.deployments = DeploymentsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.providers = ProvidersOperations(
self._client, self._config, self._serialize, self._deserialize)
self.provider_resource_types = ProviderResourceTypesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.resources = ResourcesOperations(
self._client, self._config, self._serialize, self._deserialize)
self.resource_groups = ResourceGroupsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.tags = TagsOperations(
self._client, self._config, self._serialize, self._deserialize)
self.deployment_operations = DeploymentOperationsOperations(
self._client, self._config, self._serialize, self._deserialize)
async def _send_request(self, http_request: HttpRequest, **kwargs: Any) -> AsyncHttpResponse:
"""Runs the network request through the client's chained policies.
:param http_request: The network request you want to make. Required.
:type http_request: ~azure.core.pipeline.transport.HttpRequest
:keyword bool stream: Whether the response payload will be streamed. Defaults to True.
:return: The response of your network call. Does not do error handling on your response.
:rtype: ~azure.core.pipeline.transport.AsyncHttpResponse
"""
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
http_request.url = self._client.format_url(http_request.url, **path_format_arguments)
stream = kwargs.pop("stream", True)
pipeline_response = await self._client._pipeline.run(http_request, stream=stream, **kwargs)
return pipeline_response.http_response
async def close(self) -> None:
await self._client.close()
async def __aenter__(self) -> "ResourceManagementClient":
await self._client.__aenter__()
return self
async def __aexit__(self, *exc_details) -> None:
await self._client.__aexit__(*exc_details)
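# A hedged usage sketch (subscription id is a placeholder; assumes the
# azure-identity package provides the async credential):
#
#     from azure.identity.aio import DefaultAzureCredential
#
#     async def show_groups():
#         async with ResourceManagementClient(
#                 DefaultAzureCredential(), "<subscription-id>") as client:
#             async for group in client.resource_groups.list():
#                 print(group.name)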
| mit |
jmankoff/data | Assignments/jmankoff-rss/lib/werkzeug/debug/repr.py | 254 | 9354 | # -*- coding: utf-8 -*-
"""
werkzeug.debug.repr
~~~~~~~~~~~~~~~~~~~
This module implements object representations for debugging purposes.
Unlike the default repr these reprs expose a lot more information and
produce HTML instead of ASCII.
Together with the CSS and JavaScript files of the debugger this gives
a colorful and more compact output.
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD.
"""
import sys
import re
import codecs
from traceback import format_exception_only
try:
from collections import deque
except ImportError: # pragma: no cover
deque = None
from werkzeug.utils import escape
from werkzeug._compat import iteritems, PY2, text_type, integer_types, \
string_types
missing = object()
_paragraph_re = re.compile(r'(?:\r\n|\r|\n){2,}')
RegexType = type(_paragraph_re)
HELP_HTML = '''\
<div class=box>
<h3>%(title)s</h3>
<pre class=help>%(text)s</pre>
</div>\
'''
OBJECT_DUMP_HTML = '''\
<div class=box>
<h3>%(title)s</h3>
%(repr)s
<table>%(items)s</table>
</div>\
'''
def debug_repr(obj):
"""Creates a debug repr of an object as HTML unicode string."""
return DebugReprGenerator().repr(obj)
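# For example, debug_repr([1, 2, 3]) yields markup along the lines of
# '[<span class="number">1</span>, <span class="number">2</span>, ...]'
# (illustrative, not byte-exact), which the debugger's CSS then styles.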
def dump(obj=missing):
"""Print the object details to stdout._write (for the interactive
console of the web debugger.
"""
gen = DebugReprGenerator()
if obj is missing:
rv = gen.dump_locals(sys._getframe(1).f_locals)
else:
rv = gen.dump_object(obj)
sys.stdout._write(rv)
class _Helper(object):
"""Displays an HTML version of the normal help, for the interactive
debugger only because it requires a patched sys.stdout.
"""
def __repr__(self):
return 'Type help(object) for help about object.'
def __call__(self, topic=None):
if topic is None:
sys.stdout._write('<span class=help>%s</span>' % repr(self))
return
import pydoc
pydoc.help(topic)
rv = sys.stdout.reset()
if isinstance(rv, bytes):
rv = rv.decode('utf-8', 'ignore')
paragraphs = _paragraph_re.split(rv)
if len(paragraphs) > 1:
title = paragraphs[0]
text = '\n\n'.join(paragraphs[1:])
else: # pragma: no cover
title = 'Help'
text = paragraphs[0]
sys.stdout._write(HELP_HTML % {'title': title, 'text': text})
helper = _Helper()
def _add_subclass_info(inner, obj, base):
if isinstance(base, tuple):
for base in base:
if type(obj) is base:
return inner
elif type(obj) is base:
return inner
module = ''
if obj.__class__.__module__ not in ('__builtin__', 'exceptions'):
module = '<span class="module">%s.</span>' % obj.__class__.__module__
return '%s%s(%s)' % (module, obj.__class__.__name__, inner)
class DebugReprGenerator(object):
def __init__(self):
self._stack = []
def _sequence_repr_maker(left, right, base=object(), limit=8):
def proxy(self, obj, recursive):
if recursive:
return _add_subclass_info(left + '...' + right, obj, base)
buf = [left]
have_extended_section = False
for idx, item in enumerate(obj):
if idx:
buf.append(', ')
if idx == limit:
buf.append('<span class="extended">')
have_extended_section = True
buf.append(self.repr(item))
if have_extended_section:
buf.append('</span>')
buf.append(right)
return _add_subclass_info(u''.join(buf), obj, base)
return proxy
list_repr = _sequence_repr_maker('[', ']', list)
tuple_repr = _sequence_repr_maker('(', ')', tuple)
set_repr = _sequence_repr_maker('set([', '])', set)
frozenset_repr = _sequence_repr_maker('frozenset([', '])', frozenset)
if deque is not None:
deque_repr = _sequence_repr_maker('<span class="module">collections.'
'</span>deque([', '])', deque)
del _sequence_repr_maker
def regex_repr(self, obj):
pattern = repr(obj.pattern)
if PY2:
pattern = pattern.decode('string-escape', 'ignore')
else:
pattern = codecs.decode(pattern, 'unicode-escape', 'ignore')
if pattern[:1] == 'u':
pattern = 'ur' + pattern[1:]
else:
pattern = 'r' + pattern
return u're.compile(<span class="string regex">%s</span>)' % pattern
def string_repr(self, obj, limit=70):
buf = ['<span class="string">']
escaped = escape(obj)
a = repr(escaped[:limit])
b = repr(escaped[limit:])
if isinstance(obj, text_type) and PY2:
buf.append('u')
a = a[1:]
b = b[1:]
if b != "''":
buf.extend((a[:-1], '<span class="extended">', b[1:], '</span>'))
else:
buf.append(a)
buf.append('</span>')
return _add_subclass_info(u''.join(buf), obj, (bytes, text_type))
def dict_repr(self, d, recursive, limit=5):
if recursive:
return _add_subclass_info(u'{...}', d, dict)
buf = ['{']
have_extended_section = False
for idx, (key, value) in enumerate(iteritems(d)):
if idx:
buf.append(', ')
if idx == limit - 1:
buf.append('<span class="extended">')
have_extended_section = True
buf.append('<span class="pair"><span class="key">%s</span>: '
'<span class="value">%s</span></span>' %
(self.repr(key), self.repr(value)))
if have_extended_section:
buf.append('</span>')
buf.append('}')
return _add_subclass_info(u''.join(buf), d, dict)
def object_repr(self, obj):
r = repr(obj)
if PY2:
r = r.decode('utf-8', 'replace')
return u'<span class="object">%s</span>' % escape(r)
def dispatch_repr(self, obj, recursive):
if obj is helper:
return u'<span class="help">%r</span>' % helper
if isinstance(obj, (integer_types, float, complex)):
return u'<span class="number">%r</span>' % obj
if isinstance(obj, string_types):
return self.string_repr(obj)
if isinstance(obj, RegexType):
return self.regex_repr(obj)
if isinstance(obj, list):
return self.list_repr(obj, recursive)
if isinstance(obj, tuple):
return self.tuple_repr(obj, recursive)
if isinstance(obj, set):
return self.set_repr(obj, recursive)
if isinstance(obj, frozenset):
return self.frozenset_repr(obj, recursive)
if isinstance(obj, dict):
return self.dict_repr(obj, recursive)
if deque is not None and isinstance(obj, deque):
return self.deque_repr(obj, recursive)
return self.object_repr(obj)
def fallback_repr(self):
try:
info = ''.join(format_exception_only(*sys.exc_info()[:2]))
except Exception: # pragma: no cover
info = '?'
if PY2:
info = info.decode('utf-8', 'ignore')
return u'<span class="brokenrepr"><broken repr (%s)>' \
u'</span>' % escape(info.strip())
def repr(self, obj):
recursive = False
for item in self._stack:
if item is obj:
recursive = True
break
self._stack.append(obj)
try:
try:
return self.dispatch_repr(obj, recursive)
except Exception:
return self.fallback_repr()
finally:
self._stack.pop()
def dump_object(self, obj):
repr = items = None
if isinstance(obj, dict):
title = 'Contents of'
items = []
for key, value in iteritems(obj):
if not isinstance(key, string_types):
items = None
break
items.append((key, self.repr(value)))
if items is None:
items = []
repr = self.repr(obj)
for key in dir(obj):
try:
items.append((key, self.repr(getattr(obj, key))))
except Exception:
pass
title = 'Details for'
title += ' ' + object.__repr__(obj)[1:-1]
return self.render_object_dump(items, title, repr)
def dump_locals(self, d):
items = [(key, self.repr(value)) for key, value in d.items()]
return self.render_object_dump(items, 'Local variables in frame')
def render_object_dump(self, items, title, repr=None):
html_items = []
for key, value in items:
html_items.append('<tr><th>%s<td><pre class=repr>%s</pre>' %
(escape(key), value))
if not html_items:
html_items.append('<tr><td><em>Nothing</em>')
return OBJECT_DUMP_HTML % {
'title': escape(title),
'repr': repr and '<pre class=repr>%s</pre>' % repr or '',
'items': '\n'.join(html_items)
}
| gpl-3.0 |
weblabdeusto/weblabdeusto | tools/wcloud/deploy.py | 4 | 3933 | import os
import argparse
import getpass
import sqlalchemy
from sqlalchemy.orm import sessionmaker
from wcloud import db, app
def connect_to_database(user, passwd):
"""
Connects to the MySQL database using the specified username and password.
Assumes the DB is in localhost and listening on port 3306.
@param user: Username, which will need to be root to create new databases.
@param passwd: Password for the Username.
@return: Connection object and the Session() maker.
"""
conn_string = 'mysql://%s:%s@%s:%d' % (user, passwd, '127.0.0.1', 3306)
engine = sqlalchemy.create_engine(conn_string)
connection = engine.connect()
connection.execute("SELECT 1")
Session = sessionmaker(bind=connection)
return connection, Session
def main():
parser = argparse.ArgumentParser(description='Deploy the database')
parser.add_argument('-c', '--create-credentials', dest='create_credentials', action='store_true',
default=False, help='Create the database users and grant privileges on the tables.')
args = parser.parse_args()
if args.create_credentials:
accname = raw_input("SQL root account name (default: root): ")
if not accname:
accname = 'root'
password = getpass.getpass("SQL root account password: ")
engine, Session = connect_to_database(accname, password)
session = Session()
# Check if wcloud_creator exists and delete it if so
result = session.execute("SELECT EXISTS(SELECT 1 FROM mysql.user WHERE user = 'wcloud_creator')")
if result.first() == (1,):
session.execute("DROP USER 'wcloud_creator'@'localhost'")
# Check if wcloud exists and delete it if so
result = session.execute("SELECT EXISTS(SELECT 1 FROM mysql.user WHERE user = 'wcloud')")
if result.first() == (1,):
session.execute("DROP USER 'wcloud'@'localhost'")
# Check if wcloud exists and delete it if so
result = session.execute("SELECT EXISTS(SELECT 1 FROM mysql.user WHERE user = 'wcloudtest')")
if result.first() == (1,):
session.execute("DROP USER 'wcloudtest'@'localhost'")
print "[1/9] Previous users wcloud, wcloud_creator and wcloudtest cleared if present"
session.execute("CREATE DATABASE IF NOT EXISTS %s DEFAULT CHARACTER SET utf8" % app.config["DB_NAME"])
print "[2/9] Central wcloud database created if it didn't exist."
session.execute("CREATE USER 'wcloud_creator'@'localhost' identified by '%s'" % app.config["DB_WCLOUD_CREATOR_PASSWORD"])
print "[3/9] User wcloud_creator created."
session.execute("GRANT CREATE ON `wcloud%`.* to 'wcloud_creator'@'localhost'")
print "[4/9] Database creation privileges granted on wcloud_creator"
session.execute("GRANT ALL PRIVILEGES ON `wcloud%`.* TO 'wcloud_creator'@'localhost'")
print "[5/9] Wcloud databases read/write privileges granted on wcloud_creator."
session.execute("CREATE USER 'wcloud'@'localhost' IDENTIFIED BY '%s'" % app.config["DB_WCLOUD_PASSWORD"])
print "[6/9] User wcloud created."
session.execute("GRANT ALL PRIVILEGES ON `wcloud%`.* TO 'wcloud'@'localhost'")
print "[7/9] Wcloud databases read/write privileges granted on wcloud."
# For now, the testuser has a default password.
session.execute("CREATE USER 'wcloudtest'@'localhost' IDENTIFIED BY 'password'")
session.execute("GRANT ALL PRIVILEGES ON `wcloudtest`.* TO 'wcloud'@'localhost'")
print "[8/9] Wcloudtest user created and granted privileges for wcloudtest DB"
else:
print "Credentials not added. Use -c to also create the database user"
db.drop_all()
db.session.execute("drop table if exists alembic_version")
os.system("alembic upgrade head")
print "[9/9] Tables added"
print "DONE."
main()
| bsd-2-clause |
freakboy3742/django | tests/generic_views/urls.py | 21 | 13825 | from django.contrib.auth import views as auth_views
from django.contrib.auth.decorators import login_required
from django.urls import path, re_path
from django.views.decorators.cache import cache_page
from django.views.generic import TemplateView, dates
from . import views
from .models import Book
urlpatterns = [
# TemplateView
path('template/no_template/', TemplateView.as_view()),
path('template/login_required/', login_required(TemplateView.as_view())),
path('template/simple/<foo>/', TemplateView.as_view(template_name='generic_views/about.html')),
path('template/custom/<foo>/', views.CustomTemplateView.as_view(template_name='generic_views/about.html')),
path(
'template/content_type/',
TemplateView.as_view(template_name='generic_views/robots.txt', content_type='text/plain'),
),
path(
'template/cached/<foo>/',
cache_page(2.0)(TemplateView.as_view(template_name='generic_views/about.html')),
),
path(
'template/extra_context/',
TemplateView.as_view(template_name='generic_views/about.html', extra_context={'title': 'Title'}),
),
# DetailView
path('detail/obj/', views.ObjectDetail.as_view()),
path('detail/artist/<int:pk>/', views.ArtistDetail.as_view(), name='artist_detail'),
path('detail/author/<int:pk>/', views.AuthorDetail.as_view(), name='author_detail'),
path('detail/author/bycustompk/<foo>/', views.AuthorDetail.as_view(pk_url_kwarg='foo')),
path('detail/author/byslug/<slug>/', views.AuthorDetail.as_view()),
path('detail/author/bycustomslug/<foo>/', views.AuthorDetail.as_view(slug_url_kwarg='foo')),
path('detail/author/bypkignoreslug/<int:pk>-<slug>/', views.AuthorDetail.as_view()),
path('detail/author/bypkandslug/<int:pk>-<slug>/', views.AuthorDetail.as_view(query_pk_and_slug=True)),
path('detail/author/<int:pk>/template_name_suffix/', views.AuthorDetail.as_view(template_name_suffix='_view')),
path(
'detail/author/<int:pk>/template_name/',
views.AuthorDetail.as_view(template_name='generic_views/about.html'),
),
path('detail/author/<int:pk>/context_object_name/', views.AuthorDetail.as_view(context_object_name='thingy')),
path('detail/author/<int:pk>/custom_detail/', views.AuthorCustomDetail.as_view()),
path('detail/author/<int:pk>/dupe_context_object_name/', views.AuthorDetail.as_view(context_object_name='object')),
path('detail/page/<int:pk>/field/', views.PageDetail.as_view()),
path(r'detail/author/invalid/url/', views.AuthorDetail.as_view()),
path('detail/author/invalid/qs/', views.AuthorDetail.as_view(queryset=None)),
path('detail/nonmodel/1/', views.NonModelDetail.as_view()),
path('detail/doesnotexist/<pk>/', views.ObjectDoesNotExistDetail.as_view()),
# FormView
path('contact/', views.ContactView.as_view()),
path('late-validation/', views.LateValidationView.as_view()),
# Create/UpdateView
path('edit/artists/create/', views.ArtistCreate.as_view()),
path('edit/artists/<int:pk>/update/', views.ArtistUpdate.as_view()),
path('edit/authors/create/naive/', views.NaiveAuthorCreate.as_view()),
path('edit/authors/create/redirect/', views.NaiveAuthorCreate.as_view(success_url='/edit/authors/create/')),
path(
'edit/authors/create/interpolate_redirect/',
views.NaiveAuthorCreate.as_view(success_url='/edit/author/{id}/update/'),
),
path(
'edit/authors/create/interpolate_redirect_nonascii/',
views.NaiveAuthorCreate.as_view(success_url='/%C3%A9dit/author/{id}/update/'),
),
path('edit/authors/create/restricted/', views.AuthorCreateRestricted.as_view()),
re_path('^[eé]dit/authors/create/$', views.AuthorCreate.as_view()),
path('edit/authors/create/special/', views.SpecializedAuthorCreate.as_view()),
path('edit/author/<int:pk>/update/naive/', views.NaiveAuthorUpdate.as_view()),
path(
'edit/author/<int:pk>/update/redirect/',
views.NaiveAuthorUpdate.as_view(success_url='/edit/authors/create/')
),
path(
'edit/author/<int:pk>/update/interpolate_redirect/',
views.NaiveAuthorUpdate.as_view(success_url='/edit/author/{id}/update/')
),
path(
'edit/author/<int:pk>/update/interpolate_redirect_nonascii/',
views.NaiveAuthorUpdate.as_view(success_url='/%C3%A9dit/author/{id}/update/'),
),
re_path('^[eé]dit/author/(?P<pk>[0-9]+)/update/$', views.AuthorUpdate.as_view()),
path('edit/author/update/', views.OneAuthorUpdate.as_view()),
path('edit/author/<int:pk>/update/special/', views.SpecializedAuthorUpdate.as_view()),
path('edit/author/<int:pk>/delete/naive/', views.NaiveAuthorDelete.as_view()),
path(
'edit/author/<int:pk>/delete/redirect/',
views.NaiveAuthorDelete.as_view(success_url='/edit/authors/create/'),
),
path(
'edit/author/<int:pk>/delete/interpolate_redirect/',
views.NaiveAuthorDelete.as_view(success_url='/edit/authors/create/?deleted={id}')
),
path(
'edit/author/<int:pk>/delete/interpolate_redirect_nonascii/',
views.NaiveAuthorDelete.as_view(success_url='/%C3%A9dit/authors/create/?deleted={id}')
),
path('edit/author/<int:pk>/delete/', views.AuthorDelete.as_view()),
path('edit/author/<int:pk>/delete/special/', views.SpecializedAuthorDelete.as_view()),
# ArchiveIndexView
path('dates/books/', views.BookArchive.as_view()),
path('dates/books/context_object_name/', views.BookArchive.as_view(context_object_name='thingies')),
path('dates/books/allow_empty/', views.BookArchive.as_view(allow_empty=True)),
path('dates/books/template_name/', views.BookArchive.as_view(template_name='generic_views/list.html')),
path('dates/books/template_name_suffix/', views.BookArchive.as_view(template_name_suffix='_detail')),
path('dates/books/invalid/', views.BookArchive.as_view(queryset=None)),
path('dates/books/paginated/', views.BookArchive.as_view(paginate_by=10)),
path('dates/books/reverse/', views.BookArchive.as_view(queryset=Book.objects.order_by('pubdate'))),
path('dates/books/by_month/', views.BookArchive.as_view(date_list_period='month')),
path('dates/booksignings/', views.BookSigningArchive.as_view()),
path('dates/books/sortedbyname/', views.BookArchive.as_view(ordering='name')),
path('dates/books/sortedbynamedec/', views.BookArchive.as_view(ordering='-name')),
path('dates/books/without_date_field/', views.BookArchiveWithoutDateField.as_view()),
# ListView
path('list/dict/', views.DictList.as_view()),
path('list/dict/paginated/', views.DictList.as_view(paginate_by=1)),
path('list/artists/', views.ArtistList.as_view(), name='artists_list'),
path('list/authors/', views.AuthorList.as_view(), name='authors_list'),
path('list/authors/paginated/', views.AuthorList.as_view(paginate_by=30)),
path('list/authors/paginated/<int:page>/', views.AuthorList.as_view(paginate_by=30)),
path('list/authors/paginated-orphaned/', views.AuthorList.as_view(paginate_by=30, paginate_orphans=2)),
path('list/authors/notempty/', views.AuthorList.as_view(allow_empty=False)),
path('list/authors/notempty/paginated/', views.AuthorList.as_view(allow_empty=False, paginate_by=2)),
path('list/authors/template_name/', views.AuthorList.as_view(template_name='generic_views/list.html')),
path('list/authors/template_name_suffix/', views.AuthorList.as_view(template_name_suffix='_objects')),
path('list/authors/context_object_name/', views.AuthorList.as_view(context_object_name='author_list')),
path('list/authors/dupe_context_object_name/', views.AuthorList.as_view(context_object_name='object_list')),
path('list/authors/invalid/', views.AuthorList.as_view(queryset=None)),
path(
'list/authors/get_queryset/',
views.AuthorListGetQuerysetReturnsNone.as_view(),
),
path(
'list/authors/paginated/custom_class/',
views.AuthorList.as_view(paginate_by=5, paginator_class=views.CustomPaginator),
),
path('list/authors/paginated/custom_page_kwarg/', views.AuthorList.as_view(paginate_by=30, page_kwarg='pagina')),
path('list/authors/paginated/custom_constructor/', views.AuthorListCustomPaginator.as_view()),
path('list/books/sorted/', views.BookList.as_view(ordering='name')),
path('list/books/sortedbypagesandnamedec/', views.BookList.as_view(ordering=('pages', '-name'))),
# YearArchiveView
# Mixing keyword and positional captures below is intentional; the views
# ought to be able to accept either.
path('dates/books/<int:year>/', views.BookYearArchive.as_view()),
path('dates/books/<int:year>/make_object_list/', views.BookYearArchive.as_view(make_object_list=True)),
path('dates/books/<int:year>/allow_empty/', views.BookYearArchive.as_view(allow_empty=True)),
path('dates/books/<int:year>/allow_future/', views.BookYearArchive.as_view(allow_future=True)),
path('dates/books/<int:year>/paginated/', views.BookYearArchive.as_view(make_object_list=True, paginate_by=30)),
path(
'dates/books/<int:year>/sortedbyname/',
views.BookYearArchive.as_view(make_object_list=True, ordering='name'),
),
path(
'dates/books/<int:year>/sortedbypageandnamedec/',
views.BookYearArchive.as_view(make_object_list=True, ordering=('pages', '-name')),
),
path('dates/books/no_year/', views.BookYearArchive.as_view()),
path('dates/books/<int:year>/reverse/', views.BookYearArchive.as_view(queryset=Book.objects.order_by('pubdate'))),
path('dates/booksignings/<int:year>/', views.BookSigningYearArchive.as_view()),
# MonthArchiveView
path('dates/books/<int:year>/<int:month>/', views.BookMonthArchive.as_view(month_format='%m')),
path('dates/books/<int:year>/<month>/', views.BookMonthArchive.as_view()),
path('dates/books/without_month/<int:year>/', views.BookMonthArchive.as_view()),
path('dates/books/<int:year>/<month>/allow_empty/', views.BookMonthArchive.as_view(allow_empty=True)),
path('dates/books/<int:year>/<month>/allow_future/', views.BookMonthArchive.as_view(allow_future=True)),
path('dates/books/<int:year>/<month>/paginated/', views.BookMonthArchive.as_view(paginate_by=30)),
path('dates/books/<int:year>/no_month/', views.BookMonthArchive.as_view()),
path('dates/booksignings/<int:year>/<month>/', views.BookSigningMonthArchive.as_view()),
# WeekArchiveView
path('dates/books/<int:year>/week/<int:week>/', views.BookWeekArchive.as_view()),
path('dates/books/<int:year>/week/<int:week>/allow_empty/', views.BookWeekArchive.as_view(allow_empty=True)),
path('dates/books/<int:year>/week/<int:week>/allow_future/', views.BookWeekArchive.as_view(allow_future=True)),
path('dates/books/<int:year>/week/<int:week>/paginated/', views.BookWeekArchive.as_view(paginate_by=30)),
path('dates/books/<int:year>/week/no_week/', views.BookWeekArchive.as_view()),
path('dates/books/<int:year>/week/<int:week>/monday/', views.BookWeekArchive.as_view(week_format='%W')),
path(
'dates/books/<int:year>/week/<int:week>/unknown_week_format/',
views.BookWeekArchive.as_view(week_format='%T'),
),
path(
'dates/books/<int:year>/week/<int:week>/iso_format/',
views.BookWeekArchive.as_view(year_format='%G', week_format='%V'),
),
path(
'dates/books/<int:year>/week/<int:week>/invalid_iso_week_year_format/',
views.BookWeekArchive.as_view(week_format='%V'),
),
path('dates/booksignings/<int:year>/week/<int:week>/', views.BookSigningWeekArchive.as_view()),
# DayArchiveView
path('dates/books/<int:year>/<int:month>/<int:day>/', views.BookDayArchive.as_view(month_format='%m')),
path('dates/books/<int:year>/<month>/<int:day>/', views.BookDayArchive.as_view()),
path('dates/books/<int:year>/<month>/<int:day>/allow_empty/', views.BookDayArchive.as_view(allow_empty=True)),
path('dates/books/<int:year>/<month>/<int:day>/allow_future/', views.BookDayArchive.as_view(allow_future=True)),
path(
'dates/books/<int:year>/<month>/<int:day>/allow_empty_and_future/',
views.BookDayArchive.as_view(allow_empty=True, allow_future=True),
),
path('dates/books/<int:year>/<month>/<int:day>/paginated/', views.BookDayArchive.as_view(paginate_by=True)),
path('dates/books/<int:year>/<month>/no_day/', views.BookDayArchive.as_view()),
path('dates/booksignings/<int:year>/<month>/<int:day>/', views.BookSigningDayArchive.as_view()),
# TodayArchiveView
path('dates/books/today/', views.BookTodayArchive.as_view()),
path('dates/books/today/allow_empty/', views.BookTodayArchive.as_view(allow_empty=True)),
path('dates/booksignings/today/', views.BookSigningTodayArchive.as_view()),
# DateDetailView
path('dates/books/<int:year>/<int:month>/<day>/<int:pk>/', views.BookDetail.as_view(month_format='%m')),
path('dates/books/<int:year>/<month>/<day>/<int:pk>/', views.BookDetail.as_view()),
path(
'dates/books/<int:year>/<month>/<int:day>/<int:pk>/allow_future/',
views.BookDetail.as_view(allow_future=True),
),
path('dates/books/<int:year>/<month>/<int:day>/nopk/', views.BookDetail.as_view()),
path('dates/books/<int:year>/<month>/<int:day>/byslug/<slug:slug>/', views.BookDetail.as_view()),
path(
'dates/books/get_object_custom_queryset/<int:year>/<month>/<int:day>/<int:pk>/',
views.BookDetailGetObjectCustomQueryset.as_view(),
),
path('dates/booksignings/<int:year>/<month>/<int:day>/<int:pk>/', views.BookSigningDetail.as_view()),
# Useful for testing redirects
path('accounts/login/', auth_views.LoginView.as_view()),
path('BaseDateListViewTest/', dates.BaseDateListView.as_view()),
]
| bsd-3-clause |
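A minimal, hypothetical sketch of the kind of view class the URL patterns above assume; the real views and models modules are not shown here, so the Book model and its 'pubdate' DateField are illustrative assumptions.

from django.views.generic.dates import YearArchiveView
from .models import Book  # assumed: a Book model with a 'pubdate' DateField

class BookYearArchive(YearArchiveView):
    model = Book
    date_field = 'pubdate'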
hrashk/sympy | sympy/functions/special/tests/test_zeta_functions.py | 13 | 5241 | from sympy import (Symbol, zeta, nan, Rational, Float, pi, dirichlet_eta, log,
zoo, expand_func, polylog, lerchphi, S, exp, sqrt, I,
exp_polar, polar_lift, O)
from sympy.utilities.randtest import (test_derivative_numerically as td,
random_complex_number as randcplx, test_numerically as tn)
from sympy.utilities.pytest import XFAIL
x = Symbol('x')
a = Symbol('a')
z = Symbol('z')
s = Symbol('s')
def test_zeta_eval():
assert zeta(nan) == nan
assert zeta(x, nan) == nan
assert zeta(0) == Rational(-1, 2)
assert zeta(0, x) == Rational(1, 2) - x
assert zeta(1) == zoo
assert zeta(1, 2) == zoo
assert zeta(1, -7) == zoo
assert zeta(1, x) == zoo
assert zeta(2, 1) == pi**2/6
assert zeta(2) == pi**2/6
assert zeta(4) == pi**4/90
assert zeta(6) == pi**6/945
assert zeta(2, 2) == pi**2/6 - 1
assert zeta(4, 3) == pi**4/90 - Rational(17, 16)
assert zeta(6, 4) == pi**6/945 - Rational(47449, 46656)
assert zeta(2, -2) == pi**2/6 + Rational(5, 4)
assert zeta(4, -3) == pi**4/90 + Rational(1393, 1296)
assert zeta(6, -4) == pi**6/945 + Rational(3037465, 2985984)
assert zeta(-1) == -Rational(1, 12)
assert zeta(-2) == 0
assert zeta(-3) == Rational(1, 120)
assert zeta(-4) == 0
assert zeta(-5) == -Rational(1, 252)
assert zeta(-1, 3) == -Rational(37, 12)
assert zeta(-1, 7) == -Rational(253, 12)
assert zeta(-1, -4) == Rational(119, 12)
assert zeta(-1, -9) == Rational(539, 12)
assert zeta(-4, 3) == -17
assert zeta(-4, -8) == 8772
assert zeta(0, 1) == -Rational(1, 2)
assert zeta(0, -1) == Rational(1, 2)
assert zeta(0, 2) == -Rational(3, 2)
assert zeta(0, -2) == Rational(3, 2)
assert zeta(
3).evalf(20).epsilon_eq(Float("1.2020569031595942854", 20), 1e-19)
def test_zeta_series():
assert zeta(x, a).series(a, 0, 2) == \
zeta(x, 0) - x*a*zeta(x + 1, 0) + O(a**2)
def test_dirichlet_eta_eval():
assert dirichlet_eta(0) == Rational(1, 2)
assert dirichlet_eta(-1) == Rational(1, 4)
assert dirichlet_eta(1) == log(2)
assert dirichlet_eta(2) == pi**2/12
assert dirichlet_eta(4) == pi**4*Rational(7, 720)
def test_rewriting():
assert dirichlet_eta(x).rewrite(zeta) == (1 - 2**(1 - x))*zeta(x)
assert zeta(x).rewrite(dirichlet_eta) == dirichlet_eta(x)/(1 - 2**(1 - x))
assert tn(dirichlet_eta(x), dirichlet_eta(x).rewrite(zeta), x)
assert tn(zeta(x), zeta(x).rewrite(dirichlet_eta), x)
assert zeta(x, a).rewrite(lerchphi) == lerchphi(1, x, a)
assert polylog(s, z).rewrite(lerchphi) == lerchphi(z, s, 1)*z
assert lerchphi(1, x, a).rewrite(zeta) == zeta(x, a)
assert z*lerchphi(z, s, 1).rewrite(polylog) == polylog(s, z)
def test_derivatives():
from sympy import Derivative
assert zeta(x, a).diff(x) == Derivative(zeta(x, a), x)
assert zeta(x, a).diff(a) == -x*zeta(x + 1, a)
assert lerchphi(
z, s, a).diff(z) == (lerchphi(z, s - 1, a) - a*lerchphi(z, s, a))/z
assert lerchphi(z, s, a).diff(a) == -s*lerchphi(z, s + 1, a)
assert polylog(s, z).diff(z) == polylog(s - 1, z)/z
b = randcplx()
c = randcplx()
assert td(zeta(b, x), x)
assert td(polylog(b, z), z)
assert td(lerchphi(c, b, x), x)
assert td(lerchphi(x, b, c), x)
def myexpand(func, target):
expanded = expand_func(func)
if target is not None:
return expanded == target
if expanded == func: # it didn't expand
return False
# check to see that the expanded and original evaluate to the same value
subs = {}
for a in func.free_symbols:
subs[a] = randcplx()
return abs(func.subs(subs).n()
- expanded.replace(exp_polar, exp).subs(subs).n()) < 1e-10
def test_polylog_expansion():
from sympy import factor, log
assert polylog(s, 0) == 0
assert polylog(s, 1) == zeta(s)
assert polylog(s, -1) == dirichlet_eta(s)
assert myexpand(polylog(1, z), -log(1 + exp_polar(-I*pi)*z))
assert myexpand(polylog(0, z), z/(1 - z))
assert myexpand(polylog(-1, z), z**2/(1 - z)**2 + z/(1 - z))
assert myexpand(polylog(-5, z), None)
def test_lerchphi_expansion():
assert myexpand(lerchphi(1, s, a), zeta(s, a))
assert myexpand(lerchphi(z, s, 1), polylog(s, z)/z)
# direct summation
assert myexpand(lerchphi(z, -1, a), a/(1 - z) + z/(1 - z)**2)
assert myexpand(lerchphi(z, -3, a), None)
# polylog reduction
assert myexpand(lerchphi(z, s, S(1)/2),
2**(s - 1)*(polylog(s, sqrt(z))/sqrt(z)
- polylog(s, polar_lift(-1)*sqrt(z))/sqrt(z)))
assert myexpand(lerchphi(z, s, 2), -1/z + polylog(s, z)/z**2)
assert myexpand(lerchphi(z, s, S(3)/2), None)
assert myexpand(lerchphi(z, s, S(7)/3), None)
assert myexpand(lerchphi(z, s, -S(1)/3), None)
assert myexpand(lerchphi(z, s, -S(5)/2), None)
# hurwitz zeta reduction
assert myexpand(lerchphi(-1, s, a),
2**(-s)*zeta(s, a/2) - 2**(-s)*zeta(s, (a + 1)/2))
assert myexpand(lerchphi(I, s, a), None)
assert myexpand(lerchphi(-I, s, a), None)
assert myexpand(lerchphi(exp(2*I*pi/5), s, a), None)
| bsd-3-clause |
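A quick standalone sketch verifying the eta/zeta rewrite identity exercised in test_rewriting() above, dirichlet_eta(s) == (1 - 2**(1 - s))*zeta(s), at a concrete point:

from sympy import S, zeta, dirichlet_eta

s = S(3)
lhs = dirichlet_eta(s).evalf(20)
rhs = ((1 - 2**(1 - s)) * zeta(s)).evalf(20)
assert abs(lhs - rhs) < 1e-18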
ryfeus/lambda-packs | Tensorflow_OpenCV_Nightly/source/tensorflow/python/layers/core.py | 4 | 12444 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
# pylint: disable=unused-import,g-bad-import-order
"""Contains the core layers: Dense, Dropout.
Also contains their functional aliases.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from six.moves import xrange # pylint: disable=redefined-builtin
import numpy as np
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import nn
from tensorflow.python.ops import standard_ops
from tensorflow.python.ops import variable_scope as vs
from tensorflow.python.layers import base
from tensorflow.python.layers import utils
class Dense(base.Layer):
"""Densely-connected layer class.
This layer implements the operation:
`outputs = activation(matmul(inputs, kernel) + bias)`
Where `activation` is the activation function passed as the `activation`
argument (if not `None`), `kernel` is a weights matrix created by the layer,
and `bias` is a bias vector created by the layer
(only if `use_bias` is `True`).
Note: if the input to the layer has a rank greater than 2, then it is
flattened prior to the initial matrix multiply by `kernel`.
Arguments:
units: Integer or Long, dimensionality of the output space.
activation: Activation function (callable). Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: Initializer function for the weight matrix.
bias_initializer: Initializer function for the bias.
kernel_regularizer: Regularizer function for the weight matrix.
bias_regularizer: Regularizer function for the bias.
activity_regularizer: Regularizer function for the output.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: String, the name of the layer. Layers with the same name will
share weights, but to avoid mistakes we require reuse=True in such cases.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Properties:
units: Python integer, dimensionality of the output space.
activation: Activation function (callable).
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: Initializer instance (or name) for the weight matrix.
bias_initializer: Initializer instance (or name) for the bias.
kernel_regularizer: Regularizer instance for the weight matrix (callable)
bias_regularizer: Regularizer instance for the bias (callable).
activity_regularizer: Regularizer instance for the output (callable)
kernel: Weight matrix (TensorFlow variable or tensor).
bias: Bias vector, if applicable (TensorFlow variable or tensor).
"""
def __init__(self, units,
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
trainable=True,
name=None,
**kwargs):
super(Dense, self).__init__(trainable=trainable, name=name, **kwargs)
self.units = units
self.activation = activation
self.use_bias = use_bias
self.kernel_initializer = kernel_initializer
self.bias_initializer = bias_initializer
self.kernel_regularizer = kernel_regularizer
self.bias_regularizer = bias_regularizer
self.activity_regularizer = activity_regularizer
self.input_spec = base.InputSpec(min_ndim=2)
def build(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape)
if input_shape[-1].value is None:
raise ValueError('The last dimension of the inputs to `Dense` '
'should be defined. Found `None`.')
self.input_spec = base.InputSpec(min_ndim=2,
axes={-1: input_shape[-1].value})
self.kernel = self.add_variable('kernel',
shape=[input_shape[-1].value, self.units],
initializer=self.kernel_initializer,
regularizer=self.kernel_regularizer,
dtype=self.dtype,
trainable=True)
if self.use_bias:
self.bias = self.add_variable('bias',
shape=[self.units,],
initializer=self.bias_initializer,
regularizer=self.bias_regularizer,
dtype=self.dtype,
trainable=True)
else:
self.bias = None
self.built = True
def call(self, inputs):
inputs = ops.convert_to_tensor(inputs, dtype=self.dtype)
shape = inputs.get_shape().as_list()
output_shape = shape[:-1] + [self.units]
if len(output_shape) > 2:
# Broadcasting is required for the inputs.
outputs = standard_ops.tensordot(inputs, self.kernel, [[len(shape) - 1],
[0]])
# Reshape the output back to the original ndim of the input.
outputs.set_shape(output_shape)
else:
outputs = standard_ops.matmul(inputs, self.kernel)
if self.use_bias:
outputs = nn.bias_add(outputs, self.bias)
if self.activation is not None:
return self.activation(outputs) # pylint: disable=not-callable
return outputs
def _compute_output_shape(self, input_shape):
input_shape = tensor_shape.TensorShape(input_shape)
input_shape = input_shape.with_rank_at_least(2)
if input_shape[-1].value is None:
raise ValueError(
'The innermost dimension of input_shape must be defined, but saw: %s'
% input_shape)
return input_shape[:-1].concatenate(self.units)
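# Aside (standalone sketch, uses the np imported at the top of this
# file): the rank>2 branch in Dense.call() above -- tensordot contracts
# the last input axis against the kernel's first axis, preserving the
# leading batch/time dims.
_x = np.ones((2, 3, 4))                # (batch, time, features)
_k = np.ones((4, 5))                   # (features, units)
assert np.tensordot(_x, _k, [[2], [0]]).shape == (2, 3, 5)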
def dense(
inputs, units,
activation=None,
use_bias=True,
kernel_initializer=None,
bias_initializer=init_ops.zeros_initializer(),
kernel_regularizer=None,
bias_regularizer=None,
activity_regularizer=None,
trainable=True,
name=None,
reuse=None):
"""Functional interface for the densely-connected layer.
This layer implements the operation:
`outputs = activation(matmul(inputs, kernel) + bias)`
Where `activation` is the activation function passed as the `activation`
argument (if not `None`), `kernel` is a weights matrix created by the layer,
and `bias` is a bias vector created by the layer
(only if `use_bias` is `True`).
Note: if the `inputs` tensor has a rank greater than 2, then it is
flattened prior to the initial matrix multiply by `kernel`.
Arguments:
inputs: Tensor input.
units: Integer or Long, dimensionality of the output space.
activation: Activation function (callable). Set it to None to maintain a
linear activation.
use_bias: Boolean, whether the layer uses a bias.
kernel_initializer: Initializer function for the weight matrix.
bias_initializer: Initializer function for the bias.
kernel_regularizer: Regularizer function for the weight matrix.
bias_regularizer: Regularizer function for the bias.
activity_regularizer: Regularizer function for the output.
trainable: Boolean, if `True` also add variables to the graph collection
`GraphKeys.TRAINABLE_VARIABLES` (see `tf.Variable`).
name: String, the name of the layer.
reuse: Boolean, whether to reuse the weights of a previous layer
by the same name.
Returns:
Output tensor.
"""
layer = Dense(units,
activation=activation,
use_bias=use_bias,
kernel_initializer=kernel_initializer,
bias_initializer=bias_initializer,
kernel_regularizer=kernel_regularizer,
bias_regularizer=bias_regularizer,
activity_regularizer=activity_regularizer,
trainable=trainable,
name=name,
dtype=inputs.dtype.base_dtype,
_scope=name,
_reuse=reuse)
return layer.apply(inputs)
class Dropout(base.Layer):
"""Applies Dropout to the input.
Dropout consists in randomly setting a fraction `rate` of input units to 0
at each update during training time, which helps prevent overfitting.
The units that are kept are scaled by `1 / (1 - rate)`, so that their
sum is unchanged at training time and inference time.
Arguments:
rate: The dropout rate, between 0 and 1. E.g. `rate=0.1` would drop out
10% of input units.
noise_shape: 1D tensor of type `int32` representing the shape of the
binary dropout mask that will be multiplied with the input.
For instance, if your inputs have shape
`(batch_size, timesteps, features)`, and you want the dropout mask
to be the same for all timesteps, you can use
`noise_shape=[batch_size, 1, features]`.
seed: A Python integer. Used to create random seeds. See
@{tf.set_random_seed} for behavior.
name: The name of the layer (string).
"""
def __init__(self, rate=0.5,
noise_shape=None,
seed=None,
name=None,
**kwargs):
super(Dropout, self).__init__(name=name, **kwargs)
self.rate = min(1., max(0., rate))
self.noise_shape = noise_shape
self.seed = seed
def _get_noise_shape(self, _):
# Subclasses of `Dropout` may implement `_get_noise_shape(self, inputs)`,
# which will override `self.noise_shape`, and allows for custom noise
# shapes with dynamically sized inputs.
return self.noise_shape
def call(self, inputs, training=False):
def dropped_inputs():
return nn.dropout(inputs, 1 - self.rate,
noise_shape=self._get_noise_shape(inputs),
seed=self.seed)
return utils.smart_cond(training,
dropped_inputs,
lambda: array_ops.identity(inputs))
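# Aside (standalone sketch, uses the np imported at the top of this
# file): inverted-dropout scaling in miniature -- kept units are divided
# by (1 - rate), so the expected activation matches inference time.
_rng = np.random.RandomState(0)
_kept = _rng.rand(100000) >= 0.5           # Bernoulli keep mask, rate=0.5
_scaled = _kept / (1 - 0.5)                # scale survivors by 1/(1-rate)
assert abs(_scaled.mean() - 1.0) < 0.02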
def dropout(inputs,
rate=0.5,
noise_shape=None,
seed=None,
training=False,
name=None):
"""Applies Dropout to the input.
Dropout consists in randomly setting a fraction `rate` of input units to 0
at each update during training time, which helps prevent overfitting.
The units that are kept are scaled by `1 / (1 - rate)`, so that their
sum is unchanged at training time and inference time.
Arguments:
inputs: Tensor input.
rate: The dropout rate, between 0 and 1. E.g. `rate=0.1` would drop out
10% of input units.
noise_shape: 1D tensor of type `int32` representing the shape of the
binary dropout mask that will be multiplied with the input.
For instance, if your inputs have shape
`(batch_size, timesteps, features)`, and you want the dropout mask
to be the same for all timesteps, you can use
`noise_shape=[batch_size, 1, features]`.
seed: A Python integer. Used to create random seeds. See
@{tf.set_random_seed}
for behavior.
training: Either a Python boolean, or a TensorFlow boolean scalar tensor
(e.g. a placeholder). Whether to return the output in training mode
(apply dropout) or in inference mode (return the input untouched).
name: The name of the layer (string).
Returns:
Output tensor.
"""
layer = Dropout(rate, noise_shape=noise_shape, seed=seed, name=name)
return layer.apply(inputs, training=training)
# Aliases
FullyConnected = Dense
fully_connected = dense
| mit |
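A hedged usage sketch for the layer APIs defined above, in the graph-mode TF 1.x style this file targets; shapes and hyperparameters are arbitrary examples, not taken from the source.

import tensorflow as tf

x = tf.placeholder(tf.float32, shape=[None, 784])
h = tf.layers.dense(x, units=128, activation=tf.nn.relu)
h = tf.layers.dropout(h, rate=0.5, training=True)
logits = tf.layers.dense(h, units=10)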
Wuguanping/Server_Manage_Plugin | Openstack_Plugin/ironic-plugin-pike/ironic/drivers/modules/ucs/management.py | 4 | 5632 | # Copyright 2015, Cisco Systems.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Ironic Cisco UCSM interfaces.
Provides management interface operations for servers managed by Cisco UCSM
using the PyUcs SDK.
"""
from oslo_log import log as logging
from oslo_utils import importutils
from ironic.common import boot_devices
from ironic.common import exception
from ironic.common.i18n import _
from ironic.drivers import base
from ironic.drivers.modules.ucs import helper as ucs_helper
ucs_error = importutils.try_import('UcsSdk.utils.exception')
ucs_mgmt = importutils.try_import('UcsSdk.utils.management')
LOG = logging.getLogger(__name__)
UCS_TO_IRONIC_BOOT_DEVICE = {
'storage': boot_devices.DISK,
'disk': boot_devices.DISK,
'pxe': boot_devices.PXE,
'read-only-vm': boot_devices.CDROM,
'cdrom': boot_devices.CDROM
}
class UcsManagement(base.ManagementInterface):
def get_properties(self):
return ucs_helper.COMMON_PROPERTIES
def validate(self, task):
"""Check that 'driver_info' contains UCSM login credentials.
Validates whether the 'driver_info' property of the supplied
task's node contains the required credentials information.
:param task: a task from TaskManager.
:raises: MissingParameterValue if a required parameter is missing
"""
ucs_helper.parse_driver_info(task.node)
def get_supported_boot_devices(self, task):
"""Get a list of the supported boot devices.
:param task: a task from TaskManager.
:returns: A list with the supported boot devices defined
in :mod:`ironic.common.boot_devices`.
"""
return list(set(UCS_TO_IRONIC_BOOT_DEVICE.values()))
@ucs_helper.requires_ucs_client
def set_boot_device(self, task, device, persistent=False, helper=None):
"""Set the boot device for the task's node.
Set the boot device to use on next reboot of the node.
:param task: a task from TaskManager.
:param device: the boot device, one of 'PXE', 'DISK', or 'CDROM'.
:param persistent: Boolean value. True if the boot device will
persist to all future boots, False if not.
Default: False. Ignored by this driver.
:param helper: ucs helper instance.
:raises: MissingParameterValue if required CiscoDriver parameters
are missing.
:raises: UcsOperationError on error from the UCS client while
setting the boot device.
"""
try:
mgmt_handle = ucs_mgmt.BootDeviceHelper(helper)
mgmt_handle.set_boot_device(device, persistent)
except ucs_error.UcsOperationError as ucs_exception:
LOG.error("%(driver)s: client failed to set boot device "
"%(device)s for node %(uuid)s.",
{'driver': task.node.driver, 'device': device,
'uuid': task.node.uuid})
operation = _('setting boot device')
raise exception.UcsOperationError(operation=operation,
error=ucs_exception,
node=task.node.uuid)
LOG.debug("Node %(uuid)s set to boot from %(device)s.",
{'uuid': task.node.uuid, 'device': device})
@ucs_helper.requires_ucs_client
def get_boot_device(self, task, helper=None):
"""Get the current boot device for the task's node.
Provides the current boot device of the node.
:param task: a task from TaskManager.
:param helper: ucs helper instance.
:returns: a dictionary containing:
:boot_device: the boot device, one of
:mod:`ironic.common.boot_devices` [PXE, DISK, CDROM] or
None if it is unknown.
:persistent: Whether the boot device will persist to all
future boots or not, None if it is unknown.
:raises: MissingParameterValue if a required UCS parameter is missing.
:raises: UcsOperationError on error from UCS client, while setting the
boot device.
"""
try:
mgmt_handle = ucs_mgmt.BootDeviceHelper(helper)
boot_device = mgmt_handle.get_boot_device()
except ucs_error.UcsOperationError as ucs_exception:
LOG.error("%(driver)s: client failed to get boot device for "
"node %(uuid)s.",
{'driver': task.node.driver, 'uuid': task.node.uuid})
operation = _('getting boot device')
raise exception.UcsOperationError(operation=operation,
error=ucs_exception,
node=task.node.uuid)
boot_device['boot_device'] = (
UCS_TO_IRONIC_BOOT_DEVICE[boot_device['boot_device']])
return boot_device
def get_sensors_data(self, task):
"""Get sensors data.
Not implemented by this driver.
:param task: a TaskManager instance.
"""
raise NotImplementedError()
| apache-2.0 |
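Aside: get_supported_boot_devices() above leans on set() to collapse the many-to-one UCS-to-ironic device mapping; a standalone sketch of that dedup idiom with the same values:

mapping = {'storage': 'disk', 'disk': 'disk', 'pxe': 'pxe',
           'read-only-vm': 'cdrom', 'cdrom': 'cdrom'}
assert sorted(set(mapping.values())) == ['cdrom', 'disk', 'pxe']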
apophys/freeipa | ipaclient/remote_plugins/2_114/misc.py | 32 | 2753 | #
# Copyright (C) 2016 FreeIPA Contributors see COPYING for license
#
# pylint: disable=unused-import
import six
from . import Command, Method, Object
from ipalib import api, parameters, output
from ipalib.parameters import DefaultFrom
from ipalib.plugable import Registry
from ipalib.text import _
from ipapython.dn import DN
from ipapython.dnsutil import DNSName
if six.PY3:
unicode = str
__doc__ = _("""
Misc plug-ins
""")
register = Registry()
@register()
class env(Command):
__doc__ = _("Show environment variables.")
takes_args = (
parameters.Str(
'variables',
required=False,
multivalue=True,
),
)
takes_options = (
parameters.Flag(
'server',
required=False,
doc=_(u'Forward to server instead of running locally'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=True,
autofill=True,
),
)
has_output = (
output.Output(
'result',
dict,
doc=_(u'Dictionary mapping variable name to value'),
),
output.Output(
'total',
int,
doc=_(u'Total number of variables env (>= count)'),
),
output.Output(
'count',
int,
doc=_(u'Number of variables returned (<= total)'),
),
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
)
@register()
class plugins(Command):
__doc__ = _("Show all loaded plugins.")
takes_options = (
parameters.Flag(
'server',
required=False,
doc=_(u'Forward to server instead of running locally'),
default=False,
autofill=True,
),
parameters.Flag(
'all',
doc=_(u'retrieve and print all attributes from the server. Affects command output.'),
exclude=('webui',),
default=True,
autofill=True,
),
)
has_output = (
output.Output(
'result',
dict,
doc=_(u'Dictionary mapping plugin names to bases'),
),
output.Output(
'count',
int,
doc=_(u'Number of plugins loaded'),
),
output.Output(
'summary',
(unicode, type(None)),
doc=_(u'User-friendly description of action performed'),
),
)
| gpl-3.0 |
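A hedged sketch of invoking the remotely-defined env command these stubs describe; it assumes an already-bootstrapped and finalized ipalib api with working credentials (all handled elsewhere), and 'api_version' is just one example variable name.

from ipalib import api

result = api.Command.env(u'api_version')
print(result['count'], result['result'])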
vfulco/ansible | v1/ansible/runner/__init__.py | 77 | 69625 | # (c) 2012-2014, Michael DeHaan <michael.dehaan@gmail.com>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import multiprocessing
import signal
import os
import pwd
import Queue
import random
import traceback
import tempfile
import time
import collections
import socket
import base64
import sys
import pipes
import jinja2
import subprocess
import getpass
import ansible.constants as C
import ansible.inventory
from ansible import utils
from ansible.utils import template
from ansible.utils import check_conditional
from ansible.utils import string_functions
from ansible import errors
from ansible import module_common
import poller
import connection
from return_data import ReturnData
from ansible.callbacks import DefaultRunnerCallbacks, vv
from ansible.module_common import ModuleReplacer
from ansible.module_utils.splitter import split_args, unquote
from ansible.cache import FactCache
from ansible.utils import update_hash
module_replacer = ModuleReplacer(strip_comments=False)
try:
from hashlib import sha1
except ImportError:
from sha import sha as sha1
HAS_ATFORK=True
try:
from Crypto.Random import atfork
except ImportError:
HAS_ATFORK=False
multiprocessing_runner = None
OUTPUT_LOCKFILE = tempfile.TemporaryFile()
PROCESS_LOCKFILE = tempfile.TemporaryFile()
################################################
def _executor_hook(job_queue, result_queue, new_stdin):
# attempt workaround of https://github.com/newsapps/beeswithmachineguns/issues/17
# this function is also not present in CentOS 6
if HAS_ATFORK:
atfork()
signal.signal(signal.SIGINT, signal.SIG_IGN)
while not job_queue.empty():
try:
host = job_queue.get(block=False)
return_data = multiprocessing_runner._executor(host, new_stdin)
result_queue.put(return_data)
except Queue.Empty:
pass
except:
traceback.print_exc()
class HostVars(dict):
''' A special view of vars_cache that adds values from the inventory when needed. '''
def __init__(self, vars_cache, inventory, vault_password=None):
self.vars_cache = vars_cache
self.inventory = inventory
self.lookup = {}
self.update(vars_cache)
self.vault_password = vault_password
def __getitem__(self, host):
if host not in self.lookup:
result = self.inventory.get_variables(host, vault_password=self.vault_password).copy()
result.update(self.vars_cache.get(host, {}))
self.lookup[host] = template.template('.', result, self.vars_cache)
return self.lookup[host]
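# Aside (standalone sketch, not part of this module): the lazy-merge
# pattern HostVars implements -- inventory vars fetched on first access,
# overlaid by cached vars, then memoized per host (templating omitted).
class _LazyVarsDemo(object):
    def __init__(self, cache, fetch):
        self.cache, self.fetch, self.memo = cache, fetch, {}
    def __getitem__(self, host):
        if host not in self.memo:
            merged = dict(self.fetch(host))          # inventory baseline
            merged.update(self.cache.get(host, {}))  # cached vars win
            self.memo[host] = merged
        return self.memo[host]
_hv = _LazyVarsDemo({'web1': {'port': 8080}},
                    lambda h: {'port': 80, 'user': 'deploy'})
assert _hv['web1'] == {'port': 8080, 'user': 'deploy'}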
class Runner(object):
''' core API interface to ansible '''
# see bin/ansible for how this is used...
def __init__(self,
host_list=C.DEFAULT_HOST_LIST, # ex: /etc/ansible/hosts, legacy usage
module_path=None, # ex: /usr/share/ansible
module_name=C.DEFAULT_MODULE_NAME, # ex: copy
module_args=C.DEFAULT_MODULE_ARGS, # ex: "src=/tmp/a dest=/tmp/b"
forks=C.DEFAULT_FORKS, # parallelism level
timeout=C.DEFAULT_TIMEOUT, # SSH timeout
pattern=C.DEFAULT_PATTERN, # which hosts? ex: 'all', 'acme.example.org'
remote_user=C.DEFAULT_REMOTE_USER, # ex: 'username'
remote_pass=C.DEFAULT_REMOTE_PASS, # ex: 'password123' or None if using key
remote_port=None, # if SSH on different ports
private_key_file=C.DEFAULT_PRIVATE_KEY_FILE, # if not using keys/passwords
background=0, # async poll every X seconds, else 0 for non-async
basedir=None, # directory of playbook, if applicable
setup_cache=None, # used to share fact data w/ other tasks
vars_cache=None, # used to store variables about hosts
transport=C.DEFAULT_TRANSPORT, # 'ssh', 'paramiko', 'local'
conditional='True', # run only if this fact expression evals to true
callbacks=None, # used for output
module_vars=None, # a playbooks internals thing
play_vars=None, #
play_file_vars=None, #
role_vars=None, #
role_params=None, #
default_vars=None, #
extra_vars=None, # extra vars specified with the playbook(s)
is_playbook=False, # running from playbook or not?
inventory=None, # reference to Inventory object
subset=None, # subset pattern
check=False, # don't make any changes, just try to probe for potential changes
diff=False, # whether to show diffs for template files that change
environment=None, # environment variables (as dict) to use inside the command
complex_args=None, # structured data in addition to module_args, must be a dict
error_on_undefined_vars=C.DEFAULT_UNDEFINED_VAR_BEHAVIOR, # ex. False
accelerate=False, # use accelerated connection
accelerate_ipv6=False, # accelerated connection w/ IPv6
accelerate_port=None, # port to use with accelerated connection
vault_pass=None,
run_hosts=None, # an optional list of pre-calculated hosts to run on
no_log=False, # option to enable/disable logging for a given task
run_once=False, # option to enable/disable host bypass loop for a given task
become=False, # whether to run privilege escalation or not
become_method=C.DEFAULT_BECOME_METHOD,
become_user=C.DEFAULT_BECOME_USER, # ex: 'root'
become_pass=C.DEFAULT_BECOME_PASS, # ex: 'password123' or None
become_exe=C.DEFAULT_BECOME_EXE, # ex: /usr/local/bin/sudo
):
# used to lock multiprocess inputs and outputs at various levels
self.output_lockfile = OUTPUT_LOCKFILE
self.process_lockfile = PROCESS_LOCKFILE
if not complex_args:
complex_args = {}
# storage & defaults
self.check = check
self.diff = diff
self.setup_cache = utils.default(setup_cache, lambda: ansible.cache.FactCache())
self.vars_cache = utils.default(vars_cache, lambda: collections.defaultdict(dict))
self.basedir = utils.default(basedir, lambda: os.getcwd())
self.callbacks = utils.default(callbacks, lambda: DefaultRunnerCallbacks())
self.generated_jid = str(random.randint(0, 999999999999))
self.transport = transport
self.inventory = utils.default(inventory, lambda: ansible.inventory.Inventory(host_list))
self.module_vars = utils.default(module_vars, lambda: {})
self.play_vars = utils.default(play_vars, lambda: {})
self.play_file_vars = utils.default(play_file_vars, lambda: {})
self.role_vars = utils.default(role_vars, lambda: {})
self.role_params = utils.default(role_params, lambda: {})
self.default_vars = utils.default(default_vars, lambda: {})
self.extra_vars = utils.default(extra_vars, lambda: {})
self.always_run = None
self.connector = connection.Connector(self)
self.conditional = conditional
self.delegate_to = None
self.module_name = module_name
self.forks = int(forks)
self.pattern = pattern
self.module_args = module_args
self.timeout = timeout
self.remote_user = remote_user
self.remote_pass = remote_pass
self.remote_port = remote_port
self.private_key_file = private_key_file
self.background = background
self.become = become
self.become_method = become_method
self.become_user_var = become_user
self.become_user = None
self.become_pass = become_pass
self.become_exe = become_exe
self.is_playbook = is_playbook
self.environment = environment
self.complex_args = complex_args
self.error_on_undefined_vars = error_on_undefined_vars
self.accelerate = accelerate
self.accelerate_port = accelerate_port
self.accelerate_ipv6 = accelerate_ipv6
self.callbacks.runner = self
self.omit_token = '__omit_place_holder__%s' % sha1(os.urandom(64)).hexdigest()
self.vault_pass = vault_pass
self.no_log = no_log
self.run_once = run_once
if self.transport == 'smart':
# If the transport is 'smart', check to see if certain conditions
# would prevent us from using ssh, and fallback to paramiko.
# 'smart' is the default since 1.2.1/1.3
self.transport = "ssh"
if sys.platform.startswith('darwin') and self.remote_pass:
# due to a current bug in sshpass on OSX, which can trigger
# a kernel panic even for non-privileged users, we revert to
# paramiko on that OS when an SSH password is specified
self.transport = "paramiko"
else:
# see if SSH can support ControlPersist if not use paramiko
cmd = subprocess.Popen(['ssh','-o','ControlPersist'], stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(out, err) = cmd.communicate()
if "Bad configuration option" in err:
self.transport = "paramiko"
# save the original transport, in case it gets
# changed later via options like accelerate
self.original_transport = self.transport
# misc housekeeping
if subset and self.inventory._subset is None:
# don't override subset when passed from playbook
self.inventory.subset(subset)
# If we get a pre-built list of hosts to run on, from say a playbook, use them.
# Also where we will store the hosts to run on once discovered
self.run_hosts = run_hosts
if self.transport == 'local':
self.remote_user = pwd.getpwuid(os.geteuid())[0]
if module_path is not None:
for i in module_path.split(os.pathsep):
utils.plugins.module_finder.add_directory(i)
utils.plugins.push_basedir(self.basedir)
# ensure we are using unique tmp paths
random.seed()
# *****************************************************
def _complex_args_hack(self, complex_args, module_args):
"""
ansible-playbook allows specifying both key=value string arguments and complex
arguments; however, not all modules use our Python common module system, so
they cannot access the complex ones. An example might be a Bash module. This
short-term hack lets users still pass "args" as a hash of simple scalars to
those modules. We could technically just feed JSON to the module, but that
makes it hard on Bash consumers. As implemented, values in 'args' have LOWER
priority than those on the key=value line, allowing args to provide yet
another way to have pluggable defaults.
"""
if complex_args is None:
return module_args
if not isinstance(complex_args, dict):
raise errors.AnsibleError("complex arguments are not a dictionary: %s" % complex_args)
for (k,v) in complex_args.iteritems():
if isinstance(v, basestring):
module_args = "%s=%s %s" % (k, pipes.quote(v), module_args)
return module_args
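# Aside (standalone sketch, not part of Runner): the flattening the
# docstring above describes, in miniature -- complex args are prepended
# as k=v tokens, so inline key=value args keep the higher precedence.
import pipes as _pipes_demo  # py2-era quoting, matching this codebase
def _flatten_demo(complex_args, module_args):
    for k, v in complex_args.items():
        if isinstance(v, str):  # the original checks basestring on py2
            module_args = "%s=%s %s" % (k, _pipes_demo.quote(v), module_args)
    return module_args
assert _flatten_demo({'dest': '/tmp/b'}, "src=/tmp/a") == "dest=/tmp/b src=/tmp/a"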
# *****************************************************
def _transfer_str(self, conn, tmp, name, data):
''' transfer string to remote file '''
if type(data) == dict:
data = utils.jsonify(data)
afd, afile = tempfile.mkstemp()
afo = os.fdopen(afd, 'w')
try:
if not isinstance(data, unicode):
#ensure the data is valid UTF-8
data.decode('utf-8')
else:
data = data.encode('utf-8')
afo.write(data)
except:
raise errors.AnsibleError("failure encoding into utf-8")
afo.flush()
afo.close()
remote = conn.shell.join_path(tmp, name)
try:
conn.put_file(afile, remote)
finally:
os.unlink(afile)
return remote
# *****************************************************
def _compute_environment_string(self, conn, inject=None):
''' what environment variables to use when running the command? '''
enviro = {}
if self.environment:
enviro = template.template(self.basedir, self.environment, inject, convert_bare=True)
enviro = utils.safe_eval(enviro)
if type(enviro) != dict:
raise errors.AnsibleError("environment must be a dictionary, received %s" % enviro)
return conn.shell.env_prefix(**enviro)
# *****************************************************
def _compute_delegate(self, password, remote_inject):
""" Build a dictionary of all attributes for the delegate host """
delegate = {}
# allow delegated host to be templated
delegate['inject'] = remote_inject.copy()
# set any interpreters
interpreters = []
for i in delegate['inject']:
if i.startswith("ansible_") and i.endswith("_interpreter"):
interpreters.append(i)
for i in interpreters:
del delegate['inject'][i]
port = C.DEFAULT_REMOTE_PORT
# get the vars for the delegate by its name
try:
this_info = delegate['inject']['hostvars'][self.delegate_to]
except:
# make sure the inject is empty for non-inventory hosts
this_info = {}
# get the real ssh_address for the delegate
# and allow ansible_ssh_host to be templated
delegate['ssh_host'] = template.template(
self.basedir,
this_info.get('ansible_ssh_host', self.delegate_to),
this_info,
fail_on_undefined=True
)
delegate['port'] = this_info.get('ansible_ssh_port', port)
delegate['user'] = self._compute_delegate_user(self.delegate_to, delegate['inject'])
delegate['pass'] = this_info.get('ansible_ssh_pass', password)
delegate['private_key_file'] = this_info.get('ansible_ssh_private_key_file', self.private_key_file)
delegate['transport'] = this_info.get('ansible_connection', self.transport)
delegate['become_pass'] = this_info.get('ansible_become_pass', this_info.get('ansible_ssh_pass', self.become_pass))
# Last chance to get private_key_file from global variables.
# this is useful if delegated host is not defined in the inventory
if delegate['private_key_file'] is None:
delegate['private_key_file'] = remote_inject.get('ansible_ssh_private_key_file', None)
if delegate['private_key_file'] is not None:
delegate['private_key_file'] = os.path.expanduser(delegate['private_key_file'])
for i in this_info:
if i.startswith("ansible_") and i.endswith("_interpreter"):
delegate['inject'][i] = this_info[i]
return delegate
def _compute_delegate_user(self, host, inject):
""" Calculate the remote user based on an order of preference """
# inventory > playbook > original_host
actual_user = inject.get('ansible_ssh_user', self.remote_user)
thisuser = None
try:
if host in inject['hostvars']:
if inject['hostvars'][host].get('ansible_ssh_user'):
# user for delegate host in inventory
thisuser = inject['hostvars'][host].get('ansible_ssh_user')
else:
# look up the variables for the host directly from inventory
host_vars = self.inventory.get_variables(host, vault_password=self.vault_pass)
if 'ansible_ssh_user' in host_vars:
thisuser = host_vars['ansible_ssh_user']
except errors.AnsibleError, e:
# the hostname was not found in the inventory, so
# we just ignore this and try the next method
pass
if thisuser is None and self.remote_user:
# user defined by play/runner
thisuser = self.remote_user
if thisuser is not None:
actual_user = thisuser
else:
# fallback to the inventory user of the play host
#actual_user = inject.get('ansible_ssh_user', actual_user)
actual_user = inject.get('ansible_ssh_user', self.remote_user)
return actual_user
def _count_module_args(self, args, allow_dupes=False):
'''
Count the number of k=v pairs in the supplied module args. This is
basically a specialized version of parse_kv() from utils with a few
minor changes.
'''
options = {}
if args is not None:
try:
vargs = split_args(args)
except Exception, e:
if "unbalanced jinja2 block or quotes" in str(e):
raise errors.AnsibleError("error parsing argument string '%s', try quoting the entire line." % args)
else:
raise
for x in vargs:
quoted = x.startswith('"') and x.endswith('"') or x.startswith("'") and x.endswith("'")
if "=" in x and not quoted:
k, v = x.split("=",1)
is_shell_module = self.module_name in ('command', 'shell')
is_shell_param = k in ('creates', 'removes', 'chdir', 'executable')
if k in options and not allow_dupes:
if not(is_shell_module and not is_shell_param):
raise errors.AnsibleError("a duplicate parameter was found in the argument string (%s)" % k)
if is_shell_module and is_shell_param or not is_shell_module:
options[k] = v
return len(options)
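# Aside (standalone sketch): why the caller in _executor_internal_inner
# compares this count before and after templating -- a variable that
# expands to extra tokens adds k=v pairs, and the mismatch flags the
# injection attempt.
import shlex as _shlex_demo  # simplified stand-in for split_args()
_count_demo = lambda s: sum(1 for t in _shlex_demo.split(s) if '=' in t)
assert _count_demo("path={{ p }}") == 1        # one pair pre-templating
assert _count_demo("path=x mode=0666") == 2    # p -> 'x mode=0666' adds one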
# *****************************************************
def _execute_module(self, conn, tmp, module_name, args,
async_jid=None, async_module=None, async_limit=None, inject=None, persist_files=False, complex_args=None, delete_remote_tmp=True):
''' transfer and run a module along with its arguments on the remote side'''
# hack to support fireball mode
if module_name == 'fireball':
args = "%s password=%s" % (args, base64.b64encode(str(utils.key_for_hostname(conn.host))))
if 'port' not in args:
args += " port=%s" % C.ZEROMQ_PORT
(
module_style,
shebang,
module_data
) = self._configure_module(conn, module_name, args, inject, complex_args)
# a remote tmp path may be necessary and not already created
if self._late_needs_tmp_path(conn, tmp, module_style):
tmp = self._make_tmp_path(conn)
remote_module_path = conn.shell.join_path(tmp, module_name)
if (module_style != 'new'
or async_jid is not None
or not conn.has_pipelining
or not C.ANSIBLE_SSH_PIPELINING
or C.DEFAULT_KEEP_REMOTE_FILES
or self.become_method == 'su'):
self._transfer_str(conn, tmp, module_name, module_data)
environment_string = self._compute_environment_string(conn, inject)
if "tmp" in tmp and (self.become and self.become_user != 'root'):
# deal with possible umask issues once you become another user
self._remote_chmod(conn, 'a+r', remote_module_path, tmp)
cmd = ""
in_data = None
if module_style != 'new':
if 'CHECKMODE=True' in args:
# if module isn't using AnsibleModuleCommon infrastructure we can't be certain it knows how to
# do --check mode, so to be safe we will not run it.
return ReturnData(conn=conn, result=dict(skipped=True, msg="cannot yet run check mode against old-style modules"))
elif 'NO_LOG' in args:
return ReturnData(conn=conn, result=dict(skipped=True, msg="cannot use no_log: with old-style modules"))
args = template.template(self.basedir, args, inject)
# decide whether we need to transfer JSON or key=value
argsfile = None
if module_style == 'non_native_want_json':
if complex_args:
complex_args.update(utils.parse_kv(args))
argsfile = self._transfer_str(conn, tmp, 'arguments', utils.jsonify(complex_args))
else:
argsfile = self._transfer_str(conn, tmp, 'arguments', utils.jsonify(utils.parse_kv(args)))
else:
argsfile = self._transfer_str(conn, tmp, 'arguments', args)
if self.become and self.become_user != 'root':
# deal with possible umask issues once become another user
self._remote_chmod(conn, 'a+r', argsfile, tmp)
if async_jid is None:
cmd = "%s %s" % (remote_module_path, argsfile)
else:
cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module, argsfile]])
else:
if async_jid is None:
if conn.has_pipelining and C.ANSIBLE_SSH_PIPELINING and not C.DEFAULT_KEEP_REMOTE_FILES and not self.become_method == 'su':
in_data = module_data
else:
cmd = "%s" % (remote_module_path)
else:
cmd = " ".join([str(x) for x in [remote_module_path, async_jid, async_limit, async_module]])
if not shebang:
raise errors.AnsibleError("module is missing interpreter line")
rm_tmp = None
if "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp:
if not self.become or self.become_user == 'root':
# not sudoing or sudoing to root, so can cleanup files in the same step
rm_tmp = tmp
cmd = conn.shell.build_module_command(environment_string, shebang, cmd, rm_tmp)
cmd = cmd.strip()
sudoable = True
if module_name == "accelerate":
# always run the accelerate module as the user
# specified in the play, not the become_user
sudoable = False
res = self._low_level_exec_command(conn, cmd, tmp, become=self.become, sudoable=sudoable, in_data=in_data)
if "tmp" in tmp and not C.DEFAULT_KEEP_REMOTE_FILES and not persist_files and delete_remote_tmp:
if self.become and self.become_user != 'root':
# not becoming root, so we may not be able to delete files as that other user;
# clean up temp files as the original user in a second step
cmd2 = conn.shell.remove(tmp, recurse=True)
self._low_level_exec_command(conn, cmd2, tmp, sudoable=False)
data = utils.parse_json(res['stdout'], from_remote=True, no_exceptions=True)
if 'parsed' in data and data['parsed'] == False:
data['msg'] += res['stderr']
return ReturnData(conn=conn, result=data)
# *****************************************************
def _executor(self, host, new_stdin):
''' handler for multiprocessing library '''
try:
fileno = sys.stdin.fileno()
except ValueError:
fileno = None
try:
self._new_stdin = new_stdin
if not new_stdin and fileno is not None:
try:
self._new_stdin = os.fdopen(os.dup(fileno))
except OSError, e:
# couldn't dupe stdin, most likely because it's
# not a valid file descriptor, so we just rely on
# using the one that was passed in
pass
exec_rc = self._executor_internal(host, new_stdin)
if type(exec_rc) != ReturnData:
raise Exception("unexpected return type: %s" % type(exec_rc))
# redundant, right?
if not exec_rc.comm_ok:
self.callbacks.on_unreachable(host, exec_rc.result)
return exec_rc
except errors.AnsibleError, ae:
msg = str(ae)
self.callbacks.on_unreachable(host, msg)
return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg))
except Exception:
msg = traceback.format_exc()
self.callbacks.on_unreachable(host, msg)
return ReturnData(host=host, comm_ok=False, result=dict(failed=True, msg=msg))
# *****************************************************
def get_combined_cache(self):
# merge the VARS and SETUP caches for this host
combined_cache = self.setup_cache.copy()
return utils.merge_hash(combined_cache, self.vars_cache)
def get_inject_vars(self, host):
host_variables = self.inventory.get_variables(host, vault_password=self.vault_pass)
combined_cache = self.get_combined_cache()
# use combined_cache and host_variables to template the module_vars
# we update the inject variables with the data we're about to template
# since some of the variables we'll be replacing may be contained there too
module_vars_inject = utils.combine_vars(host_variables, combined_cache.get(host, {}))
module_vars_inject = utils.combine_vars(self.module_vars, module_vars_inject)
module_vars = template.template(self.basedir, self.module_vars, module_vars_inject)
# remove bad variables from the module vars, which may be in there due
# the way role declarations are specified in playbooks
if 'tags' in module_vars:
del module_vars['tags']
if 'when' in module_vars:
del module_vars['when']
# start building the dictionary of injected variables
inject = {}
# default vars are the lowest priority
inject = utils.combine_vars(inject, self.default_vars)
# next come inventory variables for the host
inject = utils.combine_vars(inject, host_variables)
# then the setup_cache which contains facts gathered
inject = utils.combine_vars(inject, self.setup_cache.get(host, {}))
# next come variables from vars and vars files
inject = utils.combine_vars(inject, self.play_vars)
inject = utils.combine_vars(inject, self.play_file_vars)
# next come variables from role vars/main.yml files
inject = utils.combine_vars(inject, self.role_vars)
# then come the module variables
inject = utils.combine_vars(inject, module_vars)
# followed by vars_cache things (set_fact, include_vars, and
# vars_files which had host-specific templating done)
inject = utils.combine_vars(inject, self.vars_cache.get(host, {}))
# role parameters next
inject = utils.combine_vars(inject, self.role_params)
# and finally -e vars are the highest priority
inject = utils.combine_vars(inject, self.extra_vars)
# and then special vars
inject.setdefault('ansible_ssh_user', self.remote_user)
inject['group_names'] = host_variables.get('group_names', [])
inject['groups'] = self.inventory.groups_list()
inject['vars'] = self.module_vars
inject['defaults'] = self.default_vars
inject['environment'] = self.environment
inject['playbook_dir'] = os.path.abspath(self.basedir)
inject['omit'] = self.omit_token
inject['combined_cache'] = combined_cache
return inject
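# Aside (standalone sketch): the precedence chain assembled above, in
# miniature -- each combine overlays the previous layer, so later
# sources win and -e extra vars end up with the highest priority.
def _combine_demo(*layers):
    merged = {}
    for layer in layers:
        merged.update(layer)
    return merged
assert _combine_demo({'port': 80, 'user': 'www'},   # defaults (lowest)
                     {'port': 8080},                # inventory/host vars
                     {'user': 'deploy'},            # -e extra vars (highest)
                     ) == {'port': 8080, 'user': 'deploy'}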
def _executor_internal(self, host, new_stdin):
''' executes any module one or more times '''
# We build the proper injected dictionary for all future
# templating operations in this run
inject = self.get_inject_vars(host)
# Then we selectively merge some variable dictionaries down to a
# single dictionary, used to template the HostVars for this host
temp_vars = self.inventory.get_variables(host, vault_password=self.vault_pass)
temp_vars = utils.combine_vars(temp_vars, inject['combined_cache'] )
temp_vars = utils.combine_vars(temp_vars, {'groups': inject['groups']})
temp_vars = utils.combine_vars(temp_vars, self.play_vars)
temp_vars = utils.combine_vars(temp_vars, self.play_file_vars)
temp_vars = utils.combine_vars(temp_vars, self.extra_vars)
hostvars = HostVars(temp_vars, self.inventory, vault_password=self.vault_pass)
# and we save the HostVars in the injected dictionary so they
# may be referenced from playbooks/templates
inject['hostvars'] = hostvars
host_connection = inject.get('ansible_connection', self.transport)
if host_connection in [ 'paramiko', 'ssh', 'accelerate' ]:
port = hostvars.get('ansible_ssh_port', self.remote_port)
if port is None:
port = C.DEFAULT_REMOTE_PORT
else:
# fireball, local, etc
port = self.remote_port
if self.inventory.basedir() is not None:
inject['inventory_dir'] = self.inventory.basedir()
if self.inventory.src() is not None:
inject['inventory_file'] = self.inventory.src()
# could be already set by playbook code
inject.setdefault('ansible_version', utils.version_info(gitinfo=False))
# allow with_foo to work in playbooks...
items = None
items_plugin = self.module_vars.get('items_lookup_plugin', None)
if items_plugin is not None and items_plugin in utils.plugins.lookup_loader:
basedir = self.basedir
if '_original_file' in inject:
basedir = os.path.dirname(inject['_original_file'])
filesdir = os.path.join(basedir, '..', 'files')
if os.path.exists(filesdir):
basedir = filesdir
try:
items_terms = self.module_vars.get('items_lookup_terms', '')
items_terms = template.template(basedir, items_terms, inject)
items = utils.plugins.lookup_loader.get(items_plugin, runner=self, basedir=basedir).run(items_terms, inject=inject)
except errors.AnsibleUndefinedVariable, e:
if 'has no attribute' in str(e):
# the undefined variable was an attribute of a variable that does
# exist, so try and run this through the conditional check to see
# if the user wanted to skip something on being undefined
if utils.check_conditional(self.conditional, self.basedir, inject, fail_on_undefined=True):
# the conditional check passed, so we have to fail here
raise
else:
# the conditional failed, so we skip this task
result = utils.jsonify(dict(changed=False, skipped=True))
self.callbacks.on_skipped(host, None)
return ReturnData(host=host, result=result)
except errors.AnsibleError, e:
raise
except Exception, e:
raise errors.AnsibleError("Unexpected error while executing task: %s" % str(e))
# strip out any jinja2 template syntax within
# the data returned by the lookup plugin
items = utils._clean_data_struct(items, from_remote=True)
if items is None:
items = []
else:
if type(items) != list:
raise errors.AnsibleError("lookup plugins have to return a list: %r" % items)
if len(items) and utils.is_list_of_strings(items) and self.module_name in ( 'apt', 'yum', 'pkgng', 'zypper', 'dnf' ):
# hack for apt, yum, and pkgng so that with_items maps back into a single module call
use_these_items = []
for x in items:
inject['item'] = x
if not self.conditional or utils.check_conditional(self.conditional, self.basedir, inject, fail_on_undefined=self.error_on_undefined_vars):
use_these_items.append(x)
inject['item'] = ",".join(use_these_items)
items = None
def _safe_template_complex_args(args, inject):
# Ensure the complex args here are a dictionary, but
# first template them if they contain a variable
returned_args = args
if isinstance(args, basestring):
# If the complex_args were evaluated to a dictionary and there are
# more keys in the templated version than the evaled version, some
# param inserted additional keys (the template() call also runs
# safe_eval on the var if it looks like it's a datastructure). If the
# evaled_args are not a dict, it's most likely a whole variable (ie.
# args: {{var}}), in which case there's no way to detect the proper
# count of params in the dictionary.
templated_args = template.template(self.basedir, args, inject, convert_bare=True)
evaled_args = utils.safe_eval(args)
if isinstance(evaled_args, dict) and len(evaled_args) > 0 and len(evaled_args) != len(templated_args):
raise errors.AnsibleError("a variable tried to insert extra parameters into the args for this task")
# set the returned_args to the templated_args
returned_args = templated_args
# and a final check to make sure the complex args are a dict
if returned_args is not None and not isinstance(returned_args, dict):
raise errors.AnsibleError("args must be a dictionary, received %s" % returned_args)
return returned_args
# logic to decide how to run things depends on whether with_items is used
if items is None:
complex_args = _safe_template_complex_args(self.complex_args, inject)
return self._executor_internal_inner(host, self.module_name, self.module_args, inject, port, complex_args=complex_args)
elif len(items) > 0:
# executing using with_items, so make multiple calls
# TODO: refactor
if self.background > 0:
raise errors.AnsibleError("lookup plugins (with_*) cannot be used with async tasks")
all_comm_ok = True
all_changed = False
all_failed = False
results = []
for x in items:
# use a fresh inject for each item
this_inject = inject.copy()
this_inject['item'] = x
complex_args = _safe_template_complex_args(self.complex_args, this_inject)
result = self._executor_internal_inner(
host,
self.module_name,
self.module_args,
this_inject,
port,
complex_args=complex_args
)
if 'stdout' in result.result and 'stdout_lines' not in result.result:
result.result['stdout_lines'] = result.result['stdout'].splitlines()
results.append(result.result)
if result.comm_ok == False:
all_comm_ok = False
all_failed = True
break
for x in results:
if x.get('changed') == True:
all_changed = True
if (x.get('failed') == True) or ('failed_when_result' in x and [x['failed_when_result']] or [('rc' in x) and (x['rc'] != 0)])[0]:
all_failed = True
break
msg = 'All items completed'
if all_failed:
msg = "One or more items failed."
rd_result = dict(failed=all_failed, changed=all_changed, results=results, msg=msg)
if not all_failed:
del rd_result['failed']
return ReturnData(host=host, comm_ok=all_comm_ok, result=rd_result)
else:
self.callbacks.on_skipped(host, None)
return ReturnData(host=host, comm_ok=True, result=dict(changed=False, skipped=True))
# *****************************************************
def _executor_internal_inner(self, host, module_name, module_args, inject, port, is_chained=False, complex_args=None):
''' decides how to invoke a module '''
# late processing of parameterized become_user (with_items,..)
if self.become_user_var is not None:
self.become_user = template.template(self.basedir, self.become_user_var, inject)
# module_name may be dynamic (but cannot contain {{ ansible_ssh_user }})
module_name = template.template(self.basedir, module_name, inject)
if module_name in utils.plugins.action_loader:
if self.background != 0:
raise errors.AnsibleError("async mode is not supported with the %s module" % module_name)
handler = utils.plugins.action_loader.get(module_name, self)
elif self.background == 0:
handler = utils.plugins.action_loader.get('normal', self)
else:
handler = utils.plugins.action_loader.get('async', self)
if type(self.conditional) != list:
self.conditional = [ self.conditional ]
for cond in self.conditional:
if not utils.check_conditional(cond, self.basedir, inject, fail_on_undefined=self.error_on_undefined_vars):
result = dict(changed=False, skipped=True)
if self.no_log:
result = utils.censor_unlogged_data(result)
self.callbacks.on_skipped(host, result)
else:
self.callbacks.on_skipped(host, inject.get('item',None))
return ReturnData(host=host, result=utils.jsonify(result))
if getattr(handler, 'setup', None) is not None:
handler.setup(module_name, inject)
conn = None
actual_host = inject.get('ansible_ssh_host', host)
# allow ansible_ssh_host to be templated
actual_host = template.template(self.basedir, actual_host, inject, fail_on_undefined=True)
actual_port = port
actual_user = inject.get('ansible_ssh_user', self.remote_user)
actual_pass = inject.get('ansible_ssh_pass', self.remote_pass)
actual_transport = inject.get('ansible_connection', self.transport)
actual_private_key_file = inject.get('ansible_ssh_private_key_file', self.private_key_file)
actual_private_key_file = template.template(self.basedir, actual_private_key_file, inject, fail_on_undefined=True)
self.become = utils.boolean(inject.get('ansible_become', inject.get('ansible_sudo', inject.get('ansible_su', self.become))))
self.become_user = inject.get('ansible_become_user', inject.get('ansible_sudo_user', inject.get('ansible_su_user',self.become_user)))
self.become_pass = inject.get('ansible_become_pass', inject.get('ansible_sudo_pass', inject.get('ansible_su_pass', self.become_pass)))
self.become_exe = inject.get('ansible_become_exe', inject.get('ansible_sudo_exe', self.become_exe))
self.become_method = inject.get('ansible_become_method', self.become_method)
# select default root user in case self.become requested
# but no user specified; happens e.g. in host vars when
# just ansible_become=True is specified
if self.become and self.become_user is None:
self.become_user = 'root'
if actual_private_key_file is not None:
actual_private_key_file = os.path.expanduser(actual_private_key_file)
if self.accelerate and actual_transport != 'local':
# fix to get the inventory name of the host for the accelerate plugin
if inject.get('ansible_ssh_host', None):
self.accelerate_inventory_host = host
else:
self.accelerate_inventory_host = None
# if we're using accelerated mode, force the
# transport to accelerate
actual_transport = "accelerate"
if not self.accelerate_port:
self.accelerate_port = C.ACCELERATE_PORT
actual_port = inject.get('ansible_ssh_port', port)
# the delegated host may have different SSH port configured, etc
# and we need to transfer those, and only those, variables
self.delegate_to = inject.get('delegate_to', None)
if self.delegate_to:
self.delegate_to = template.template(self.basedir, self.delegate_to, inject)
if self.delegate_to is not None:
delegate = self._compute_delegate(actual_pass, inject)
actual_transport = delegate['transport']
actual_host = delegate['ssh_host']
actual_port = delegate['port']
actual_user = delegate['user']
actual_pass = delegate['pass']
actual_private_key_file = delegate['private_key_file']
self.become_pass = delegate.get('become_pass',delegate.get('sudo_pass'))
inject = delegate['inject']
# set resolved delegate_to into inject so modules can call _remote_checksum
inject['delegate_to'] = self.delegate_to
# user/pass may still contain variables at this stage
actual_user = template.template(self.basedir, actual_user, inject)
try:
actual_pass = template.template(self.basedir, actual_pass, inject)
self.become_pass = template.template(self.basedir, self.become_pass, inject)
except:
# ignore password template errors, could be triggered by password characters #10468
pass
# make actual_user available as __magic__ ansible_ssh_user variable
inject['ansible_ssh_user'] = actual_user
try:
if actual_transport == 'accelerate':
# for accelerate, we stuff both ports into a single
# variable so that we don't have to mangle other function
# calls just to accommodate this one case
actual_port = [actual_port, self.accelerate_port]
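# (illustrative: e.g. with SSH port 22 and an accelerate port of 5099 this
# becomes the two-element list [22, 5099]; the exact values depend on
# configuration)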
elif actual_port is not None:
actual_port = int(template.template(self.basedir, actual_port, inject))
except ValueError, e:
result = dict(failed=True, msg="FAILED: Configured port \"%s\" is not a valid port, expected integer" % actual_port)
return ReturnData(host=host, comm_ok=False, result=result)
try:
if self.delegate_to or host != actual_host:
delegate_host = host
else:
delegate_host = None
conn = self.connector.connect(actual_host, actual_port, actual_user, actual_pass, actual_transport, actual_private_key_file, delegate_host)
default_shell = getattr(conn, 'default_shell', '')
shell_type = inject.get('ansible_shell_type')
if not shell_type:
if default_shell:
shell_type = default_shell
else:
shell_type = os.path.basename(C.DEFAULT_EXECUTABLE)
shell_plugin = utils.plugins.shell_loader.get(shell_type)
if shell_plugin is None:
shell_plugin = utils.plugins.shell_loader.get('sh')
conn.shell = shell_plugin
except errors.AnsibleConnectionFailed, e:
result = dict(failed=True, msg="FAILED: %s" % str(e))
return ReturnData(host=host, comm_ok=False, result=result)
tmp = ''
# action plugins may DECLARE via TRANSFERS_FILES = True that they need a remote tmp path working dir
if self._early_needs_tmp_path(module_name, handler):
tmp = self._make_tmp_path(conn)
# allow module args to work as a dictionary
# though it is usually a string
if isinstance(module_args, dict):
module_args = utils.serialize_args(module_args)
# render module_args and complex_args templates
try:
# When templating module_args, we need to be careful to ensure
# that no variables inadvertently (or maliciously) add params
# to the list of args. We do this by counting the number of k=v
# pairs before and after templating.
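# e.g. (values are illustrative): module_args 'msg={{insult}}' with
# insult='you owe=me' templates to 'msg=you owe=me', turning one k=v
# pair into two, so the counts below differ and we raise an error.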
num_args_pre = self._count_module_args(module_args, allow_dupes=True)
module_args = template.template(self.basedir, module_args, inject, fail_on_undefined=self.error_on_undefined_vars)
num_args_post = self._count_module_args(module_args)
if num_args_pre != num_args_post:
raise errors.AnsibleError("A variable inserted a new parameter into the module args. " + \
"Be sure to quote variables if they contain equal signs (for example: \"{{var}}\").")
# And we also make sure nothing added in special flags for things
# like the command/shell module (ie. #USE_SHELL)
if '#USE_SHELL' in module_args:
raise errors.AnsibleError("A variable tried to add #USE_SHELL to the module arguments.")
complex_args = template.template(self.basedir, complex_args, inject, fail_on_undefined=self.error_on_undefined_vars)
except jinja2.exceptions.UndefinedError, e:
raise errors.AnsibleUndefinedVariable("One or more undefined variables: %s" % str(e))
# filter omitted arguments out from complex_args
if complex_args:
complex_args = dict(filter(lambda x: x[1] != self.omit_token, complex_args.iteritems()))
# Filter omitted arguments out from module_args.
# We do this with split_args instead of parse_kv to ensure
# that things are not unquoted/requoted incorrectly
args = split_args(module_args)
final_args = []
for arg in args:
if '=' in arg:
k,v = arg.split('=', 1)
if unquote(v) != self.omit_token:
final_args.append(arg)
else:
# not a k=v param, append it
final_args.append(arg)
module_args = ' '.join(final_args)
result = handler.run(conn, tmp, module_name, module_args, inject, complex_args)
# Code for do until feature
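# (illustrative playbook usage of until/retries/delay/register; the task
# content is hypothetical)
#   - shell: cat /tmp/status
#     register: result
#     until: result.stdout.find("ready") != -1
#     retries: 5
#     delay: 10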
until = self.module_vars.get('until', None)
if until is not None and result.comm_ok:
inject[self.module_vars.get('register')] = result.result
cond = template.template(self.basedir, until, inject, expand_lists=False)
if not utils.check_conditional(cond, self.basedir, inject, fail_on_undefined=self.error_on_undefined_vars):
retries = template.template(self.basedir, self.module_vars.get('retries'), inject, expand_lists=False)
delay = self.module_vars.get('delay')
for x in range(1, int(retries) + 1):
# template the delay, cast to float and sleep
delay = template.template(self.basedir, delay, inject, expand_lists=False)
delay = float(delay)
time.sleep(delay)
tmp = ''
if self._early_needs_tmp_path(module_name, handler):
tmp = self._make_tmp_path(conn)
result = handler.run(conn, tmp, module_name, module_args, inject, complex_args)
result.result['attempts'] = x
vv("Result from run %i is: %s" % (x, result.result))
inject[self.module_vars.get('register')] = result.result
cond = template.template(self.basedir, until, inject, expand_lists=False)
if utils.check_conditional(cond, self.basedir, inject, fail_on_undefined=self.error_on_undefined_vars):
break
if result.result['attempts'] == int(retries) and not utils.check_conditional(cond, self.basedir, inject, fail_on_undefined=self.error_on_undefined_vars):
result.result['failed'] = True
result.result['msg'] = "Task failed as maximum retries was encountered"
else:
result.result['attempts'] = 0
conn.close()
if not result.comm_ok:
# connection or parsing errors...
self.callbacks.on_unreachable(host, result.result)
else:
data = result.result
# https://github.com/ansible/ansible/issues/4958
if hasattr(sys.stdout, "isatty"):
if "stdout" in data and sys.stdout.isatty():
if not string_functions.isprintable(data['stdout']):
data['stdout'] = ''.join(c for c in data['stdout'] if string_functions.isprintable(c))
if 'item' in inject:
result.result['item'] = inject['item']
result.result['invocation'] = dict(
module_args=module_args,
module_name=module_name
)
changed_when = self.module_vars.get('changed_when')
failed_when = self.module_vars.get('failed_when')
if (changed_when is not None or failed_when is not None) and self.background == 0:
register = self.module_vars.get('register')
if register is not None:
if 'stdout' in data:
data['stdout_lines'] = data['stdout'].splitlines()
inject[register] = data
# only run the final checks if the async_status has finished,
# or if we're not running an async_status check at all
if (module_name == 'async_status' and "finished" in data) or module_name != 'async_status':
if changed_when is not None and 'skipped' not in data:
data['changed'] = utils.check_conditional(changed_when, self.basedir, inject, fail_on_undefined=self.error_on_undefined_vars)
if failed_when is not None and 'skipped' not in data:
data['failed_when_result'] = data['failed'] = utils.check_conditional(failed_when, self.basedir, inject, fail_on_undefined=self.error_on_undefined_vars)
if is_chained:
# no callbacks
return result
if 'skipped' in data:
self.callbacks.on_skipped(host, inject.get('item',None))
if self.no_log:
data = utils.censor_unlogged_data(data)
if not result.is_successful():
ignore_errors = self.module_vars.get('ignore_errors', False)
self.callbacks.on_failed(host, data, ignore_errors)
else:
if self.diff:
self.callbacks.on_file_diff(conn.host, result.diff)
self.callbacks.on_ok(host, data)
return result
def _early_needs_tmp_path(self, module_name, handler):
''' detect if a tmp path should be created before the handler is called '''
if module_name in utils.plugins.action_loader:
return getattr(handler, 'TRANSFERS_FILES', False)
# other modules never need tmp path at early stage
return False
def _late_needs_tmp_path(self, conn, tmp, module_style):
if "tmp" in tmp:
# tmp has already been created
return False
if not conn.has_pipelining or not C.ANSIBLE_SSH_PIPELINING or C.DEFAULT_KEEP_REMOTE_FILES or self.become_method == 'su':
# tmp is necessary to store module source code
return True
if not conn.has_pipelining:
# tmp is necessary to store the module source code
# or we want to keep the files on the target system
return True
if module_style != "new":
# even when conn has pipelining, old style modules need tmp to store arguments
return True
return False
# *****************************************************
def _low_level_exec_command(self, conn, cmd, tmp, sudoable=False,
executable=None, become=False, in_data=None):
''' execute a command string over SSH, return the output '''
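# returns a dict shaped like dict(rc=0, stdout='...', stderr=''); the
# 'rc' key is omitted when the connection plugin reports no exit code,
# and rc is None when there was no command to run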
# this can be skipped with powershell modules when there is no analog to a Windows command (like chmod)
if cmd:
if executable is None:
executable = C.DEFAULT_EXECUTABLE
become_user = self.become_user
# compare connection user to (su|sudo)_user and disable if the same
# assume connection type is local if no user attribute
this_user = getattr(conn, 'user', getpass.getuser())
if (not become and this_user == become_user):
sudoable = False
become = False
rc, stdin, stdout, stderr = conn.exec_command(cmd,
tmp,
become_user=become_user,
sudoable=sudoable,
executable=executable,
in_data=in_data)
if type(stdout) not in [ str, unicode ]:
out = ''.join(stdout.readlines())
else:
out = stdout
if type(stderr) not in [ str, unicode ]:
err = ''.join(stderr.readlines())
else:
err = stderr
if rc is not None:
return dict(rc=rc, stdout=out, stderr=err)
else:
return dict(stdout=out, stderr=err)
return dict(rc=None, stdout='', stderr='')
# *****************************************************
def _remote_chmod(self, conn, mode, path, tmp, sudoable=False, become=False):
''' issue a remote chmod command '''
cmd = conn.shell.chmod(mode, path)
return self._low_level_exec_command(conn, cmd, tmp, sudoable=sudoable, become=become)
# *****************************************************
def _remote_expand_user(self, conn, path, tmp):
''' takes a remote path and performs tilde expansion on the remote host '''
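# illustrative: with become_user 'deploy', a path of '~/archive' splits
# into ('~', 'archive'); '~' is rewritten to '~deploy', expanded on the
# remote host, and re-joined with 'archive'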
if not path.startswith('~'):
return path
split_path = path.split(os.path.sep, 1)
expand_path = split_path[0]
if expand_path == '~':
if self.become and self.become_user:
expand_path = '~%s' % self.become_user
cmd = conn.shell.expand_user(expand_path)
data = self._low_level_exec_command(conn, cmd, tmp, sudoable=False, become=False)
initial_fragment = utils.last_non_blank_line(data['stdout'])
if not initial_fragment:
# Something went wrong trying to expand the path remotely. Return
# the original string
return path
if len(split_path) > 1:
return conn.shell.join_path(initial_fragment, *split_path[1:])
else:
return initial_fragment
# *****************************************************
def _remote_checksum(self, conn, tmp, path, inject):
''' computes a checksum of a remote path; returns "1" when the file does not exist '''
# Lookup the python interp from the host or delegate
# host == inven_host when there is no delegate
host = inject['inventory_hostname']
if 'delegate_to' in inject:
delegate = inject['delegate_to']
if delegate:
# host == None when the delegate is not in inventory
host = None
# delegate set, check whether the delegate has inventory vars
delegate = template.template(self.basedir, delegate, inject)
if delegate in inject['hostvars']:
# host == delegate if we need to lookup the
# python_interpreter from the delegate's inventory vars
host = delegate
if host:
python_interp = inject['hostvars'][host].get('ansible_python_interpreter', 'python')
else:
python_interp = 'python'
cmd = conn.shell.checksum(path, python_interp)
#TODO: remove this horrible hack and find way to get checksum to work with other privilege escalation methods
if self.become_method == 'sudo':
sudoable = True
else:
sudoable = False
data = self._low_level_exec_command(conn, cmd, tmp, sudoable=sudoable)
data2 = utils.last_non_blank_line(data['stdout'])
try:
if data2 == '':
# this may happen if the connection to the remote server
# failed, so just return "INVALIDCHECKSUM" to avoid errors
return "INVALIDCHECKSUM"
else:
return data2.split()[0]
except IndexError:
sys.stderr.write("warning: Calculating checksum failed unusually, please report this to the list so it can be fixed\n")
sys.stderr.write("command: %s\n" % cmd)
sys.stderr.write("----\n")
sys.stderr.write("output: %s\n" % data)
sys.stderr.write("----\n")
# this will signal that it changed and allow things to keep going
return "INVALIDCHECKSUM"
# *****************************************************
def _make_tmp_path(self, conn):
''' make and return a temporary path on a remote box '''
basefile = 'ansible-tmp-%s-%s' % (time.time(), random.randint(0, 2**48))
use_system_tmp = False
if self.become and self.become_user != 'root':
use_system_tmp = True
tmp_mode = None
if self.remote_user != 'root' or (self.become and self.become_user != 'root'):
tmp_mode = 'a+rx'
cmd = conn.shell.mkdtemp(basefile, use_system_tmp, tmp_mode)
result = self._low_level_exec_command(conn, cmd, None, sudoable=False)
# error handling on this seems a little aggressive?
if result['rc'] != 0:
if result['rc'] == 5:
output = 'Authentication failure.'
elif result['rc'] == 255 and self.transport in ['ssh']:
if utils.VERBOSITY > 3:
output = 'SSH encountered an unknown error. The output was:\n%s' % (result['stdout']+result['stderr'])
else:
output = 'SSH encountered an unknown error during the connection. We recommend you re-run the command using -vvvv, which will enable SSH debugging output to help diagnose the issue'
elif 'No space left on device' in result['stderr']:
output = result['stderr']
else:
output = 'Authentication or permission failure. In some cases, you may have been able to authenticate and did not have permissions on the remote directory. Consider changing the remote temp path in ansible.cfg to a path rooted in "/tmp". Failed command was: %s, exited with result %d' % (cmd, result['rc'])
if 'stdout' in result and result['stdout'] != '':
output = output + ": %s" % result['stdout']
raise errors.AnsibleError(output)
rc = conn.shell.join_path(utils.last_non_blank_line(result['stdout']).strip(), '')
# Catch failure conditions, files should never be
# written to locations in /.
if rc == '/':
raise errors.AnsibleError('failed to resolve remote temporary directory from %s: `%s` returned empty string' % (basefile, cmd))
return rc
# *****************************************************
def _remove_tmp_path(self, conn, tmp_path):
''' Remove a tmp_path. '''
if "-tmp-" in tmp_path:
cmd = conn.shell.remove(tmp_path, recurse=True)
self._low_level_exec_command(conn, cmd, None, sudoable=False)
# If we have gotten here we have a working ssh configuration.
# If ssh breaks we could leave tmp directories out on the remote system.
# *****************************************************
def _copy_module(self, conn, tmp, module_name, module_args, inject, complex_args=None):
''' transfer a module over SFTP, does not run it '''
(
module_style,
module_shebang,
module_data
) = self._configure_module(conn, module_name, module_args, inject, complex_args)
module_remote_path = conn.shell.join_path(tmp, module_name)
self._transfer_str(conn, tmp, module_name, module_data)
return (module_remote_path, module_style, module_shebang)
# *****************************************************
def _configure_module(self, conn, module_name, module_args, inject, complex_args=None):
''' find module and configure it '''
# Search module path(s) for named module.
module_suffixes = getattr(conn, 'default_suffixes', None)
module_path = utils.plugins.module_finder.find_plugin(module_name, module_suffixes)
if module_path is None:
module_path2 = utils.plugins.module_finder.find_plugin('ping', module_suffixes)
if module_path2 is not None:
raise errors.AnsibleFileNotFound("module %s not found in configured module paths" % (module_name))
else:
raise errors.AnsibleFileNotFound("module %s not found in configured module paths. Additionally, core modules are missing. If this is a checkout, run 'git submodule update --init --recursive' to correct this problem." % (module_name))
# insert shared code and arguments into the module
(module_data, module_style, module_shebang) = module_replacer.modify_module(
module_path, complex_args, module_args, inject
)
return (module_style, module_shebang, module_data)
# *****************************************************
def _parallel_exec(self, hosts):
''' handles multiprocessing when more than 1 fork is required '''
manager = multiprocessing.Manager()
job_queue = manager.Queue()
for host in hosts:
job_queue.put(host)
result_queue = manager.Queue()
try:
fileno = sys.stdin.fileno()
except ValueError:
fileno = None
workers = []
for i in range(self.forks):
new_stdin = None
if fileno is not None:
try:
new_stdin = os.fdopen(os.dup(fileno))
except OSError, e:
# couldn't dupe stdin, most likely because it's
# not a valid file descriptor, so we just rely on
# using the one that was passed in
pass
prc = multiprocessing.Process(target=_executor_hook,
args=(job_queue, result_queue, new_stdin))
prc.start()
workers.append(prc)
try:
for worker in workers:
worker.join()
except KeyboardInterrupt:
for worker in workers:
worker.terminate()
worker.join()
results = []
try:
while not result_queue.empty():
results.append(result_queue.get(block=False))
except socket.error:
raise errors.AnsibleError("<interrupted>")
return results
# *****************************************************
def _partition_results(self, results):
''' separate results by ones we contacted & ones we didn't '''
if results is None:
return None
results2 = dict(contacted={}, dark={})
for result in results:
host = result.host
if host is None:
raise Exception("internal error, host not set")
if result.communicated_ok():
results2["contacted"][host] = result.result
else:
results2["dark"][host] = result.result
# hosts which were contacted but never got a chance to return
for host in self.run_hosts:
if not (host in results2['dark'] or host in results2['contacted']):
results2["dark"][host] = {}
return results2
# *****************************************************
def run(self):
''' xfer & run module on all matched hosts '''
# find hosts that match the pattern
if not self.run_hosts:
self.run_hosts = self.inventory.list_hosts(self.pattern)
hosts = self.run_hosts
if len(hosts) == 0:
self.callbacks.on_no_hosts()
return dict(contacted={}, dark={})
global multiprocessing_runner
multiprocessing_runner = self
results = None
# Check if this is an action plugin. Some of them are designed
# to be run once per group of hosts. Example module: pause,
# which runs once per hostgroup rather than pausing once per
# host.
p = utils.plugins.action_loader.get(self.module_name, self)
if self.forks == 0 or self.forks > len(hosts):
self.forks = len(hosts)
if (p and (getattr(p, 'BYPASS_HOST_LOOP', None)) or self.run_once):
# Expose the current hostgroup to the bypassing plugins
self.host_set = hosts
# We aren't iterating over all the hosts in this
# group. So, just choose the "delegate_to" host if that is defined and is
# one of the targeted hosts, otherwise pick the first host in our group to
# construct the conn object with.
if self.delegate_to is not None and self.delegate_to in hosts:
host = self.delegate_to
else:
host = hosts[0]
result_data = self._executor(host, None).result
# Create a ResultData item for each host in this group
# using the returned result. If we didn't do this we would
# get false reports of dark hosts.
results = [ ReturnData(host=h, result=result_data, comm_ok=True) \
for h in hosts ]
del self.host_set
elif self.forks > 1:
try:
results = self._parallel_exec(hosts)
except IOError, ie:
print ie.errno
if ie.errno == 32:
# broken pipe from Ctrl+C
raise errors.AnsibleError("interrupted")
raise
else:
results = [ self._executor(h, None) for h in hosts ]
return self._partition_results(results)
# *****************************************************
def run_async(self, time_limit):
''' Run this module asynchronously and return a poller. '''
self.background = time_limit
results = self.run()
return results, poller.AsyncPoller(results, self)
# *****************************************************
def noop_on_check(self, inject):
''' Should the runner run in check mode or not ? '''
# initialize self.always_run on first call
if self.always_run is None:
self.always_run = self.module_vars.get('always_run', False)
self.always_run = check_conditional(
self.always_run, self.basedir, inject, fail_on_undefined=True)
return (self.check and not self.always_run)
| gpl-3.0 |
chouseknecht/ansible | lib/ansible/modules/network/cloudengine/ce_bfd_view.py | 11 | 20187 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = """
---
module: ce_bfd_view
version_added: "2.4"
short_description: Manages BFD session view configuration on HUAWEI CloudEngine devices.
description:
- Manages BFD session view configuration on HUAWEI CloudEngine devices.
author: QijunPan (@QijunPan)
options:
session_name:
description:
- Specifies the name of a BFD session.
The value is a string of 1 to 15 case-sensitive characters without spaces.
required: true
local_discr:
description:
- Specifies the local discriminator of a BFD session.
The value is an integer that ranges from 1 to 16384.
remote_discr:
description:
- Specifies the remote discriminator of a BFD session.
The value is an integer that ranges from 1 to 4294967295.
min_tx_interval:
description:
- Specifies the minimum interval for sending BFD packets.
The value is an integer that ranges from 50 to 1000, in milliseconds.
min_rx_interval:
description:
- Specifies the minimum interval for receiving BFD packets.
The value is an integer that ranges from 50 to 1000, in milliseconds.
detect_multi:
description:
- Specifies the local detection multiplier of a BFD session.
The value is an integer that ranges from 3 to 50.
wtr_interval:
description:
- Specifies the WTR time of a BFD session.
The value is an integer that ranges from 1 to 60, in minutes.
The default value is 0.
tos_exp:
description:
- Specifies a priority for BFD control packets.
The value is an integer ranging from 0 to 7.
The default value is 7, which is the highest priority.
admin_down:
description:
- Enables the BFD session to enter the AdminDown state.
By default, a BFD session is enabled.
The value is of boolean type; the default is 'no'.
type: bool
default: 'no'
description:
description:
- Specifies the description of a BFD session.
The value is a string of 1 to 51 case-sensitive characters with spaces.
state:
description:
- Determines whether the config should be present or not on the device.
default: present
choices: ['present', 'absent']
extends_documentation_fragment: ce
"""
EXAMPLES = '''
- name: bfd view module test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: Set the local discriminator of a BFD session to 80 and the remote discriminator to 800
ce_bfd_view:
session_name: atob
local_discr: 80
remote_discr: 800
state: present
provider: '{{ cli }}'
- name: Set the minimum interval for receiving BFD packets to 500 ms
ce_bfd_view:
session_name: atob
min_rx_interval: 500
state: present
provider: '{{ cli }}'
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {
"admin_down": false,
"description": null,
"detect_multi": null,
"local_discr": 80,
"min_rx_interval": null,
"min_tx_interval": null,
"remote_discr": 800,
"session_name": "atob",
"state": "present",
"tos_exp": null,
"wtr_interval": null
}
existing:
description: k/v pairs of existing configuration
returned: always
type: dict
sample: {
"session": {
"adminDown": "false",
"createType": "SESS_STATIC",
"description": null,
"detectMulti": "3",
"localDiscr": null,
"minRxInt": null,
"minTxInt": null,
"remoteDiscr": null,
"sessName": "atob",
"tosExp": null,
"wtrTimerInt": null
}
}
end_state:
description: k/v pairs of configuration after module execution
returned: always
type: dict
sample: {
"session": {
"adminDown": "false",
"createType": "SESS_STATIC",
"description": null,
"detectMulti": "3",
"localDiscr": "80",
"minRxInt": null,
"minTxInt": null,
"remoteDiscr": "800",
"sessName": "atob",
"tosExp": null,
"wtrTimerInt": null
}
}
updates:
description: commands sent to the device
returned: always
type: list
sample: [
"bfd atob",
"discriminator local 80",
"discriminator remote 800"
]
changed:
description: check to see if a change was made on the device
returned: always
type: bool
sample: true
'''
import sys
from xml.etree import ElementTree
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.network.cloudengine.ce import get_nc_config, set_nc_config, ce_argument_spec
CE_NC_GET_BFD = """
<filter type="subtree">
<bfd xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
%s
</bfd>
</filter>
"""
CE_NC_GET_BFD_GLB = """
<bfdSchGlobal>
<bfdEnable></bfdEnable>
</bfdSchGlobal>
"""
CE_NC_GET_BFD_SESSION = """
<bfdCfgSessions>
<bfdCfgSession>
<sessName>%s</sessName>
<createType></createType>
<localDiscr></localDiscr>
<remoteDiscr></remoteDiscr>
<minTxInt></minTxInt>
<minRxInt></minRxInt>
<detectMulti></detectMulti>
<wtrTimerInt></wtrTimerInt>
<tosExp></tosExp>
<adminDown></adminDown>
<description></description>
</bfdCfgSession>
</bfdCfgSessions>
"""
class BfdView(object):
"""Manages BFD View"""
def __init__(self, argument_spec):
self.spec = argument_spec
self.module = None
self.__init_module__()
# module input info
self.session_name = self.module.params['session_name']
self.local_discr = self.module.params['local_discr']
self.remote_discr = self.module.params['remote_discr']
self.min_tx_interval = self.module.params['min_tx_interval']
self.min_rx_interval = self.module.params['min_rx_interval']
self.detect_multi = self.module.params['detect_multi']
self.wtr_interval = self.module.params['wtr_interval']
self.tos_exp = self.module.params['tos_exp']
self.admin_down = self.module.params['admin_down']
self.description = self.module.params['description']
self.state = self.module.params['state']
# host info
self.host = self.module.params['host']
self.username = self.module.params['username']
self.port = self.module.params['port']
# state
self.changed = False
self.bfd_dict = dict()
self.updates_cmd = list()
self.commands = list()
self.results = dict()
self.proposed = dict()
self.existing = dict()
self.end_state = dict()
def __init_module__(self):
"""init module"""
self.module = AnsibleModule(argument_spec=self.spec,
supports_check_mode=True)
def get_bfd_dict(self):
"""bfd config dict"""
bfd_dict = dict()
bfd_dict["global"] = dict()
bfd_dict["session"] = dict()
conf_str = CE_NC_GET_BFD % (CE_NC_GET_BFD_GLB + (CE_NC_GET_BFD_SESSION % self.session_name))
xml_str = get_nc_config(self.module, conf_str)
if "<data/>" in xml_str:
return bfd_dict
xml_str = xml_str.replace('\r', '').replace('\n', '').\
replace('xmlns="urn:ietf:params:xml:ns:netconf:base:1.0"', "").\
replace('xmlns="http://www.huawei.com/netconf/vrp"', "")
root = ElementTree.fromstring(xml_str)
# get bfd global info
glb = root.find("bfd/bfdSchGlobal")
if glb:
for attr in glb:
bfd_dict["global"][attr.tag] = attr.text
# get bfd session info
sess = root.find("bfd/bfdCfgSessions/bfdCfgSession")
if sess:
for attr in sess:
bfd_dict["session"][attr.tag] = attr.text
return bfd_dict
def config_session(self):
"""configures bfd session"""
xml_str = ""
cmd_list = list()
cmd_session = ""
if not self.session_name:
return xml_str
if self.bfd_dict["global"].get("bfdEnable", "false") != "true":
self.module.fail_json(msg="Error: Please enable BFD globally first.")
if not self.bfd_dict["session"]:
self.module.fail_json(msg="Error: BFD session is not exist.")
session = self.bfd_dict["session"]
xml_str = "<sessName>%s</sessName>" % self.session_name
cmd_session = "bfd %s" % self.session_name
# BFD session view
if self.local_discr is not None:
if self.state == "present" and str(self.local_discr) != session.get("localDiscr"):
xml_str += "<localDiscr>%s</localDiscr>" % self.local_discr
cmd_list.append("discriminator local %s" % self.local_discr)
elif self.state == "absent" and str(self.local_discr) == session.get("localDiscr"):
xml_str += "<localDiscr/>"
cmd_list.append("undo discriminator local")
if self.remote_discr is not None:
if self.state == "present" and str(self.remote_discr) != session.get("remoteDiscr"):
xml_str += "<remoteDiscr>%s</remoteDiscr>" % self.remote_discr
cmd_list.append("discriminator remote %s" % self.remote_discr)
elif self.state == "absent" and str(self.remote_discr) == session.get("remoteDiscr"):
xml_str += "<remoteDiscr/>"
cmd_list.append("undo discriminator remote")
if self.min_tx_interval is not None:
if self.state == "present" and str(self.min_tx_interval) != session.get("minTxInt"):
xml_str += "<minTxInt>%s</minTxInt>" % self.min_tx_interval
cmd_list.append("min-tx-interval %s" % self.min_tx_interval)
elif self.state == "absent" and str(self.min_tx_interval) == session.get("minTxInt"):
xml_str += "<minTxInt/>"
cmd_list.append("undo min-tx-interval")
if self.min_rx_interval is not None:
if self.state == "present" and str(self.min_rx_interval) != session.get("minRxInt"):
xml_str += "<minRxInt>%s</minRxInt>" % self.min_rx_interval
cmd_list.append("min-rx-interval %s" % self.min_rx_interval)
elif self.state == "absent" and str(self.min_rx_interval) == session.get("minRxInt"):
xml_str += "<minRxInt/>"
cmd_list.append("undo min-rx-interval")
if self.detect_multi is not None:
if self.state == "present" and str(self.detect_multi) != session.get("detectMulti"):
xml_str += " <detectMulti>%s</detectMulti>" % self.detect_multi
cmd_list.append("detect-multiplier %s" % self.detect_multi)
elif self.state == "absent" and str(self.detect_multi) == session.get("detectMulti"):
xml_str += " <detectMulti/>"
cmd_list.append("undo detect-multiplier")
if self.wtr_interval is not None:
if self.state == "present" and str(self.wtr_interval) != session.get("wtrTimerInt"):
xml_str += " <wtrTimerInt>%s</wtrTimerInt>" % self.wtr_interval
cmd_list.append("wtr %s" % self.wtr_interval)
elif self.state == "absent" and str(self.wtr_interval) == session.get("wtrTimerInt"):
xml_str += " <wtrTimerInt/>"
cmd_list.append("undo wtr")
if self.tos_exp is not None:
if self.state == "present" and str(self.tos_exp) != session.get("tosExp"):
xml_str += " <tosExp>%s</tosExp>" % self.tos_exp
cmd_list.append("tos-exp %s" % self.tos_exp)
elif self.state == "absent" and str(self.tos_exp) == session.get("tosExp"):
xml_str += " <tosExp/>"
cmd_list.append("undo tos-exp")
if self.admin_down and session.get("adminDown", "false") == "false":
xml_str += " <adminDown>true</adminDown>"
cmd_list.append("shutdown")
elif not self.admin_down and session.get("adminDown", "false") == "true":
xml_str += " <adminDown>false</adminDown>"
cmd_list.append("undo shutdown")
if self.description:
if self.state == "present" and self.description != session.get("description"):
xml_str += "<description>%s</description>" % self.description
cmd_list.append("description %s" % self.description)
elif self.state == "absent" and self.description == session.get("description"):
xml_str += "<description/>"
cmd_list.append("undo description")
if xml_str.endswith("</sessName>"):
# no config update
return ""
else:
cmd_list.insert(0, cmd_session)
self.updates_cmd.extend(cmd_list)
return '<bfdCfgSessions><bfdCfgSession operation="merge">' + xml_str\
+ '</bfdCfgSession></bfdCfgSessions>'
def netconf_load_config(self, xml_str):
"""load bfd config by netconf"""
if not xml_str:
return
xml_cfg = """
<config>
<bfd xmlns="http://www.huawei.com/netconf/vrp" content-version="1.0" format-version="1.0">
%s
</bfd>
</config>""" % xml_str
set_nc_config(self.module, xml_cfg)
self.changed = True
def check_params(self):
"""Check all input params"""
# check session_name
if not self.session_name:
self.module.fail_json(msg="Error: Missing required arguments: session_name.")
if self.session_name:
if len(self.session_name) < 1 or len(self.session_name) > 15:
self.module.fail_json(msg="Error: Session name is invalid.")
# check local_discr
if self.local_discr is not None:
if self.local_discr < 1 or self.local_discr > 16384:
self.module.fail_json(msg="Error: Session local_discr is not ranges from 1 to 16384.")
# check remote_discr
if self.remote_discr is not None:
if self.remote_discr < 1 or self.remote_discr > 4294967295:
self.module.fail_json(msg="Error: Session remote_discr is not ranges from 1 to 4294967295.")
# check min_tx_interval
if self.min_tx_interval is not None:
if self.min_tx_interval < 50 or self.min_tx_interval > 1000:
self.module.fail_json(msg="Error: Session min_tx_interval is not ranges from 50 to 1000.")
# check min_rx_interval
if self.min_rx_interval is not None:
if self.min_rx_interval < 50 or self.min_rx_interval > 1000:
self.module.fail_json(msg="Error: Session min_rx_interval is not ranges from 50 to 1000.")
# check detect_multi
if self.detect_multi is not None:
if self.detect_multi < 3 or self.detect_multi > 50:
self.module.fail_json(msg="Error: Session detect_multi is not ranges from 3 to 50.")
# check wtr_interval
if self.wtr_interval is not None:
if self.wtr_interval < 1 or self.wtr_interval > 60:
self.module.fail_json(msg="Error: Session wtr_interval is not ranges from 1 to 60.")
# check tos_exp
if self.tos_exp is not None:
if self.tos_exp < 0 or self.tos_exp > 7:
self.module.fail_json(msg="Error: Session tos_exp is not ranges from 0 to 7.")
# check description
if self.description:
if len(self.description) < 1 or len(self.description) > 51:
self.module.fail_json(msg="Error: Session description is invalid.")
def get_proposed(self):
"""get proposed info"""
# base config
self.proposed["session_name"] = self.session_name
self.proposed["local_discr"] = self.local_discr
self.proposed["remote_discr"] = self.remote_discr
self.proposed["min_tx_interval"] = self.min_tx_interval
self.proposed["min_rx_interval"] = self.min_rx_interval
self.proposed["detect_multi"] = self.detect_multi
self.proposed["wtr_interval"] = self.wtr_interval
self.proposed["tos_exp"] = self.tos_exp
self.proposed["admin_down"] = self.admin_down
self.proposed["description"] = self.description
self.proposed["state"] = self.state
def get_existing(self):
"""get existing info"""
if not self.bfd_dict:
return
self.existing["session"] = self.bfd_dict.get("session")
def get_end_state(self):
"""get end state info"""
bfd_dict = self.get_bfd_dict()
if not bfd_dict:
return
self.end_state["session"] = bfd_dict.get("session")
if self.end_state == self.existing:
self.changed = False
def work(self):
"""worker"""
self.check_params()
self.bfd_dict = self.get_bfd_dict()
self.get_existing()
self.get_proposed()
# deal present or absent
xml_str = ''
if self.session_name:
xml_str += self.config_session()
# update to device
if xml_str:
self.netconf_load_config(xml_str)
self.changed = True
self.get_end_state()
self.results['changed'] = self.changed
self.results['proposed'] = self.proposed
self.results['existing'] = self.existing
self.results['end_state'] = self.end_state
if self.changed:
self.results['updates'] = self.updates_cmd
else:
self.results['updates'] = list()
self.module.exit_json(**self.results)
def main():
"""Module main"""
argument_spec = dict(
session_name=dict(required=True, type='str'),
local_discr=dict(required=False, type='int'),
remote_discr=dict(required=False, type='int'),
min_tx_interval=dict(required=False, type='int'),
min_rx_interval=dict(required=False, type='int'),
detect_multi=dict(required=False, type='int'),
wtr_interval=dict(required=False, type='int'),
tos_exp=dict(required=False, type='int'),
admin_down=dict(required=False, type='bool', default=False),
description=dict(required=False, type='str'),
state=dict(required=False, default='present', choices=['present', 'absent'])
)
argument_spec.update(ce_argument_spec)
module = BfdView(argument_spec)
module.work()
if __name__ == '__main__':
main()
| gpl-3.0 |
SteveHNH/ansible | lib/ansible/modules/network/nxos/nxos_aaa_server.py | 22 | 10872 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_aaa_server
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages AAA server global configuration.
description:
- Manages AAA server global configuration
author:
- Jason Edelman (@jedelman8)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- The server_type parameter is always required.
- If encrypt_type is not supplied, the global AAA server key will be
stored as encrypted (type 7).
- Changes to the global AAA server key with encrypt_type=0
are not idempotent.
- If global AAA server key is not found, it's shown as "unknown"
- state=default will set the supplied parameters to their default values.
The parameters that you want to default must also be set to default.
If global_key=default, the global key will be removed.
options:
server_type:
description:
- The server type is either radius or tacacs.
required: true
choices: ['radius', 'tacacs']
global_key:
description:
- Global AAA shared secret.
required: false
default: null
encrypt_type:
description:
- The state of encryption applied to the entered global key.
0 clear text, 7 encrypted. Type-6 encryption is not supported.
required: false
default: null
choices: ['0', '7']
deadtime:
description:
- Duration for which a non-reachable AAA server is skipped,
in minutes. Range is 1-1440. Device default is 0.
required: false
default: null
server_timeout:
description:
- Global AAA server timeout period, in seconds. Range is 1-60.
Device default is 5.
required: false
default: null
directed_request:
description:
- Enables direct authentication requests to AAA server.
Device default is disabled.
required: false
default: null
choices: ['enabled', 'disabled']
state:
description:
- Manage the state of the resource.
required: true
default: present
choices: ['present','default']
'''
EXAMPLES = '''
# Radius Server Basic settings
- name: "Radius Server Basic settings"
nxos_aaa_server:
server_type: radius
server_timeout: 9
deadtime: 20
directed_request: enabled
# Tacacs Server Basic settings
- name: "Tacacs Server Basic settings"
nxos_aaa_server:
server_type: tacacs
server_timeout: 8
deadtime: 19
directed_request: disabled
# Setting Global Key
- name: "AAA Server Global Key"
nxos_aaa_server:
server_type: radius
global_key: test_key
'''
RETURN = '''
commands:
description: command sent to the device
returned: always
type: list
sample: ["radius-server deadtime 22", "radius-server timeout 11",
"radius-server directed-request"]
'''
import re
from ansible.module_utils.nxos import load_config, run_commands
from ansible.module_utils.nxos import nxos_argument_spec, check_args
from ansible.module_utils.basic import AnsibleModule
def execute_show_command(command, module, command_type='cli_show'):
command = {
'command': command,
'output': 'text',
}
return run_commands(module, command)
def flatten_list(command_lists):
flat_command_list = []
for command in command_lists:
if isinstance(command, list):
flat_command_list.extend(command)
else:
flat_command_list.append(command)
return flat_command_list
def get_aaa_server_info(server_type, module):
aaa_server_info = {}
server_command = 'show {0}-server'.format(server_type)
request_command = 'show {0}-server directed-request'.format(server_type)
global_key_command = 'show run | sec {0}'.format(server_type)
aaa_regex = r'.*{0}-server\skey\s\d\s+(?P<key>\S+).*'.format(server_type)
server_body = execute_show_command(
server_command, module, command_type='cli_show_ascii')[0]
split_server = server_body.splitlines()
for line in split_server:
if line.startswith('timeout'):
aaa_server_info['server_timeout'] = line.split(':')[1]
elif line.startswith('deadtime'):
aaa_server_info['deadtime'] = line.split(':')[1]
request_body = execute_show_command(
request_command, module, command_type='cli_show_ascii')[0]
aaa_server_info['directed_request'] = request_body.replace('\n', '')
key_body = execute_show_command(
global_key_command, module, command_type='cli_show_ascii')[0]
try:
match_global_key = re.match(aaa_regex, key_body, re.DOTALL)
group_key = match_global_key.groupdict()
aaa_server_info['global_key'] = group_key["key"].replace('\"', '')
except (AttributeError, TypeError):
aaa_server_info['global_key'] = 'unknown'
return aaa_server_info
def set_aaa_server_global_key(encrypt_type, key, server_type):
if not encrypt_type:
encrypt_type = ''
return '{0}-server key {1} {2}'.format(
server_type, encrypt_type, key)
def config_aaa_server(params, server_type):
cmds = []
deadtime = params.get('deadtime')
server_timeout = params.get('server_timeout')
directed_request = params.get('directed_request')
encrypt_type = params.get('encrypt_type', '7')
global_key = params.get('global_key')
if deadtime is not None:
cmds.append('{0}-server deadtime {1}'.format(server_type, deadtime))
if server_timeout is not None:
cmds.append('{0}-server timeout {1}'.format(server_type, server_timeout))
if directed_request is not None:
if directed_request == 'enabled':
cmds.append('{0}-server directed-request'.format(server_type))
elif directed_request == 'disabled':
cmds.append('no {0}-server directed-request'.format(server_type))
if global_key is not None:
cmds.append('{0}-server key {1} {2}'.format(server_type, encrypt_type,
global_key))
return cmds
def default_aaa_server(existing, params, server_type):
cmds = []
deadtime = params.get('deadtime')
server_timeout = params.get('server_timeout')
directed_request = params.get('directed_request')
global_key = params.get('global_key')
existing_key = existing.get('global_key')
if deadtime is not None:
cmds.append('no {0}-server deadtime 1'.format(server_type))
if server_timeout is not None:
cmds.append('no {0}-server timeout 1'.format(server_type))
if directed_request is not None:
cmds.append('no {0}-server directed-request'.format(server_type))
if global_key is not None and existing_key is not None:
cmds.append('no {0}-server key 7 {1}'.format(server_type, existing_key))
return cmds
def main():
argument_spec = dict(
server_type=dict(type='str', choices=['radius', 'tacacs'], required=True),
global_key=dict(type='str'),
encrypt_type=dict(type='str', choices=['0', '7']),
deadtime=dict(type='str'),
server_timeout=dict(type='str'),
directed_request=dict(type='str', choices=['enabled', 'disabled', 'default']),
state=dict(choices=['default', 'present'], default='present'),
)
argument_spec.update(nxos_argument_spec)
module = AnsibleModule(argument_spec=argument_spec, supports_check_mode=True)
warnings = list()
check_args(module, warnings)
results = {'changed': False, 'commands': [], 'warnings': warnings}
server_type = module.params['server_type']
global_key = module.params['global_key']
encrypt_type = module.params['encrypt_type']
deadtime = module.params['deadtime']
server_timeout = module.params['server_timeout']
directed_request = module.params['directed_request']
state = module.params['state']
if encrypt_type and not global_key:
module.fail_json(msg='encrypt_type must be used with global_key.')
args = dict(server_type=server_type, global_key=global_key,
encrypt_type=encrypt_type, deadtime=deadtime,
server_timeout=server_timeout, directed_request=directed_request)
proposed = dict((k, v) for k, v in args.items() if v is not None)
existing = get_aaa_server_info(server_type, module)
commands = []
if state == 'present':
if deadtime:
try:
if int(deadtime) < 0 or int(deadtime) > 1440:
raise ValueError
except ValueError:
module.fail_json(
msg='deadtime must be an integer between 0 and 1440')
if server_timeout:
try:
if int(server_timeout) < 1 or int(server_timeout) > 60:
raise ValueError
except ValueError:
module.fail_json(
msg='server_timeout must be an integer between 1 and 60')
delta = dict(set(proposed.items()).difference(
existing.items()))
if delta:
command = config_aaa_server(delta, server_type)
if command:
commands.append(command)
elif state == 'default':
for key, value in proposed.items():
if key != 'server_type' and value != 'default':
module.fail_json(
msg='Parameters must be set to "default" '
'when state=default')
command = default_aaa_server(existing, proposed, server_type)
if command:
commands.append(command)
cmds = flatten_list(commands)
if cmds:
results['changed'] = True
if not module.check_mode:
load_config(module, cmds)
if 'configure' in cmds:
cmds.pop(0)
results['commands'] = cmds
module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
wmanley/stb-tester | tests/validate-ocr.py | 1 | 4119 | #!/usr/bin/env python
u"""
validate-ocr.py can be run on a corpus of test images, reporting how good a job
stbt has done of reading the text. Thus it can be used to measure improvements
to the OCR algorithm and, more importantly, detect any regressions introduced.
The corpus consists of a set of images with corresponding text files describing
what the images contain. e.g. the image `main-menu.png` might have a
corresponding file `main-menu.png.txt` containing:
lang: deu
---
Recht
Links
Oben
Unten
validate-ocr.py would then check that the result of calling
`ocr('main-menu.png', lang='deu')` contains the text "Recht", "Links", "Oben"
and "Unten", printing a diffable text report to stdout and a more verbose,
human-friendly HTML report to the filename given to `--report-filename`.
Everything above '---' is interpreted as YAML and passed to the ocr() function.
Blank lines are ignored.
This tool is designed such that it can be run on corpuses outside the
stb-tester git tree to allow corpuses containing screen captures from many
set-top boxes without bloating the main stb-tester repo or risking upsetting
the owners of the various set-top box UIs. """
import argparse
import os
import sys
import cv2
import jinja2
import yaml
def test(imgname, phrases, params):
from stbt import ocr
img = cv2.imread(imgname)
if img is None:
raise IOError('No such file or directory "%s"' % imgname)
text = ocr(img, **params)
matches = sum(1 for x in phrases if x in text)
return {
"matches": matches,
"total": len(phrases),
"percentage": float(matches) / len(phrases) * 100,
"name": os.path.basename(imgname),
"path": imgname,
"phrases": [{"text": x, "match": x in text} for x in phrases],
"text": text,
}
def main(argv):
parser = argparse.ArgumentParser(
description=__doc__,
formatter_class=argparse.RawDescriptionHelpFormatter)
parser.add_argument("--report-filename",
help="Filename to write the HTML report to")
parser.add_argument("corpus", help="Directory containing test corpus")
args = parser.parse_args(argv[1:])
results = []
files = []
for root, _dirs, dfiles in os.walk(args.corpus):
files += [root + '/' + f for f in dfiles if f.endswith('.png.txt')]
for n, f in zip(range(len(files)), files):
sys.stderr.write("%i / %i Complete\r" % (n, len(files)))
imgname = f[:-len('.txt')]
with open(f) as of:
text = of.read()
sections = text.split('---', 1)
if len(sections) == 2:
params = yaml.safe_load(sections[0])
else:
params = {}
phrases = [x.decode('utf-8') for x in sections[-1].split('\n')
if x.strip() != '']
results.append(test(imgname, phrases, params))
sys.stderr.write('\n')
total = sum(x['total'] for x in results)
total_matched = sum(x['matches'] for x in results)
if args.report_filename:
template = os.path.dirname(__file__) + '/validate-ocr.html.jinja'
with open(args.report_filename, 'w') as f:
f.write(jinja2.Template(open(template).read()).render(
images=results,
total=total,
total_matched=total_matched,
percentage=float(total_matched) / total * 100).encode('utf-8'))
sys.stdout.write("Passes:\n")
for x in results:
if x['matches'] > 0:
sys.stdout.write(" " + x['name'] + '\n')
for y in x['phrases']:
if y['match']:
sys.stdout.write(' ' + y['text'].encode('utf-8') + '\n')
sys.stdout.write("Failures:\n")
for x in results:
if x['matches'] < x['total']:
sys.stdout.write(" " + x['name'] + '\n')
for y in x['phrases']:
if not y['match']:
sys.stdout.write(' ' + y['text'].encode('utf-8') + '\n')
return 0 if total == total_matched else 1
if __name__ == '__main__':
sys.exit(main(sys.argv))
| lgpl-2.1 |
Southpaw-TACTIC/TACTIC | src/pyasm/biz/prod_setting.py | 1 | 5114 | ###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ['ProdSetting', 'ProjectSetting']
from pyasm.common import Container, TacticException
from pyasm.search import *
from .project import Project
class ProjectSetting(SObject):
'''Defines all of the settings for a given production'''
SEARCH_TYPE = "config/prod_setting"
def _get_container_key(cls, key, search_type=None):
if search_type:
key = "ProjectSetting:%s:%s" % (key, search_type)
else:
key = "ProjectSetting:%s" % key
return key
_get_container_key = classmethod(_get_container_key)
def get_value_by_key(cls, key, search_type=None):
'''
container_key = cls._get_container_key(key,search_type)
value = Container.get(container_key)
if value:
return value
'''
if Project.get_project_name() in ['sthpw', 'admin']:
return ''
prod_setting = cls.get_by_key(key, search_type)
value = ''
if prod_setting:
value = prod_setting.get_value("value")
return value
get_value_by_key = classmethod(get_value_by_key)
def get_by_key(cls, key, search_type=None):
import time
start = time.time()
project = Project.get_project_code()
dict_key = '%s:%s' %(key, search_type)
try:
search = Search(cls.SEARCH_TYPE, project_code=project)
except:
return None
search.add_filter("key", key)
if search_type:
search.add_filter("search_type", search_type)
if Project.get_project_name() in ['admin', 'sthpw']:
return None
prod_setting = ProjectSetting.get_by_search(search, dict_key)
return prod_setting
get_by_key = classmethod(get_by_key)
def get_seq_by_key(cls, key, search_type=None):
seq = []
value = cls.get_value_by_key(key, search_type)
if value:
seq = value.split("|")
return seq
get_seq_by_key = classmethod(get_seq_by_key)
def add_value_by_key(cls, key, value, search_type=None):
seq = cls.get_seq_by_key(key, search_type)
if not seq:
seq = []
elif value in seq:
return
seq.append(value)
setting = cls.get_by_key(key, search_type)
if not setting:
return
setting.set_value( "value", "|".join(seq) )
setting.commit()
container_key = cls._get_container_key(key,search_type)
value = Container.put(container_key, None)
add_value_by_key = classmethod(add_value_by_key)
def get_map_by_key(cls, key, search_type=None):
''' this is more like an ordered map'''
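# illustrative only (key/value pairs are hypothetical): a stored value of
# "low:10|high:90" is returned as [('low', '10'), ('high', '90')]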
seq = []
map = []
value = cls.get_value_by_key(key, search_type)
if value:
seq = value.split("|")
for item in seq:
try:
key, value = item.split(':')
map.append((key, value))
except Exception as e:
raise TacticException('ProjectSetting should be formatted like <key1>:<value1>|<key2>:<value2>|...')
return map
get_map_by_key = classmethod(get_map_by_key)
def get_dict_by_key(cls, key, search_type=None):
''' this is to retrieve an unordered dict'''
seq = []
dict = {}
value = cls.get_value_by_key(key, search_type)
if value:
seq = value.split("|")
for item in seq:
try:
key, value = item.split(':')
dict[key] = value
except Exception as e:
raise TacticException('ProjectSetting should be formatted like <key1>:<value1>|<key2>:<value2>|...')
return dict
get_dict_by_key = classmethod(get_dict_by_key)
def create(key, value, type="string", description='', search_type=''):
'''create a ProjectSetting'''
if Project.get_project_name() in ['admin', 'sthpw']:
return None
ProjectSetting.clear_cache()
setting = ProjectSetting.get_by_key(key, search_type)
if not setting:
setting= SObjectFactory.create( ProjectSetting.SEARCH_TYPE )
setting.set_value("key", key)
setting.set_value("value", value)
setting.set_value("type", type)
if description:
setting.set_value("description", description)
if search_type:
setting.set_value("search_type", search_type)
else:
setting.set_value("value", value)
setting.commit()
return setting
create = staticmethod(create)
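# Illustrative usage (key, value and description are hypothetical):
#
#   ProjectSetting.create("sobject_order", "asset:1|shot:2",
#                         type="map", description="example ordering map")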
# DEPRECATED
class ProdSetting(ProjectSetting):
pass
| epl-1.0 |
Endika/website | website_slides/controllers/main.py | 10 | 18112 | # -*- coding: utf-8 -*-
import base64
import logging
import werkzeug
from openerp.addons.web import http
from openerp.exceptions import AccessError
from openerp.http import request
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
class WebsiteSlides(http.Controller):
_slides_per_page = 12
_slides_per_list = 20
_order_by_criterion = {
'date': 'date_published desc',
'view': 'total_views desc',
'vote': 'likes desc',
}
def _set_viewed_slide(self, slide, view_mode):
slide_key = '%s_%s' % (view_mode, request.session_id)
viewed_slides = request.session.setdefault(slide_key, list())
if slide.id not in viewed_slides:
if view_mode == 'slide':
slide.sudo().slide_views += 1
elif view_mode == 'embed':
slide.sudo().embed_views += 1
viewed_slides.append(slide.id)
request.session[slide_key] = viewed_slides
return True
def _get_slide_detail(self, slide):
most_viewed_slides = slide.get_most_viewed_slides(
self._slides_per_list)
related_slides = slide.get_related_slides(self._slides_per_list)
return {
'slide': slide,
'most_viewed_slides': most_viewed_slides,
'related_slides': related_slides,
'user': request.env.user,
'is_public_user': request.env.user == request.website.user_id,
'comments': (slide.channel_id.can_see_full and
slide.website_message_ids or []),
'private': not slide.channel_id.can_see_full,
}
# --------------------------------------------------
# MAIN / SEARCH
# --------------------------------------------------
@http.route('/slides', type='http', auth="public", website=True)
def slides_index(self, *args, **post):
""" Returns a list of available channels: if only one is available,
redirects directly to its slides
"""
channels = request.env['slide.channel'].search(
[], order='sequence, id')
if not channels:
return request.website.render("website_slides.channel_not_found")
elif len(channels) == 1:
return request.redirect("/slides/%s" % channels.id)
return request.website.render('website_slides.channels', {
'channels': channels,
'user': request.env.user,
'is_public_user': request.env.user == request.website.user_id
})
@http.route([
'/slides/<model("slide.channel"):channel>',
'/slides/<model("slide.channel"):channel>/page/<int:page>',
'/slides/<model("slide.channel"):channel>/<string:slide_type>',
'/slides/<model("slide.channel"):channel>/<string:slide_type>'
'/page/<int:page>',
'/slides/<model("slide.channel"):channel>/tag/'
'<model("slide.tag"):tag>',
'/slides/<model("slide.channel"):channel>/tag/'
'<model("slide.tag"):tag>/page/<int:page>',
'/slides/<model("slide.channel"):channel>/category/'
'<model("slide.category"):category>',
'/slides/<model("slide.channel"):channel>/category/'
'<model("slide.category"):category>/page/<int:page>',
'/slides/<model("slide.channel"):channel>/category/'
'<model("slide.category"):category>/<string:slide_type>',
'/slides/<model("slide.channel"):channel>/category/'
'<model("slide.category"):category>/'
'<string:slide_type>/page/<int:page>'],
type='http', auth="public", website=True)
def channel(
self, channel, category=None, tag=None, page=1, slide_type=None,
sorting='creation', search=None, **kw):
user = request.env.user
Slide = request.env['slide.slide']
domain = [('channel_id', '=', channel.id)]
pager_url = "/slides/%s" % (channel.id)
pager_args = {}
if search:
domain += [
'|', '|',
('name', 'ilike', search),
('description', 'ilike', search),
('index_content', 'ilike', search)]
pager_args['search'] = search
else:
if category:
domain += [('category_id', '=', category.id)]
pager_url += "/category/%s" % category.id
elif tag:
domain += [('tag_ids.id', '=', tag.id)]
pager_url += "/tag/%s" % tag.id
if slide_type:
domain += [('slide_type', '=', slide_type)]
pager_url += "/%s" % slide_type
if not sorting or sorting not in self._order_by_criterion:
sorting = 'date'
order = self._order_by_criterion[sorting]
pager_args['sorting'] = sorting
pager_count = Slide.search_count(domain)
pager = request.website.pager(url=pager_url, total=pager_count,
page=page, step=self._slides_per_page,
scope=self._slides_per_page,
url_args=pager_args)
slides = Slide.search(
domain, limit=self._slides_per_page, offset=pager['offset'],
order=order)
values = {
'channel': channel,
'category': category,
'slides': slides,
'tag': tag,
'slide_type': slide_type,
'sorting': sorting,
'user': user,
'pager': pager,
'is_public_user': user == request.website.user_id,
}
if search:
values['search'] = search
return request.website.render(
'website_slides.slides_search', values)
# Display uncategorized slides
if not slide_type and not category:
category_datas = []
for category in Slide.read_group(
domain, ['category_id'], ['category_id']):
category_id, name = category.get('category_id') or \
(False, _('Uncategorized'))
category_datas.append({
'id': category_id,
'name': name,
'total': category['category_id_count'],
'slides': Slide.search(
category['__domain'], limit=4, offset=0, order=order)
})
values.update({
'category_datas': category_datas,
})
return request.website.render('website_slides.home', values)
# --------------------------------------------------
    # SLIDE.SLIDE CONTROLLERS
# --------------------------------------------------
@http.route(
'/slides/slide/<model("slide.slide"):slide>',
type='http', auth="public", website=True)
def slide_view(self, slide, **kwargs):
values = self._get_slide_detail(slide)
if not values.get('private'):
self._set_viewed_slide(slide, 'slide')
return request.website.render(
'website_slides.slide_detail_view', values)
@http.route('/slides/slide/<model("slide.slide"):slide>/pdf_content',
type='http', auth="public", website=True)
def slide_get_pdf_content(self, slide):
response = werkzeug.wrappers.Response()
response.data = slide.datas.decode('base64')
response.mimetype = 'application/pdf'
return response
@http.route(
'/slides/slide/<model("slide.slide"):slide>/comment', type='http',
auth="public", methods=['POST'], website=True)
def slide_comment(self, slide, **post):
""" Controller for message_post. Public user can post; their name and
email is used to find or create a partner and post as admin with the
right partner. Their comments are not published by default. Logged
users can post as usual. """
# TDE TODO :
# - fix _find_partner_from_emails -> is an api.one + strange results +
# should work as public user
# - subscribe partner instead of user writing the message ?
# - public user -> cannot create mail.message ?
if not post.get('comment'):
return werkzeug.utils.redirect(
request.httprequest.referrer + "#discuss")
# public user: check or find author based on email, do not
# subscribe public user and do not publish their comments by default
# to avoid direct spam
if request.uid == request.website.user_id.id:
if not post.get('email'):
return werkzeug.utils.redirect(
request.httprequest.referrer + "#discuss")
# FIXME: public user has no right to create mail.message, should
# be investigated - using SUPERUSER_ID meanwhile
            contextual_slide = slide.sudo().with_context(
                mail_create_nosubscribe=True)
# TDE FIXME: check in mail_thread, find partner from emails
# should maybe work as public user
partner_id = slide.sudo()._find_partner_from_emails(
[post.get('email')])[0][0]
if partner_id:
partner = request.env['res.partner'].sudo().browse(partner_id)
else:
partner = request.env['res.partner'].sudo().create({
'name': post.get('name', post['email']),
'email': post['email']
})
post_kwargs = {
'author_id': partner.id,
'website_published': False,
'email_from': partner.email,
}
# logged user: as usual, published by default
else:
contextual_slide = slide
post_kwargs = {}
contextual_slide.message_post(
body=post['comment'],
type='comment',
subtype='mt_comment',
**post_kwargs
)
return werkzeug.utils.redirect(
request.httprequest.referrer + "#discuss")
@http.route('/slides/slide/<model("slide.slide"):slide>/download',
type='http', auth="public", website=True)
def slide_download(self, slide):
if slide.download_security == 'public' or \
(slide.download_security == 'user' and request.session.uid):
filecontent = base64.b64decode(slide.datas)
disposition = 'attachment; filename=%s.pdf' % (
werkzeug.urls.url_quote(slide.name))
return request.make_response(
filecontent,
[('Content-Type', 'application/pdf'),
('Content-Length', len(filecontent)),
('Content-Disposition', disposition)])
elif not request.session.uid and slide.download_security == 'user':
return werkzeug.utils.redirect('/web?redirect=/slides/slide/%s' % (
slide.id))
return request.website.render("website.403")
@http.route('/slides/slide/<model("slide.slide"):slide>/promote',
type='http', auth='public', website=True)
def slide_set_promoted(self, slide):
slide.channel_id.promoted_slide_id = slide.id
return request.redirect("/slides/%s" % slide.channel_id.id)
# JSONRPC
@http.route('/slides/slide/like', type='json', auth="user", website=True)
def slide_like(self, slide_id):
slide = request.env['slide.slide'].browse(int(slide_id))
slide.likes += 1
return slide.likes
@http.route(
'/slides/slide/dislike', type='json', auth="user", website=True)
def slide_dislike(self, slide_id):
slide = request.env['slide.slide'].browse(int(slide_id))
slide.dislikes += 1
return slide.dislikes
@http.route(['/slides/slide/send_share_email'], type='json', auth='user',
website=True)
def slide_send_share_email(self, slide_id, email):
slide = request.env['slide.slide'].browse(int(slide_id))
result = slide.send_share_email(email)
return result
@http.route('/slides/slide/overlay', type='json', auth="public",
website=True)
def slide_get_next_slides(self, slide_id):
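        """Return suggestion data for the overlay: up to 9 slides, related
        slides first, topped up with the most viewed ones."""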
slide = request.env['slide.slide'].browse(int(slide_id))
slides_to_suggest = 9
def slide_mapped_dict(slide):
return {
'img_src': '/website/image/slide.slide/%s/image_thumb' % (
slide.id),
'caption': slide.name,
'url': slide.website_url
}
vals = map(slide_mapped_dict, slide.get_related_slides(
slides_to_suggest))
        add_more_slide = slides_to_suggest - len(vals)
        if add_more_slide > 0:
vals += map(slide_mapped_dict, slide.get_most_viewed_slides(
add_more_slide))
return vals
# --------------------------------------------------
# TOOLS
# --------------------------------------------------
@http.route(['/slides/dialog_preview'], type='json', auth='user',
methods=['POST'], website=True)
def dialog_preview(self, **data):
Slide = request.env['slide.slide']
document_type, document_id = Slide._find_document_data_from_url(
data['url'])
preview = {}
if not document_id:
            preview['error'] = _(
                'Please enter a valid YouTube or Google Doc URL')
return preview
existing_slide = Slide.search([('channel_id', '=', int(
data['channel_id'])), ('document_id', '=', document_id)], limit=1)
if existing_slide:
preview['error'] = _(
'This video already exists in this channel '
'<a target="_blank" href="/slides/slide/%s">'
'click here to view it </a>' % existing_slide.id)
return preview
values = Slide._parse_document_url(
data['url'], only_preview_fields=True)
if values.get('error'):
preview['error'] = _(
'Could not fetch data from url. Document or access right not '
'available.\nHere is the received response: %s' % (
values['error']))
return preview
return values
@http.route(['/slides/add_slide'], type='json', auth='user',
methods=['POST'], website=True)
def create_slide(self, *args, **post):
payload = request.httprequest.content_length
        # The payload is the total request content size, so it is not the
        # exact size of the file. Client-side validation already exists;
        # this is a server-side double check in case the client bypasses
        # or alters it.
        if payload / 1024 / 1024 > 17:
return {'error': _('File is too big.')}
values = dict((fname, post[fname]) for fname in [
'name', 'url', 'tag_ids', 'slide_type', 'channel_id',
'mime_type', 'datas', 'description', 'image', 'index_content',
'website_published'] if post.get(fname))
if post.get('category_id'):
if post['category_id'][0] == 0:
values['category_id'] = request.env['slide.category'].create({
'name': post['category_id'][1]['name'],
'channel_id': values.get('channel_id')}).id
else:
values['category_id'] = post['category_id'][0]
        # Handle exceptions during slide creation and send an error
        # notification to the client; otherwise the client's slide-creation
        # dialog keeps processing even when the server fails to create the
        # slide.
try:
slide_id = request.env['slide.slide'].create(values)
except AccessError as e:
_logger.error(e)
return {'error': e.name}
except Exception as e:
_logger.error(e)
return {'error': _('Internal server error, please try again later '
'or contact administrator.\nHere is the error '
'message: %s' % e.message)}
return {'url': "/slides/slide/%s" % (slide_id.id)}
# --------------------------------------------------
# EMBED IN THIRD PARTY WEBSITES
# --------------------------------------------------
@http.route('/slides/embed/<int:slide_id>', type='http', auth='public',
website=True)
def slides_embed(self, slide_id, page="1", **kw):
        # Note: don't use the 'model' converter in the route (use 'slide_id');
        # otherwise, if the public user cannot access the embedded slide, the
        # error shown will be the website.403 page instead of the
        # website_slides.embed_slide template.
        # Keep in mind that whatever is rendered here will be displayed
        # inside the embedded iframe
# determine if it is embedded from external web page
referrer_url = request.httprequest.headers.get('Referer', '')
base_url = request.env['ir.config_parameter'].get_param('web.base.url')
        is_embedded = bool(referrer_url) and base_url not in referrer_url
        # try accessing the slide, and render the corresponding template
try:
slide = request.env['slide.slide'].browse(slide_id)
if is_embedded:
request.env['slide.embed'].sudo().add_embed_url(
slide.id, referrer_url)
values = self._get_slide_detail(slide)
values['page'] = page
values['is_embedded'] = is_embedded
if not values.get('private'):
self._set_viewed_slide(slide, 'embed')
return request.website.render('website_slides.embed_slide', values)
except AccessError:
            # TODO: clean this up one day, or find another secure way to
            # detect whether the slide can be embedded, and properly display
            # the error message.
slide = request.env['slide.slide'].sudo().browse(slide_id)
return request.website.render(
'website_slides.embed_slide_forbidden', {'slide': slide})
| agpl-3.0 |
burzillibus/RobHome | venv/lib/python2.7/site-packages/dns/rdtypes/IN/A.py | 8 | 1853 | # Copyright (C) 2003-2007, 2009-2011 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
import dns.exception
import dns.ipv4
import dns.rdata
import dns.tokenizer
class A(dns.rdata.Rdata):
"""A record.
@ivar address: an IPv4 address
@type address: string (in the standard "dotted quad" format)"""
__slots__ = ['address']
def __init__(self, rdclass, rdtype, address):
super(A, self).__init__(rdclass, rdtype)
# check that it's OK
dns.ipv4.inet_aton(address)
self.address = address
def to_text(self, origin=None, relativize=True, **kw):
return self.address
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
address = tok.get_identifier()
tok.get_eol()
return cls(rdclass, rdtype, address)
def to_wire(self, file, compress=None, origin=None):
file.write(dns.ipv4.inet_aton(self.address))
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
address = dns.ipv4.inet_ntoa(wire[current: current + rdlen]).decode()
return cls(rdclass, rdtype, address)
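# Illustrative usage sketch (not part of the original module): round-trip an
# A record through its text form via the generic dns.rdata.from_text helper.
# Assumes dnspython 1.x on Python 2, matching this vendored copy.
#
#   import dns.rdata
#   import dns.rdataclass
#   import dns.rdatatype
#   rdata = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, '192.0.2.1')
#   assert rdata.to_text() == '192.0.2.1'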
| mit |
emijrp/youtube-dl | test/test_age_restriction.py | 171 | 1379 | #!/usr/bin/env python
from __future__ import unicode_literals
# Allow direct execution
import os
import sys
import unittest
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from test.helper import try_rm
from youtube_dl import YoutubeDL
def _download_restricted(url, filename, age):
""" Returns true if the file has been downloaded """
params = {
'age_limit': age,
'skip_download': True,
'writeinfojson': True,
'outtmpl': '%(id)s.%(ext)s',
}
ydl = YoutubeDL(params)
ydl.add_default_info_extractors()
json_filename = os.path.splitext(filename)[0] + '.info.json'
try_rm(json_filename)
ydl.download([url])
res = os.path.exists(json_filename)
try_rm(json_filename)
return res
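# Illustrative sketch (not part of the original test): the same YoutubeDL API
# exercised above, driven directly; 'skip_download' avoids fetching the media.
#
#   from youtube_dl import YoutubeDL
#   ydl = YoutubeDL({'age_limit': 10, 'skip_download': True})
#   ydl.download(['07FYdnEawAQ'])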
class TestAgeRestriction(unittest.TestCase):
def _assert_restricted(self, url, filename, age, old_age=None):
self.assertTrue(_download_restricted(url, filename, old_age))
self.assertFalse(_download_restricted(url, filename, age))
def test_youtube(self):
self._assert_restricted('07FYdnEawAQ', '07FYdnEawAQ.mp4', 10)
def test_youporn(self):
self._assert_restricted(
'http://www.youporn.com/watch/505835/sex-ed-is-it-safe-to-masturbate-daily/',
'505835.mp4', 2, old_age=25)
if __name__ == '__main__':
unittest.main()
| unlicense |
binken/robotframework-selenium2library | src/Selenium2Library/keywords/_runonfailure.py | 72 | 2833 | from robot.libraries import BuiltIn
from keywordgroup import KeywordGroup
BUILTIN = BuiltIn.BuiltIn()
class _RunOnFailureKeywords(KeywordGroup):
def __init__(self):
self._run_on_failure_keyword = None
self._running_on_failure_routine = False
# Public
def register_keyword_to_run_on_failure(self, keyword):
"""Sets the keyword to execute when a Selenium2Library keyword fails.
        `keyword` is the name of a keyword (from any available
libraries) that will be executed if a Selenium2Library keyword fails.
It is not possible to use a keyword that requires arguments.
Using the value "Nothing" will disable this feature altogether.
The initial keyword to use is set in `importing`, and the
keyword that is used by default is `Capture Page Screenshot`.
        Taking a screenshot when something fails is a very useful
        feature, but notice that it can slow down the execution.
This keyword returns the name of the previously registered
failure keyword. It can be used to restore the original
value later.
Example:
| Register Keyword To Run On Failure | Log Source | # Run `Log Source` on failure. |
| ${previous kw}= | Register Keyword To Run On Failure | Nothing | # Disables run-on-failure functionality and stores the previous kw name in a variable. |
| Register Keyword To Run On Failure | ${previous kw} | # Restore to the previous keyword. |
This run-on-failure functionality only works when running tests on Python/Jython 2.4
or newer and it does not work on IronPython at all.
"""
old_keyword = self._run_on_failure_keyword
old_keyword_text = old_keyword if old_keyword is not None else "No keyword"
new_keyword = keyword if keyword.strip().lower() != "nothing" else None
new_keyword_text = new_keyword if new_keyword is not None else "No keyword"
self._run_on_failure_keyword = new_keyword
self._info('%s will be run on failure.' % new_keyword_text)
return old_keyword_text
# Private
def _run_on_failure(self):
if self._run_on_failure_keyword is None:
return
if self._running_on_failure_routine:
return
self._running_on_failure_routine = True
try:
BUILTIN.run_keyword(self._run_on_failure_keyword)
except Exception, err:
self._run_on_failure_error(err)
finally:
self._running_on_failure_routine = False
def _run_on_failure_error(self, err):
err = "Keyword '%s' could not be run on failure: %s" % (self._run_on_failure_keyword, err)
if hasattr(self, '_warn'):
self._warn(err)
return
raise Exception(err)
| apache-2.0 |
israeltobias/DownMedia | youtube-dl/youtube_dl/extractor/comedycentral.py | 33 | 5341 | from __future__ import unicode_literals
from .mtv import MTVServicesInfoExtractor
from .common import InfoExtractor
class ComedyCentralIE(MTVServicesInfoExtractor):
_VALID_URL = r'''(?x)https?://(?:www\.)?cc\.com/
(video-clips|episodes|cc-studios|video-collections|shows(?=/[^/]+/(?!full-episodes)))
/(?P<title>.*)'''
_FEED_URL = 'http://comedycentral.com/feeds/mrss/'
_TESTS = [{
'url': 'http://www.cc.com/video-clips/kllhuv/stand-up-greg-fitzsimmons--uncensored---too-good-of-a-mother',
'md5': 'c4f48e9eda1b16dd10add0744344b6d8',
'info_dict': {
'id': 'cef0cbb3-e776-4bc9-b62e-8016deccb354',
'ext': 'mp4',
'title': 'CC:Stand-Up|August 18, 2013|1|0101|Uncensored - Too Good of a Mother',
'description': 'After a certain point, breastfeeding becomes c**kblocking.',
'timestamp': 1376798400,
'upload_date': '20130818',
},
}, {
'url': 'http://www.cc.com/shows/the-daily-show-with-trevor-noah/interviews/6yx39d/exclusive-rand-paul-extended-interview',
'only_matching': True,
}]
class ComedyCentralFullEpisodesIE(MTVServicesInfoExtractor):
_VALID_URL = r'''(?x)https?://(?:www\.)?cc\.com/
(?:full-episodes|shows(?=/[^/]+/full-episodes))
/(?P<id>[^?]+)'''
_FEED_URL = 'http://comedycentral.com/feeds/mrss/'
_TESTS = [{
'url': 'http://www.cc.com/full-episodes/pv391a/the-daily-show-with-trevor-noah-november-28--2016---ryan-speedo-green-season-22-ep-22028',
'info_dict': {
'description': 'Donald Trump is accused of exploiting his president-elect status for personal gain, Cuban leader Fidel Castro dies, and Ryan Speedo Green discusses "Sing for Your Life."',
'title': 'November 28, 2016 - Ryan Speedo Green',
},
'playlist_count': 4,
}, {
'url': 'http://www.cc.com/shows/the-daily-show-with-trevor-noah/full-episodes',
'only_matching': True,
}]
def _real_extract(self, url):
playlist_id = self._match_id(url)
webpage = self._download_webpage(url, playlist_id)
mgid = self._extract_triforce_mgid(webpage, data_zone='t2_lc_promo1')
videos_info = self._get_videos_info(mgid)
return videos_info
class ToshIE(MTVServicesInfoExtractor):
IE_DESC = 'Tosh.0'
_VALID_URL = r'^https?://tosh\.cc\.com/video-(?:clips|collections)/[^/]+/(?P<videotitle>[^/?#]+)'
_FEED_URL = 'http://tosh.cc.com/feeds/mrss'
_TESTS = [{
'url': 'http://tosh.cc.com/video-clips/68g93d/twitter-users-share-summer-plans',
'info_dict': {
'description': 'Tosh asked fans to share their summer plans.',
'title': 'Twitter Users Share Summer Plans',
},
'playlist': [{
'md5': 'f269e88114c1805bb6d7653fecea9e06',
'info_dict': {
'id': '90498ec2-ed00-11e0-aca6-0026b9414f30',
'ext': 'mp4',
'title': 'Tosh.0|June 9, 2077|2|211|Twitter Users Share Summer Plans',
'description': 'Tosh asked fans to share their summer plans.',
'thumbnail': r're:^https?://.*\.jpg',
                # It's really reported to be published in the year 2077
'upload_date': '20770610',
'timestamp': 3390510600,
'subtitles': {
'en': 'mincount:3',
},
},
}]
}, {
'url': 'http://tosh.cc.com/video-collections/x2iz7k/just-plain-foul/m5q4fp',
'only_matching': True,
}]
class ComedyCentralTVIE(MTVServicesInfoExtractor):
_VALID_URL = r'https?://(?:www\.)?comedycentral\.tv/(?:staffeln|shows)/(?P<id>[^/?#&]+)'
_TESTS = [{
'url': 'http://www.comedycentral.tv/staffeln/7436-the-mindy-project-staffel-4',
'info_dict': {
'id': 'local_playlist-f99b626bdfe13568579a',
'ext': 'flv',
'title': 'Episode_the-mindy-project_shows_season-4_episode-3_full-episode_part1',
},
'params': {
# rtmp download
'skip_download': True,
},
}, {
'url': 'http://www.comedycentral.tv/shows/1074-workaholics',
'only_matching': True,
}, {
'url': 'http://www.comedycentral.tv/shows/1727-the-mindy-project/bonus',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
mrss_url = self._search_regex(
r'data-mrss=(["\'])(?P<url>(?:(?!\1).)+)\1',
webpage, 'mrss url', group='url')
return self._get_videos_info_from_url(mrss_url, video_id)
class ComedyCentralShortnameIE(InfoExtractor):
_VALID_URL = r'^:(?P<id>tds|thedailyshow)$'
_TESTS = [{
'url': ':tds',
'only_matching': True,
}, {
'url': ':thedailyshow',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
shortcut_map = {
'tds': 'http://www.cc.com/shows/the-daily-show-with-trevor-noah/full-episodes',
'thedailyshow': 'http://www.cc.com/shows/the-daily-show-with-trevor-noah/full-episodes',
}
return self.url_result(shortcut_map[video_id])
| gpl-3.0 |
vipul-sharma20/oh-mainline | vendor/packages/docutils/test/test_parsers/test_rst/test_TableParser.py | 18 | 6100 | #! /usr/bin/env python
# coding: utf-8
# $Id: test_TableParser.py 7668 2013-06-04 12:46:30Z milde $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
Tests for states.py.
"""
from __init__ import DocutilsTestSupport
def suite():
s = DocutilsTestSupport.GridTableParserTestSuite()
s.generateTests(totest)
return s
totest = {}
totest['grid_tables'] = [
["""\
+-------------------------------------+
| A table with one cell and one line. |
+-------------------------------------+
""",
[(0, 0, 2, 38, ['A table with one cell and one line.'])],
([37],
[],
[[(0, 0, 1, ['A table with one cell and one line.'])]])],
["""\
+--------------+--------------+
| A table with | two columns. |
+--------------+--------------+
""",
[(0, 0, 2, 15, ['A table with']),
(0, 15, 2, 30, ['two columns.'])],
([14, 14],
[],
[[(0, 0, 1, ['A table with']),
(0, 0, 1, ['two columns.'])]])],
# Combining chars in grid tables still fail
# [u"""\
# +--------------+------------------+
# | A tāble w̅ith | comb̲ining chars. |
# +--------------+------------------+
# """,
# [(0, 0, 2, 15, [u'A table with']),
# (0, 15, 2, 30, [u'combining chars.'])],
# ([14, 14],
# [],
# [[(0, 0, 1, [u'A table with']),
# (0, 0, 1, [u'combining chars.'])]])],
["""\
+--------------+-------------+
| A table with | two columns |
+--------------+-------------+
| and | two rows. |
+--------------+-------------+
""",
[(0, 0, 2, 15, ['A table with']),
(0, 15, 2, 29, ['two columns']),
(2, 0, 4, 15, ['and']),
(2, 15, 4, 29, ['two rows.'])],
([14, 13],
[],
[[(0, 0, 1, ['A table with']),
(0, 0, 1, ['two columns'])],
[(0, 0, 3, ['and']),
(0, 0, 3, ['two rows.'])]])],
["""\
+--------------------------+
| A table with three rows, |
+------------+-------------+
| and two | columns. |
+------------+-------------+
| First and last rows |
| contain column spans. |
+--------------------------+
""",
[(0, 0, 2, 27, ['A table with three rows,']),
(2, 0, 4, 13, ['and two']),
(2, 13, 4, 27, ['columns.']),
(4, 0, 7, 27, ['First and last rows', 'contain column spans.'])],
([12, 13],
[],
[[(0, 1, 1, ['A table with three rows,']),
None],
[(0, 0, 3, ['and two']),
(0, 0, 3, ['columns.'])],
[(0, 1, 5, ['First and last rows', 'contain column spans.']),
None]])],
["""\
+------------+-------------+---------------+
| A table | two rows in | and row spans |
| with three +-------------+ to left and |
| columns, | the middle, | right. |
+------------+-------------+---------------+
""",
[(0, 0, 4, 13, ['A table', 'with three', 'columns,']),
(0, 13, 2, 27, ['two rows in']),
(0, 27, 4, 43, ['and row spans', 'to left and', 'right.']),
(2, 13, 4, 27, ['the middle,'])],
([12, 13, 15],
[],
[[(1, 0, 1, ['A table', 'with three', 'columns,']),
(0, 0, 1, ['two rows in']),
(1, 0, 1, ['and row spans', 'to left and', 'right.'])],
[None,
(0, 0, 3, ['the middle,']),
None]])],
["""\
+------------+-------------+---------------+
| A table | | two rows in | and funny |
| with 3 +--+-------------+-+ stuff. |
| columns, | the middle, | | |
+------------+-------------+---------------+
""",
[(0, 0, 4, 13, ['A table |', 'with 3 +--', 'columns,']),
(0, 13, 2, 27, ['two rows in']),
(0, 27, 4, 43, [' and funny', '-+ stuff.', ' |']),
(2, 13, 4, 27, ['the middle,'])],
([12, 13, 15],
[],
[[(1, 0, 1, ['A table |', 'with 3 +--', 'columns,']),
(0, 0, 1, ['two rows in']),
(1, 0, 1, [' and funny', '-+ stuff.', ' |'])],
[None,
(0, 0, 3, ['the middle,']),
None]])],
["""\
+-----------+-------------------------+
| W/NW cell | N/NE cell |
| +-------------+-----------+
| | Middle cell | E/SE cell |
+-----------+-------------+ |
| S/SE cell | |
+-------------------------+-----------+
""",
[(0, 0, 4, 12, ['W/NW cell', '', '']),
(0, 12, 2, 38, ['N/NE cell']),
(2, 12, 4, 26, ['Middle cell']),
(2, 26, 6, 38, ['E/SE cell', '', '']),
(4, 0, 6, 26, ['S/SE cell'])],
([11, 13, 11],
[],
[[(1, 0, 1, ['W/NW cell', '', '']),
(0, 1, 1, ['N/NE cell']),
None],
[None,
(0, 0, 3, ['Middle cell']),
(1, 0, 3, ['E/SE cell', '', ''])],
[(0, 1, 5, ['S/SE cell']),
None,
None]])],
["""\
+--------------+-------------+
| A bad table. | |
+--------------+ |
| Cells must be rectangles. |
+----------------------------+
""",
'TableMarkupError: Malformed table; parse incomplete.',
'TableMarkupError: Malformed table; parse incomplete.'],
["""\
+-------------------------------+
| A table with two header rows, |
+------------+------------------+
| the first | with a span. |
+============+==================+
| Two body | rows, |
+------------+------------------+
| the second with a span. |
+-------------------------------+
""",
[(0, 0, 2, 32, ['A table with two header rows,']),
(2, 0, 4, 13, ['the first']),
(2, 13, 4, 32, ['with a span.']),
(4, 0, 6, 13, ['Two body']),
(4, 13, 6, 32, ['rows,']),
(6, 0, 8, 32, ['the second with a span.'])],
([12, 18],
[[(0, 1, 1, ['A table with two header rows,']),
None],
[(0, 0, 3, ['the first']),
(0, 0, 3, ['with a span.'])]],
[[(0, 0, 5, ['Two body']),
(0, 0, 5, ['rows,'])],
[(0, 1, 7, ['the second with a span.']),
None]])],
["""\
+-------------------------------+
| A table with two head/body |
+=============+=================+
| row | separators. |
+=============+=================+
| That's bad. | |
+-------------+-----------------+
""",
'TableMarkupError: Multiple head/body row separators '
'(table lines 3 and 5); only one allowed.',
'TableMarkupError: Multiple head/body row separators '
'(table lines 3 and 5); only one allowed.'],
["""\
+-------------------------------------+
| |
+-------------------------------------+
""",
[(0, 0, 2, 38, [''])],
([37],
[],
[[(0, 0, 1, [''])]])],
]
if __name__ == '__main__':
import unittest
unittest.main(defaultTest='suite')
| agpl-3.0 |
factorlibre/l10n-spain | l10n_es_aeat/models/aeat_export_configuration.py | 11 | 5175 | # -*- encoding: utf-8 -*-
##############################################################################
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see http://www.gnu.org/licenses/.
#
##############################################################################
from openerp import models, fields, api, _
class AeatModelExportConfig(models.Model):
_name = 'aeat.model.export.config'
name = fields.Char(string='Name')
model_number = fields.Char(string='Model number', size=3)
model = fields.Many2one('ir.model', 'Odoo model')
date_start = fields.Date(string='Start date')
date_end = fields.Date(string='End date')
config_lines = fields.One2many(
comodel_name='aeat.model.export.config.line',
inverse_name='export_config_id', string='Lines')
class AeatModelExportConfigLine(models.Model):
_name = 'aeat.model.export.config.line'
_order = 'sequence'
sequence = fields.Integer("Sequence")
export_config_id = fields.Many2one(
comodel_name='aeat.model.export.config', string='Config parent',
ondelete="cascade", required=True)
name = fields.Char(string="Name", required=True)
repeat_expression = fields.Char(
string='Repeat expression',
help="If set, this expression will be used for getting the list of "
"elements to iterate on")
repeat = fields.Boolean(compute='_compute_repeat', store=True)
conditional_expression = fields.Char(
string='Conditional expression',
help="If set, this expression will be used to evaluate if this line "
"should be added")
conditional = fields.Boolean(compute='_compute_conditional', store=True)
sub_config = fields.Many2one(
comodel_name='aeat.model.export.config', string='Sub-configuration')
export_type = fields.Selection(
selection=[('string', 'Alphanumeric'),
('float', 'Number with decimals'),
('integer', 'Number without decimals'),
('boolean', 'Boolean'),
('subconfig', 'Sub-configuration')],
default='string', string="Export field type", required=True)
apply_sign = fields.Boolean("Apply sign", default=True)
positive_sign = fields.Char("Positive sign character", size=1, default='0')
negative_sign = fields.Char(
"Negative sign character", size=1, default='N', oldname='sign')
size = fields.Integer("Field size")
alignment = fields.Selection(
[('left', 'Left'), ('right', 'Right')],
default='left', string="Alignment")
bool_no = fields.Char("Value for no", size=1, default=' ')
bool_yes = fields.Char("Value for yes", size=1, default='X')
decimal_size = fields.Integer("Number of char for decimals", default=0)
expression = fields.Char('Expression')
fixed_value = fields.Char('Fixed value')
position = fields.Integer(compute='_compute_position')
value = fields.Char(compute='_compute_value', store=True)
@api.one
@api.depends('repeat_expression')
def _compute_repeat(self):
self.repeat = bool(self.repeat_expression)
@api.one
@api.depends('conditional_expression')
def _compute_conditional(self):
self.conditional = bool(self.conditional_expression)
@api.one
@api.depends('sequence')
def _compute_position(self):
# TODO: Take into account sub-configurations
self.position = 1
for line in self.export_config_id.config_lines:
if line == self:
break
self.position += line.size
@api.one
@api.depends('fixed_value', 'expression')
def _compute_value(self):
if self.export_type == 'subconfig':
self.value = '-'
elif self.expression:
self.value = _('Expression: ')
if len(self.expression) > 35:
self.value += u'"%s…"' % self.expression[:34]
else:
self.value += u'"%s"' % self.expression
else:
self.value = _('Fixed: %s') % (self.fixed_value or _('<blank>'))
@api.one
@api.onchange('export_type')
def onchange_type(self):
if self.export_type in ('float', 'integer'):
self.alignment = 'right'
elif self.export_type in ('string', 'boolean'):
self.alignment = 'left'
@api.one
@api.onchange('sub_config')
def onchange_subconfig(self):
if self.sub_config:
self.export_type = False
self.decimal_size = 0
self.alignment = False
self.apply_sign = False
| agpl-3.0 |
stutivarshney/Bal-Aveksha | WebServer/BalAvekshaEnv/lib/python3.5/site-packages/django/templatetags/static.py | 91 | 4391 | from django import template
from django.apps import apps
from django.utils.encoding import iri_to_uri
from django.utils.six.moves.urllib.parse import urljoin
register = template.Library()
class PrefixNode(template.Node):
def __repr__(self):
return "<PrefixNode for %r>" % self.name
def __init__(self, varname=None, name=None):
if name is None:
raise template.TemplateSyntaxError(
"Prefix nodes must be given a name to return.")
self.varname = varname
self.name = name
@classmethod
def handle_token(cls, parser, token, name):
"""
Class method to parse prefix node and return a Node.
"""
        # token.split_contents() isn't useful here because tags using this method don't accept variables as arguments
tokens = token.contents.split()
if len(tokens) > 1 and tokens[1] != 'as':
raise template.TemplateSyntaxError(
"First argument in '%s' must be 'as'" % tokens[0])
if len(tokens) > 1:
varname = tokens[2]
else:
varname = None
return cls(varname, name)
@classmethod
def handle_simple(cls, name):
try:
from django.conf import settings
except ImportError:
prefix = ''
else:
prefix = iri_to_uri(getattr(settings, name, ''))
return prefix
def render(self, context):
prefix = self.handle_simple(self.name)
if self.varname is None:
return prefix
context[self.varname] = prefix
return ''
@register.tag
def get_static_prefix(parser, token):
"""
Populates a template variable with the static prefix,
``settings.STATIC_URL``.
Usage::
{% get_static_prefix [as varname] %}
Examples::
{% get_static_prefix %}
{% get_static_prefix as static_prefix %}
"""
return PrefixNode.handle_token(parser, token, "STATIC_URL")
@register.tag
def get_media_prefix(parser, token):
"""
Populates a template variable with the media prefix,
``settings.MEDIA_URL``.
Usage::
{% get_media_prefix [as varname] %}
Examples::
{% get_media_prefix %}
{% get_media_prefix as media_prefix %}
"""
return PrefixNode.handle_token(parser, token, "MEDIA_URL")
class StaticNode(template.Node):
def __init__(self, varname=None, path=None):
if path is None:
raise template.TemplateSyntaxError(
"Static template nodes must be given a path to return.")
self.path = path
self.varname = varname
def url(self, context):
path = self.path.resolve(context)
return self.handle_simple(path)
def render(self, context):
url = self.url(context)
if self.varname is None:
return url
context[self.varname] = url
return ''
@classmethod
def handle_simple(cls, path):
if apps.is_installed('django.contrib.staticfiles'):
from django.contrib.staticfiles.storage import staticfiles_storage
return staticfiles_storage.url(path)
else:
return urljoin(PrefixNode.handle_simple("STATIC_URL"), path)
@classmethod
def handle_token(cls, parser, token):
"""
Class method to parse prefix node and return a Node.
"""
bits = token.split_contents()
if len(bits) < 2:
raise template.TemplateSyntaxError(
"'%s' takes at least one argument (path to file)" % bits[0])
path = parser.compile_filter(bits[1])
if len(bits) >= 2 and bits[-2] == 'as':
varname = bits[3]
else:
varname = None
return cls(varname, path)
@register.tag('static')
def do_static(parser, token):
"""
Joins the given path with the STATIC_URL setting.
Usage::
{% static path [as varname] %}
Examples::
{% static "myapp/css/base.css" %}
{% static variable_with_path %}
{% static "myapp/css/base.css" as admin_base_css %}
{% static variable_with_path as varname %}
"""
return StaticNode.handle_token(parser, token)
def static(path):
"""
Given a relative path to a static asset, return the absolute path to the
asset.
"""
return StaticNode.handle_simple(path)
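# Illustrative usage sketch (not part of the original module), assuming
# 'django.contrib.staticfiles' is in INSTALLED_APPS:
#
#   from django.templatetags.static import static
#   url = static('myapp/css/base.css')  # e.g. '/static/myapp/css/base.css'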
| gpl-3.0 |
brchiu/tensorflow | tensorflow/contrib/learn/python/learn/metric_spec.py | 42 | 16838 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""The metric spec class to flexibly connect models and metrics (deprecated).
This module and all its submodules are deprecated. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for migration instructions.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import six
from tensorflow.python.platform import tf_logging as logging
from tensorflow.python.util import tf_inspect
from tensorflow.python.util.deprecation import deprecated
def _assert_named_args(sentinel):
if sentinel is not None:
raise ValueError(
'`metric_fn` requires named args: '
'`labels`, `predictions`, and optionally `weights`.')
def _args(fn):
"""Get argument names for function-like object.
Args:
fn: Function, or function-like object (e.g., result of `functools.partial`).
Returns:
`tuple` of string argument names.
"""
if hasattr(fn, 'func') and hasattr(fn, 'keywords'):
# Handle functools.partial and similar objects.
return tuple(
[arg for arg in _args(fn.func) if arg not in set(fn.keywords.keys())])
# Handle function.
return tuple(tf_inspect.getargspec(fn).args)
_CANONICAL_LABELS_ARG = 'labels'
_LABELS_ARGS = set((_CANONICAL_LABELS_ARG, 'label', 'targets', 'target'))
_CANONICAL_PREDICTIONS_ARG = 'predictions'
_PREDICTIONS_ARGS = set((_CANONICAL_PREDICTIONS_ARG, 'prediction',
'logits', 'logit'))
_CANONICAL_WEIGHTS_ARG = 'weights'
_WEIGHTS_ARGS = set((_CANONICAL_WEIGHTS_ARG, 'weight'))
def _matching_arg(
fn_name, fn_args, candidate_args, canonical_arg, is_required=False):
"""Find single argument in `args` from `candidate_args`.
Args:
fn_name: Function name, only used for error string.
fn_args: String argument names to `fn_name` function.
candidate_args: Candidate argument names to find in `args`.
canonical_arg: Canonical argument name in `candidate_args`. This is only
used to log a warning if a non-canonical match is found.
is_required: Whether function is required to have an arg in
`candidate_args`.
Returns:
String argument name if found, or `None` if not found.
Raises:
ValueError: if 2 candidates are found, or 0 are found and `is_required` is
set.
"""
assert canonical_arg in candidate_args # Sanity check.
matching_args = candidate_args.intersection(fn_args)
if len(matching_args) > 1:
raise ValueError(
'Ambiguous arguments %s, must provide only one of %s.' % (
matching_args, candidate_args))
matching_arg = matching_args.pop() if matching_args else None
if matching_arg:
if matching_arg != canonical_arg:
logging.warning(
'Canonical arg %s missing from %s(%s), using %s.',
canonical_arg, fn_name, fn_args, matching_arg)
elif is_required:
raise ValueError(
'%s missing from %s(%s).' % (candidate_args, fn_name, fn_args))
return matching_arg
def _fn_name(fn):
if hasattr(fn, '__name__'):
return fn.__name__
if hasattr(fn, 'func') and hasattr(fn.func, '__name__'):
return fn.func.__name__ # If it's a functools.partial.
return str(fn)
def _adapt_metric_fn(
metric_fn, metric_fn_name, is_labels_required, is_weights_required):
"""Adapt `metric_fn` to take only named args.
This returns a function that takes only named args `labels`, `predictions`,
and `weights`, and invokes `metric_fn` according to the following rules:
- If `metric_fn` args include exactly one of `_LABELS_ARGS`, that arg is
passed (usually by name, but positionally if both it and `predictions` need
to be passed positionally). Otherwise, `labels` are omitted.
- If `metric_fn` args include exactly one of `_PREDICTIONS_ARGS`, that arg is
passed by name. Otherwise, `predictions` are passed positionally as the
first non-label argument.
- If exactly one of `_WEIGHTS_ARGS` is provided, that arg is passed by
name.
Args:
metric_fn: Metric function to be wrapped.
metric_fn_name: `metric_fn` name, only used for logging.
is_labels_required: Whether `labels` is a required arg.
is_weights_required: Whether `weights` is a required arg.
Returns:
Function accepting only named args `labels, `predictions`, and `weights`,
and passing those to `metric_fn`.
Raises:
ValueError: if one of the following is true:
- `metric_fn` has more than one arg of `_LABELS_ARGS`, `_PREDICTIONS_ARGS`,
or `_WEIGHTS_ARGS`
- `is_labels_required` is true, and `metric_fn` has no arg from
`_LABELS_ARGS`.
- `is_weights_required` is true, and `metric_fn` has no arg from
`_WEIGHTS_ARGS`.
"""
args = _args(metric_fn)
labels_arg = _matching_arg(
metric_fn_name, args, _LABELS_ARGS, _CANONICAL_LABELS_ARG,
is_labels_required)
predictions_arg = _matching_arg(
metric_fn_name, args, _PREDICTIONS_ARGS, _CANONICAL_PREDICTIONS_ARG)
weights_arg = _matching_arg(
metric_fn_name, args, _WEIGHTS_ARGS, _CANONICAL_WEIGHTS_ARG,
is_weights_required)
# pylint: disable=invalid-name
if labels_arg:
if predictions_arg:
# Both labels and predictions are named args.
def _named_metric_fn(
_sentinel=None, labels=None, predictions=None, weights=None):
_assert_named_args(_sentinel)
kwargs = {
labels_arg: labels,
predictions_arg: predictions,
}
if weights is not None:
kwargs[weights_arg] = weights
return metric_fn(**kwargs)
return _named_metric_fn
if labels_arg == args[0]:
# labels is a named arg, and first. predictions is not a named arg, so we
# want to pass it as the 2nd positional arg (i.e., the first non-labels
# position), which means passing both positionally.
def _positional_metric_fn(
_sentinel=None, labels=None, predictions=None, weights=None):
_assert_named_args(_sentinel)
# TODO(ptucker): Should we support metrics that take only labels?
      # Currently, if you want a streaming mean of a label, you have to wrap
      # it in a fn that takes and discards predictions.
if weights is None:
return metric_fn(labels, predictions)
return metric_fn(labels, predictions, **{weights_arg: weights})
return _positional_metric_fn
# labels is a named arg, and not first, so we pass predictions positionally
# and labels by name.
def _positional_predictions_metric_fn(
_sentinel=None, labels=None, predictions=None, weights=None):
_assert_named_args(_sentinel)
kwargs = {
labels_arg: labels,
}
if weights is not None:
kwargs[weights_arg] = weights
return metric_fn(predictions, **kwargs)
return _positional_predictions_metric_fn
if predictions_arg:
# No labels, and predictions is named, so we pass the latter as a named arg.
def _named_no_labels_metric_fn(
_sentinel=None, labels=None, predictions=None, weights=None):
del labels
_assert_named_args(_sentinel)
kwargs = {
predictions_arg: predictions,
}
# TODO(ptucker): Should we allow weights with no labels?
if weights is not None:
kwargs[weights_arg] = weights
return metric_fn(**kwargs)
return _named_no_labels_metric_fn
# Neither labels nor predictions are named, so we just pass predictions as the
# first arg.
def _positional_no_labels_metric_fn(
_sentinel=None, labels=None, predictions=None, weights=None):
del labels
_assert_named_args(_sentinel)
if weights is None:
return metric_fn(predictions)
# TODO(ptucker): Should we allow weights with no labels?
return metric_fn(predictions, **{weights_arg: weights})
return _positional_no_labels_metric_fn
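# Illustrative sketch (not part of the original module): adapting a metric
# whose signature uses the non-canonical names `targets`/`logits`. The
# `my_accuracy` function below is hypothetical.
#
#   def my_accuracy(targets, logits):
#     return tf.metrics.accuracy(targets, logits)
#
#   adapted = _adapt_metric_fn(my_accuracy, 'my_accuracy',
#                              is_labels_required=False,
#                              is_weights_required=False)
#   value_op, update_op = adapted(labels=label_tensor,
#                                 predictions=prediction_tensor)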
class MetricSpec(object):
"""MetricSpec connects a model to metric functions.
THIS CLASS IS DEPRECATED. See
[contrib/learn/README.md](https://www.tensorflow.org/code/tensorflow/contrib/learn/README.md)
for general migration instructions.
The MetricSpec class contains all information necessary to connect the
output of a `model_fn` to the metrics (usually, streaming metrics) that are
used in evaluation.
It is passed in the `metrics` argument of `Estimator.evaluate`. The
`Estimator` then knows which predictions, labels, and weight to use to call a
given metric function.
When building the ops to run in evaluation, an `Estimator` will call
`create_metric_ops`, which will connect the given `metric_fn` to the model
as detailed in the docstring for `create_metric_ops`, and return the metric.
Example:
Assuming a model has an input function which returns inputs containing
(among other things) a tensor with key "input_key", and a labels dictionary
containing "label_key". Let's assume that the `model_fn` for this model
returns a prediction with key "prediction_key".
In order to compute the accuracy of the "prediction_key" prediction, we
would add
```
"prediction accuracy": MetricSpec(metric_fn=prediction_accuracy_fn,
prediction_key="prediction_key",
label_key="label_key")
```
to the metrics argument to `evaluate`. `prediction_accuracy_fn` can be either
a predefined function in metric_ops (e.g., `streaming_accuracy`) or a custom
function you define.
If we would like the accuracy to be weighted by "input_key", we can add that
as the `weight_key` argument.
```
"prediction accuracy": MetricSpec(metric_fn=prediction_accuracy_fn,
prediction_key="prediction_key",
label_key="label_key",
weight_key="input_key")
```
An end-to-end example is as follows:
```
estimator = tf.contrib.learn.Estimator(...)
estimator.fit(...)
_ = estimator.evaluate(
input_fn=input_fn,
steps=1,
metrics={
'prediction accuracy':
metric_spec.MetricSpec(
metric_fn=prediction_accuracy_fn,
prediction_key="prediction_key",
label_key="label_key")
})
```
"""
@deprecated(None, 'Use tf.estimator.EstimatorSpec.eval_metric_ops.')
def __init__(self,
metric_fn,
prediction_key=None,
label_key=None,
weight_key=None):
"""Constructor.
Creates a MetricSpec.
Args:
metric_fn: A function to use as a metric. See `_adapt_metric_fn` for
rules on how `predictions`, `labels`, and `weights` are passed to this
function. This must return either a single `Tensor`, which is
interpreted as a value of this metric, or a pair
`(value_op, update_op)`, where `value_op` is the op to call to
obtain the value of the metric, and `update_op` should be run for
each batch to update internal state.
prediction_key: The key for a tensor in the `predictions` dict (output
from the `model_fn`) to use as the `predictions` input to the
`metric_fn`. Optional. If `None`, the `model_fn` must return a single
tensor or a dict with only a single entry as `predictions`.
label_key: The key for a tensor in the `labels` dict (output from the
`input_fn`) to use as the `labels` input to the `metric_fn`.
Optional. If `None`, the `input_fn` must return a single tensor or a
dict with only a single entry as `labels`.
weight_key: The key for a tensor in the `inputs` dict (output from the
`input_fn`) to use as the `weights` input to the `metric_fn`.
Optional. If `None`, no weights will be passed to the `metric_fn`.
"""
self._metric_fn_name = _fn_name(metric_fn)
self._metric_fn = _adapt_metric_fn(
metric_fn=metric_fn,
metric_fn_name=self._metric_fn_name,
is_labels_required=label_key is not None,
is_weights_required=weight_key is not None)
self._prediction_key = prediction_key
self._label_key = label_key
self._weight_key = weight_key
@property
def prediction_key(self):
return self._prediction_key
@property
def label_key(self):
return self._label_key
@property
def weight_key(self):
return self._weight_key
@property
def metric_fn(self):
"""Metric function.
This function accepts named args: `predictions`, `labels`, `weights`. It
returns a single `Tensor` or `(value_op, update_op)` pair. See `metric_fn`
constructor argument for more details.
Returns:
Function, see `metric_fn` constructor argument for more details.
"""
return self._metric_fn
def __str__(self):
return ('MetricSpec(metric_fn=%s, ' % self._metric_fn_name +
'prediction_key=%s, ' % self.prediction_key +
'label_key=%s, ' % self.label_key +
'weight_key=%s)' % self.weight_key
)
def create_metric_ops(self, inputs, labels, predictions):
"""Connect our `metric_fn` to the specified members of the given dicts.
This function will call the `metric_fn` given in our constructor as follows:
```
metric_fn(predictions[self.prediction_key],
labels[self.label_key],
weights=weights[self.weight_key])
```
And returns the result. The `weights` argument is only passed if
`self.weight_key` is not `None`.
`predictions` and `labels` may be single tensors as well as dicts. If
`predictions` is a single tensor, `self.prediction_key` must be `None`. If
`predictions` is a single element dict, `self.prediction_key` is allowed to
be `None`. Conversely, if `labels` is a single tensor, `self.label_key` must
be `None`. If `labels` is a single element dict, `self.label_key` is allowed
to be `None`.
Args:
inputs: A dict of inputs produced by the `input_fn`
labels: A dict of labels or a single label tensor produced by the
`input_fn`.
predictions: A dict of predictions or a single tensor produced by the
`model_fn`.
Returns:
The result of calling `metric_fn`.
Raises:
ValueError: If `predictions` or `labels` is a single `Tensor` and
`self.prediction_key` or `self.label_key` is not `None`; or if
`self.label_key` is `None` but `labels` is a dict with more than one
element, or if `self.prediction_key` is `None` but `predictions` is a
dict with more than one element.
"""
def _get_dict(name, dict_or_tensor, key):
"""Get a single tensor or an element of a dict or raise ValueError."""
if key:
if not isinstance(dict_or_tensor, dict):
raise ValueError('MetricSpec with ' + name + '_key specified'
' requires ' +
name + 's dict, got %s.\n' % dict_or_tensor +
'You must not provide a %s_key if you ' % name +
'only have a single Tensor as %ss.' % name)
if key not in dict_or_tensor:
raise KeyError(
'Key \'%s\' missing from %s.' % (key, dict_or_tensor.keys()))
return dict_or_tensor[key]
else:
if isinstance(dict_or_tensor, dict):
if len(dict_or_tensor) != 1:
raise ValueError('MetricSpec without specified ' + name + '_key'
' requires ' + name + 's tensor or single element'
' dict, got %s' % dict_or_tensor)
return six.next(six.itervalues(dict_or_tensor))
return dict_or_tensor
# Get the predictions.
prediction = _get_dict('prediction', predictions, self.prediction_key)
# Get the labels.
label = _get_dict('label', labels, self.label_key)
try:
return self.metric_fn(
labels=label,
predictions=prediction,
weights=inputs[self.weight_key] if self.weight_key else None)
except Exception as ex:
logging.error('Could not create metric ops for %s, %s.' % (self, ex))
raise
| apache-2.0 |
naresh21/synergetics-edx-platform | common/lib/xmodule/xmodule/video_module/video_module.py | 5 | 43772 | # -*- coding: utf-8 -*-
"""Video is ungraded Xmodule for support video content.
It's new improved video module, which support additional feature:
- Can play non-YouTube video sources via in-browser HTML5 video player.
- YouTube defaults to HTML5 mode from the start.
- Speed changes in both YouTube and non-YouTube videos happen via
in-browser HTML5 video method (when in HTML5 mode).
- Navigational subtitles can be disabled altogether via an attribute
in XML.
Examples of html5 videos for manual testing:
https://s3.amazonaws.com/edx-course-videos/edx-intro/edX-FA12-cware-1_100.mp4
https://s3.amazonaws.com/edx-course-videos/edx-intro/edX-FA12-cware-1_100.webm
https://s3.amazonaws.com/edx-course-videos/edx-intro/edX-FA12-cware-1_100.ogv
"""
import copy
import json
import logging
import random
from collections import OrderedDict
from operator import itemgetter
from lxml import etree
from pkg_resources import resource_string
from django.conf import settings
from openedx.core.lib.cache_utils import memoize_in_request_cache
from xblock.core import XBlock
from xblock.fields import ScopeIds
from xblock.runtime import KvsFieldData
from opaque_keys.edx.locator import AssetLocator
from xmodule.modulestore.inheritance import InheritanceKeyValueStore, own_metadata
from xmodule.x_module import XModule, module_attr
from xmodule.editing_module import TabsEditingDescriptor
from xmodule.raw_module import EmptyDataRawDescriptor
from xmodule.xml_module import is_pointer_tag, name_to_pathname, deserialize_field
from xmodule.exceptions import NotFoundError
from xmodule.contentstore.content import StaticContent
from xmodule.validation import StudioValidationMessage, StudioValidation
from .transcripts_utils import VideoTranscriptsMixin, Transcript, get_html5_ids
from .video_utils import create_youtube_string, get_poster, rewrite_video_url, format_xml_exception_message
from .bumper_utils import bumperize
from .video_xfields import VideoFields
from .video_handlers import VideoStudentViewHandlers, VideoStudioViewHandlers
from xmodule.video_module import manage_video_subtitles_save
from xmodule.mixin import LicenseMixin
# The following import/except block for edxval is temporary measure until
# edxval is a proper XBlock Runtime Service.
#
# Here's the deal: the VideoModule should be able to take advantage of edx-val
# (https://github.com/edx/edx-val) to figure out what URL to give for video
# resources that have an edx_video_id specified. edx-val is a Django app, and
# including it causes tests to fail because we run common/lib tests standalone
# without Django dependencies. The alternatives seem to be:
#
# 1. Move VideoModule out of edx-platform.
# 2. Accept the Django dependency in common/lib.
# 3. Try to import, catch the exception on failure, and check for the existence
# of edxval_api before invoking it in the code.
# 4. Make edxval an XBlock Runtime Service
#
# (1) is a longer term goal. VideoModule should be made into an XBlock and
# extracted from edx-platform entirely. But that's expensive to do because of
# the various dependencies (like templates). Need to sort this out.
# (2) is explicitly discouraged.
# (3) is what we're doing today. The code is still functional when called within
# the context of the LMS, but does not cause failure on import when running
# standalone tests. Most VideoModule tests tend to be in the LMS anyway,
# probably for historical reasons, so we're not making things notably worse.
# (4) is one of the next items on the backlog for edxval, and should get rid
# of this particular import silliness. It's just that I haven't made one before,
# and I was worried about trying it with my deadline constraints.
try:
import edxval.api as edxval_api
except ImportError:
edxval_api = None
try:
from branding.models import BrandingInfoConfig
except ImportError:
BrandingInfoConfig = None
log = logging.getLogger(__name__)
# Make '_' a no-op so we can scrape strings. Using lambda instead of
# `django.utils.translation.ugettext_noop` because Django cannot be imported in this file
_ = lambda text: text
@XBlock.wants('settings')
class VideoModule(VideoFields, VideoTranscriptsMixin, VideoStudentViewHandlers, XModule, LicenseMixin):
"""
XML source example:
<video show_captions="true"
youtube="0.75:jNCf2gIqpeE,1.0:ZwkTiUPN0mg,1.25:rsq9auxASqI,1.50:kMyNdzVHHgg"
url_name="lecture_21_3" display_name="S19V3: Vacancies"
>
<source src=".../mit-3091x/M-3091X-FA12-L21-3_100.mp4"/>
<source src=".../mit-3091x/M-3091X-FA12-L21-3_100.webm"/>
<source src=".../mit-3091x/M-3091X-FA12-L21-3_100.ogv"/>
</video>
"""
video_time = 0
icon_class = 'video'
# To make sure that js files are called in proper order we use numerical
# index. We do that to avoid issues that occurs in tests.
module = __name__.replace('.video_module', '', 2)
#TODO: For each of the following, ensure that any generated html is properly escaped.
js = {
'js': [
resource_string(module, 'js/src/time.js'),
resource_string(module, 'js/src/video/00_component.js'),
resource_string(module, 'js/src/video/00_video_storage.js'),
resource_string(module, 'js/src/video/00_resizer.js'),
resource_string(module, 'js/src/video/00_async_process.js'),
resource_string(module, 'js/src/video/00_i18n.js'),
resource_string(module, 'js/src/video/00_sjson.js'),
resource_string(module, 'js/src/video/00_iterator.js'),
resource_string(module, 'js/src/video/01_initialize.js'),
resource_string(module, 'js/src/video/025_focus_grabber.js'),
resource_string(module, 'js/src/video/02_html5_video.js'),
resource_string(module, 'js/src/video/03_video_player.js'),
resource_string(module, 'js/src/video/035_video_accessible_menu.js'),
resource_string(module, 'js/src/video/04_video_control.js'),
resource_string(module, 'js/src/video/04_video_full_screen.js'),
resource_string(module, 'js/src/video/05_video_quality_control.js'),
resource_string(module, 'js/src/video/06_video_progress_slider.js'),
resource_string(module, 'js/src/video/07_video_volume_control.js'),
resource_string(module, 'js/src/video/08_video_speed_control.js'),
resource_string(module, 'js/src/video/09_video_caption.js'),
resource_string(module, 'js/src/video/09_play_placeholder.js'),
resource_string(module, 'js/src/video/09_play_pause_control.js'),
resource_string(module, 'js/src/video/09_play_skip_control.js'),
resource_string(module, 'js/src/video/09_skip_control.js'),
resource_string(module, 'js/src/video/09_bumper.js'),
resource_string(module, 'js/src/video/09_save_state_plugin.js'),
resource_string(module, 'js/src/video/09_events_plugin.js'),
resource_string(module, 'js/src/video/09_events_bumper_plugin.js'),
resource_string(module, 'js/src/video/09_poster.js'),
resource_string(module, 'js/src/video/095_video_context_menu.js'),
resource_string(module, 'js/src/video/10_commands.js'),
resource_string(module, 'js/src/video/10_main.js')
]
}
css = {'scss': [
resource_string(module, 'css/video/display.scss'),
resource_string(module, 'css/video/accessible_menu.scss'),
]}
js_module_name = "Video"
def validate(self):
"""
Validates the state of this Video Module Instance.
"""
return self.descriptor.validate()
def get_transcripts_for_student(self, transcripts):
"""Return transcript information necessary for rendering the XModule student view.
This is more or less a direct extraction from `get_html`.
Args:
transcripts (dict): A dict with all transcripts and a sub.
Returns:
Tuple of (track_url, transcript_language, sorted_languages)
track_url -> subtitle download url
transcript_language -> default transcript language
sorted_languages -> dictionary of available transcript languages
"""
track_url = None
sub, other_lang = transcripts["sub"], transcripts["transcripts"]
if self.download_track:
if self.track:
track_url = self.track
elif sub or other_lang:
track_url = self.runtime.handler_url(self, 'transcript', 'download').rstrip('/?')
transcript_language = self.get_default_transcript_language(transcripts)
native_languages = {lang: label for lang, label in settings.LANGUAGES if len(lang) == 2}
languages = {
lang: native_languages.get(lang, display)
for lang, display in settings.ALL_LANGUAGES
if lang in other_lang
}
if not other_lang or (other_lang and sub):
languages['en'] = 'English'
# OrderedDict for easy testing of rendered context in tests
sorted_languages = sorted(languages.items(), key=itemgetter(1))
sorted_languages = OrderedDict(sorted_languages)
return track_url, transcript_language, sorted_languages
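# Illustrative example (hypothetical values): with download_track enabled, sub set, and a
# single 'de' transcript configured, this would return something like
#   ('/handler/transcript/download', 'en', OrderedDict([('de', 'Deutsch'), ('en', 'English')]))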
def get_html(self):
track_status = (self.download_track and self.track)
transcript_download_format = self.transcript_download_format if not track_status else None
sources = filter(None, self.html5_sources)
download_video_link = None
branding_info = None
youtube_streams = ""
# Determine if there is an alternative source for this video
# based on user locale. This exists to support cases where
# we leverage a geography specific CDN, like China.
cdn_url = getattr(settings, 'VIDEO_CDN_URL', {}).get(self.system.user_location)
# If we have an edx_video_id, we prefer its values over what we store
# internally for download links (source, html5_sources) and the youtube
# stream.
if self.edx_video_id and edxval_api:
try:
val_profiles = ["youtube", "desktop_webm", "desktop_mp4"]
# Strip edx_video_id to prevent a ValVideoNotFoundError if stray whitespace is present. TNL-5769
val_video_urls = edxval_api.get_urls_for_profiles(self.edx_video_id.strip(), val_profiles)
# VAL will always give us the keys for the profiles we asked for, but
# if it doesn't have an encoded video entry for that Video + Profile, the
# value will map to `None`
# add the non-youtube urls to the list of alternative sources
# use the last non-None non-youtube url as the link to download the video
for url in [val_video_urls[p] for p in val_profiles if p != "youtube"]:
if url:
if url not in sources:
sources.append(url)
if self.download_video:
# function returns None when the url cannot be re-written
rewritten_link = rewrite_video_url(cdn_url, url)
if rewritten_link:
download_video_link = rewritten_link
else:
download_video_link = url
# set the youtube url
if val_video_urls["youtube"]:
youtube_streams = "1.00:{}".format(val_video_urls["youtube"])
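# e.g. for a VAL youtube entry '3_yD_cEKoCk', youtube_streams becomes
# '1.00:3_yD_cEKoCk' (a single speed:video_id pair at normal speed).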
except edxval_api.ValInternalError:
# VAL raises this exception if it can't find data for the edx video ID. This can happen if the
# course data is ported to a machine that does not have the VAL data. So for now, pass on this
# exception and fallback to whatever we find in the VideoDescriptor.
log.warning("Could not retrieve information from VAL for edx Video ID: %s.", self.edx_video_id)
# If the user comes from China use China CDN for html5 videos.
# 'CN' is China ISO 3166-1 country code.
# Video caching is disabled for Studio. User_location is always None in Studio.
# CountryMiddleware disabled for Studio.
if getattr(self, 'video_speed_optimizations', True) and cdn_url:
branding_info = BrandingInfoConfig.get_config().get(self.system.user_location)
for index, source_url in enumerate(sources):
new_url = rewrite_video_url(cdn_url, source_url)
if new_url:
sources[index] = new_url
# If there was no edx_video_id, or if there was no download specified
# for it, we fall back on whatever we find in the VideoDescriptor
if not download_video_link and self.download_video:
if self.source:
download_video_link = self.source
elif self.html5_sources:
download_video_link = self.html5_sources[0]
track_url, transcript_language, sorted_languages = self.get_transcripts_for_student(self.get_transcripts_info())
# CDN_VIDEO_URLS is only to be used here and will be deleted
# TODO(ali@edx.org): Delete this after the CDN experiment has completed.
html_id = self.location.html_id()
if self.system.user_location == 'CN' and \
settings.FEATURES.get('ENABLE_VIDEO_BEACON', False) and \
html_id in getattr(settings, 'CDN_VIDEO_URLS', {}).keys():
cdn_urls = getattr(settings, 'CDN_VIDEO_URLS', {})[html_id]
cdn_exp_group, new_source = random.choice(zip(range(len(cdn_urls)), cdn_urls))
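# cdn_exp_group is the index of the randomly chosen URL; index 0 is treated as the
# control group and keeps the original first source, while any other index swaps it in.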
if cdn_exp_group > 0:
sources[0] = new_source
cdn_eval = True
else:
cdn_eval = False
cdn_exp_group = None
self.youtube_streams = youtube_streams or create_youtube_string(self) # pylint: disable=W0201
settings_service = self.runtime.service(self, 'settings')
yt_api_key = None
if settings_service:
xblock_settings = settings_service.get_settings_bucket(self)
if xblock_settings and 'YOUTUBE_API_KEY' in xblock_settings:
yt_api_key = xblock_settings['YOUTUBE_API_KEY']
metadata = {
'saveStateUrl': self.system.ajax_url + '/save_user_state',
'autoplay': settings.FEATURES.get('AUTOPLAY_VIDEOS', False),
'streams': self.youtube_streams,
'sub': self.sub,
'sources': sources,
# This won't work when we move to data that
# isn't on the filesystem
'captionDataDir': getattr(self, 'data_dir', None),
'showCaptions': json.dumps(self.show_captions),
'generalSpeed': self.global_speed,
'speed': self.speed,
'savedVideoPosition': self.saved_video_position.total_seconds(),
'start': self.start_time.total_seconds(),
'end': self.end_time.total_seconds(),
'transcriptLanguage': transcript_language,
'transcriptLanguages': sorted_languages,
'ytTestTimeout': settings.YOUTUBE['TEST_TIMEOUT'],
'ytApiUrl': settings.YOUTUBE['API'],
'ytMetadataUrl': settings.YOUTUBE['METADATA_URL'],
'ytKey': yt_api_key,
'transcriptTranslationUrl': self.runtime.handler_url(
self, 'transcript', 'translation/__lang__'
).rstrip('/?'),
'transcriptAvailableTranslationsUrl': self.runtime.handler_url(
self, 'transcript', 'available_translations'
).rstrip('/?'),
## For now, the option "data-autohide-html5" is hard coded. This option
## either enables or disables autohiding of controls and captions on mouse
## inactivity. If set to true, controls and captions will autohide for
## HTML5 sources (non-YouTube) after a period of mouse inactivity over the
## whole video. When the mouse moves (or a key is pressed while any part of
## the video player is focused), the captions and controls will be shown
## once again.
##
## There is no option in the "Advanced Editor" to set this option. However,
## changing it to "True" will have an effect, since the front-end code for it
## already exists.
'autohideHtml5': False,
# This is the server's guess at whether youtube is available for
# this user, based on what was recorded the last time we saw the
# user, and defaulting to True.
'recordedYoutubeIsAvailable': self.youtube_is_available,
}
bumperize(self)
context = {
'bumper_metadata': json.dumps(self.bumper['metadata']), # pylint: disable=E1101
'metadata': json.dumps(OrderedDict(metadata)),
'poster': json.dumps(get_poster(self)),
'branding_info': branding_info,
'cdn_eval': cdn_eval,
'cdn_exp_group': cdn_exp_group,
'id': self.location.html_id(),
'display_name': self.display_name_with_default,
'handout': self.handout,
'download_video_link': download_video_link,
'track': track_url,
'transcript_download_format': transcript_download_format,
'transcript_download_formats_list': self.descriptor.fields['transcript_download_format'].values,
'license': getattr(self, "license", None),
}
return self.system.render_template('video.html', context)
@XBlock.wants("request_cache")
@XBlock.wants("settings")
class VideoDescriptor(VideoFields, VideoTranscriptsMixin, VideoStudioViewHandlers,
TabsEditingDescriptor, EmptyDataRawDescriptor, LicenseMixin):
"""
Descriptor for `VideoModule`.
"""
module_class = VideoModule
transcript = module_attr('transcript')
show_in_read_only_mode = True
tabs = [
{
'name': _("Basic"),
'template': "video/transcripts.html",
'current': True
},
{
'name': _("Advanced"),
'template': "tabs/metadata-edit-tab.html"
}
]
def __init__(self, *args, **kwargs):
"""
Mostly handles backward compatibility issues.
`source` is deprecated field.
a) If `source` exists and `source` is not `html5_sources`: show `source`
field on front-end as not-editable but clearable. Dropdown is a new
field `download_video` and it has value True.
b) If `source` is cleared it is not shown anymore.
c) If `source` exists and `source` in `html5_sources`, do not show `source`
field. `download_video` field has value True.
"""
super(VideoDescriptor, self).__init__(*args, **kwargs)
# For backwards compatibility -- if we've got XML data, parse it out and set the metadata fields
if self.data:
field_data = self._parse_video_xml(etree.fromstring(self.data))
self._field_data.set_many(self, field_data)
del self.data
self.source_visible = False
if self.source:
# If `source` field value exist in the `html5_sources` field values,
# then delete `source` field value and use value from `html5_sources` field.
if self.source in self.html5_sources:
self.source = '' # Delete source field value.
self.download_video = True
else: # Otherwise, `source` field value will be used.
self.source_visible = True
if not self.fields['download_video'].is_set_on(self):
self.download_video = True
# Force download_video field to default value if it's not explicitly set for backward compatibility.
if not self.fields['download_video'].is_set_on(self):
self.download_video = self.download_video
self.force_save_fields(['download_video'])
# For backward compatibility:
# if a course existed and was not re-imported by the time the `download_track` field was added,
# we should enable `download_track` when the following is true:
if not self.fields['download_track'].is_set_on(self) and self.track:
self.download_track = True
def validate(self):
"""
Validates the state of this video Module Instance. This
is the override of the general XBlock method, and it will also ask
its superclass to validate.
"""
validation = super(VideoDescriptor, self).validate()
if not isinstance(validation, StudioValidation):
validation = StudioValidation.copy(validation)
no_transcript_lang = []
for lang_code, transcript in self.transcripts.items():
if not transcript:
no_transcript_lang.append([label for code, label in settings.ALL_LANGUAGES if code == lang_code][0])
if no_transcript_lang:
ungettext = self.runtime.service(self, "i18n").ungettext
validation.set_summary(
StudioValidationMessage(
StudioValidationMessage.WARNING,
ungettext(
'There is no transcript file associated with the {lang} language.',
'There are no transcript files associated with the {lang} languages.',
len(no_transcript_lang)
).format(lang=', '.join(no_transcript_lang))
)
)
return validation
def editor_saved(self, user, old_metadata, old_content):
"""
Used to update video values when `self` is saved from the CMS.
old_metadata: dict of `self`'s scope=settings field values that were explicitly set by the user.
old_content: same as `old_metadata`, but for scope=content.
Due to the code flow in item.py::_save_item, the fields of this `self` instance
have already been updated, but not yet saved, by the time this function is called.
To obtain the values that were changed by user input,
one should compare own_metadata(self) and old_metadata.
The video player has two tabs, and due to the way the tabs are synced,
metadata from the Basic tab is always sent the first time the video player is edited and saved, for example:
{'youtube_id_1_0': u'3_yD_cEKoCk', 'display_name': u'Video', 'sub': u'3_yD_cEKoCk', 'html5_sources': []},
which is why these fields will always be present in old_metadata after the first save. This should be fixed.
On subsequent save requests, html5_sources is always sent too, regardless of whether the user changed it.
That means html5_sources is always in the list of changed fields (the `metadata` param in save_item).
This should be fixed too.
"""
metadata_was_changed_by_user = old_metadata != own_metadata(self)
# There is an edge case where old_metadata and own_metadata are the same while we are importing a
# transcript from youtube: the html5 subs then fail to sync with the youtube sub. We can make the
# sync more robust by checking whether a transcript is present for the video; if the transcript for
# any html5_id is missing, we trigger manage_video_subtitles_save to create it.
if not metadata_was_changed_by_user and self.sub and hasattr(self, 'html5_sources'):
html5_ids = get_html5_ids(self.html5_sources)
for subs_id in html5_ids:
try:
Transcript.asset(self.location, subs_id)
except NotFoundError:
# If a transcript does not exist for a particular html5_id, there is no need to check the other
# html5_ids, because we have to create a new transcript for this missing html5_id by turning on
# the metadata_was_changed_by_user flag.
metadata_was_changed_by_user = True
break
if metadata_was_changed_by_user:
manage_video_subtitles_save(
self,
user,
old_metadata if old_metadata else None,
generate_translation=True
)
def save_with_metadata(self, user):
"""
Save module with updated metadata to database.
"""
self.save()
self.runtime.modulestore.update_item(self, user.id)
@property
def editable_metadata_fields(self):
editable_fields = super(VideoDescriptor, self).editable_metadata_fields
settings_service = self.runtime.service(self, 'settings')
if settings_service:
xb_settings = settings_service.get_settings_bucket(self)
if not xb_settings.get("licensing_enabled", False) and "license" in editable_fields:
del editable_fields["license"]
if self.source_visible:
editable_fields['source']['non_editable'] = True
else:
editable_fields.pop('source')
languages = [{'label': label, 'code': lang} for lang, label in settings.ALL_LANGUAGES]
languages.sort(key=lambda l: l['label'])
editable_fields['transcripts']['languages'] = languages
editable_fields['transcripts']['type'] = 'VideoTranslations'
editable_fields['transcripts']['urlRoot'] = self.runtime.handler_url(
self,
'studio_transcript',
'translation'
).rstrip('/?')
editable_fields['handout']['type'] = 'FileUploader'
return editable_fields
@classmethod
def from_xml(cls, xml_data, system, id_generator):
"""
Creates an instance of this descriptor from the supplied xml_data.
This may be overridden by subclasses
xml_data: A string of xml that will be translated into data and children for
this module
system: A DescriptorSystem for interacting with external resources
id_generator is used to generate course-specific urls and identifiers
"""
xml_object = etree.fromstring(xml_data)
url_name = xml_object.get('url_name', xml_object.get('slug'))
block_type = 'video'
definition_id = id_generator.create_definition(block_type, url_name)
usage_id = id_generator.create_usage(definition_id)
if is_pointer_tag(xml_object):
filepath = cls._format_filepath(xml_object.tag, name_to_pathname(url_name))
xml_object = cls.load_file(filepath, system.resources_fs, usage_id)
system.parse_asides(xml_object, definition_id, usage_id, id_generator)
field_data = cls._parse_video_xml(xml_object, id_generator)
kvs = InheritanceKeyValueStore(initial_values=field_data)
field_data = KvsFieldData(kvs)
video = system.construct_xblock_from_class(
cls,
# We're loading a descriptor, so student_id is meaningless
# We also don't have separate notions of definition and usage ids yet,
# so we use the location for both
ScopeIds(None, block_type, definition_id, usage_id),
field_data,
)
return video
def definition_to_xml(self, resource_fs):
"""
Returns an xml string representing this module.
"""
xml = etree.Element('video')
youtube_string = create_youtube_string(self)
# Mild workaround to ensure that tests pass -- if a field
# is set to its default value, we don't need to write it out.
if youtube_string and youtube_string != '1.00:3_yD_cEKoCk':
xml.set('youtube', unicode(youtube_string))
xml.set('url_name', self.url_name)
attrs = {
'display_name': self.display_name,
'show_captions': json.dumps(self.show_captions),
'start_time': self.start_time,
'end_time': self.end_time,
'sub': self.sub,
'download_track': json.dumps(self.download_track),
'download_video': json.dumps(self.download_video),
}
for key, value in attrs.items():
# Mild workaround to ensure that tests pass -- if a field
# is set to its default value, we don't write it out.
if value:
if key in self.fields and self.fields[key].is_set_on(self):
try:
xml.set(key, unicode(value))
except UnicodeDecodeError:
exception_message = format_xml_exception_message(self.location, key, value)
log.exception(exception_message)
# If exception is UnicodeDecodeError set value using unicode 'utf-8' scheme.
log.info("Setting xml value using 'utf-8' scheme.")
xml.set(key, unicode(value, 'utf-8'))
except ValueError:
exception_message = format_xml_exception_message(self.location, key, value)
log.exception(exception_message)
raise
for source in self.html5_sources:
ele = etree.Element('source')
ele.set('src', source)
xml.append(ele)
if self.track:
ele = etree.Element('track')
ele.set('src', self.track)
xml.append(ele)
if self.handout:
ele = etree.Element('handout')
ele.set('src', self.handout)
xml.append(ele)
if self.transcripts is not None:
# sorting for easy testing of resulting xml
for transcript_language in sorted(self.transcripts.keys()):
ele = etree.Element('transcript')
ele.set('language', transcript_language)
ele.set('src', self.transcripts[transcript_language])
xml.append(ele)
if self.edx_video_id and edxval_api:
try:
xml.append(edxval_api.export_to_xml(self.edx_video_id))
except edxval_api.ValVideoNotFoundError:
pass
# handle license specifically
self.add_license_to_xml(xml)
return xml
def create_youtube_url(self, youtube_id):
"""
Args:
youtube_id: The ID of the video to create a link for
Returns:
A full youtube url to the video whose ID is passed in
"""
if youtube_id:
return u'https://www.youtube.com/watch?v={0}'.format(youtube_id)
else:
return u''
def get_context(self):
"""
Extend context by data for transcript basic tab.
"""
_context = super(VideoDescriptor, self).get_context()
metadata_fields = copy.deepcopy(self.editable_metadata_fields)
display_name = metadata_fields['display_name']
video_url = metadata_fields['html5_sources']
youtube_id_1_0 = metadata_fields['youtube_id_1_0']
def get_youtube_link(video_id):
"""
Returns the fully-qualified YouTube URL for the given video identifier
"""
# First try a lookup in VAL. If we have a YouTube entry there, it overrides the
# one passed in.
if self.edx_video_id and edxval_api:
val_youtube_id = edxval_api.get_url_for_profile(self.edx_video_id, "youtube")
if val_youtube_id:
video_id = val_youtube_id
return self.create_youtube_url(video_id)
_ = self.runtime.service(self, "i18n").ugettext
video_url.update({
'help': _('The URL for your video. This can be a YouTube URL or a link to an .mp4, .ogg, or .webm video file hosted elsewhere on the Internet.'), # pylint: disable=line-too-long
'display_name': _('Default Video URL'),
'field_name': 'video_url',
'type': 'VideoList',
'default_value': [get_youtube_link(youtube_id_1_0['default_value'])]
})
youtube_id_1_0_value = get_youtube_link(youtube_id_1_0['value'])
if youtube_id_1_0_value:
video_url['value'].insert(0, youtube_id_1_0_value)
metadata = {
'display_name': display_name,
'video_url': video_url
}
_context.update({'transcripts_basic_tab_metadata': metadata})
return _context
@classmethod
def _parse_youtube(cls, data):
"""
Parses a string of Youtube IDs such as "1.0:AXdE34_U,1.5:VO3SxfeD"
into a dictionary. Necessary for backwards compatibility with
XML-based courses.
"""
ret = {'0.75': '', '1.00': '', '1.25': '', '1.50': ''}
videos = data.split(',')
for video in videos:
pieces = video.split(':')
try:
speed = '%.2f' % float(pieces[0]) # normalize speed
# Handle the fact that youtube IDs got double-quoted for a period of time.
# Note: we pass in "VideoFields.youtube_id_1_0" so we deserialize as a String--
# it doesn't matter what the actual speed is for the purposes of deserializing.
youtube_id = deserialize_field(cls.youtube_id_1_0, pieces[1])
ret[speed] = youtube_id
except (ValueError, IndexError):
log.warning('Invalid YouTube ID: %s', video)
return ret
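# Illustrative example (hypothetical IDs):
#   _parse_youtube('0.75:jNCf2gIqpeE,1.00:ZwkTiUPN0mg')
#   returns {'0.75': 'jNCf2gIqpeE', '1.00': 'ZwkTiUPN0mg', '1.25': '', '1.50': ''}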
@classmethod
def _parse_video_xml(cls, xml, id_generator=None):
"""
Parse video fields out of xml_data. The fields are set if they are
present in the XML.
Arguments:
id_generator is used to generate course-specific urls and identifiers
"""
field_data = {}
# Convert between key types for certain attributes --
# necessary for backwards compatibility.
conversions = {
# example: 'start_time': cls._example_convert_start_time
}
# Convert between key names for certain attributes --
# necessary for backwards compatibility.
compat_keys = {
'from': 'start_time',
'to': 'end_time'
}
sources = xml.findall('source')
if sources:
field_data['html5_sources'] = [ele.get('src') for ele in sources]
track = xml.find('track')
if track is not None:
field_data['track'] = track.get('src')
handout = xml.find('handout')
if handout is not None:
field_data['handout'] = handout.get('src')
transcripts = xml.findall('transcript')
if transcripts:
field_data['transcripts'] = {tr.get('language'): tr.get('src') for tr in transcripts}
for attr, value in xml.items():
if attr in compat_keys:
attr = compat_keys[attr]
if attr in cls.metadata_to_strip + ('url_name', 'name'):
continue
if attr == 'youtube':
speeds = cls._parse_youtube(value)
for speed, youtube_id in speeds.items():
# should have made these youtube_id_1_00 for
# cleanliness, but hindsight doesn't need glasses
normalized_speed = speed[:-1] if speed.endswith('0') else speed
# If the user has specified html5 sources, make sure we don't use the default video
if youtube_id != '' or 'html5_sources' in field_data:
field_data['youtube_id_{0}'.format(normalized_speed.replace('.', '_'))] = youtube_id
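# e.g. a speed of '1.00' normalizes to '1.0', yielding the field name 'youtube_id_1_0'.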
elif attr in conversions:
field_data[attr] = conversions[attr](value)
elif attr not in cls.fields:
field_data.setdefault('xml_attributes', {})[attr] = value
else:
# We export values with json.dumps (well, except for Strings, but
# for about a month we did it for Strings also).
field_data[attr] = deserialize_field(cls.fields[attr], value)
course_id = getattr(id_generator, 'target_course_id', None)
# Update the handout location with current course_id
if 'handout' in field_data.keys() and course_id:
handout_location = StaticContent.get_location_from_path(field_data['handout'])
if isinstance(handout_location, AssetLocator):
handout_new_location = StaticContent.compute_location(course_id, handout_location.path)
field_data['handout'] = StaticContent.serialize_asset_key_with_slash(handout_new_location)
# For backwards compatibility: Add `source` if XML doesn't have `download_video`
# attribute.
if 'download_video' not in field_data and sources:
field_data['source'] = field_data['html5_sources'][0]
# For backwards compatibility: if XML doesn't have `download_track` attribute,
# it means that it is an old format. So, if `track` has some value,
# `download_track` needs to have value `True`.
if 'download_track' not in field_data and track is not None:
field_data['download_track'] = True
video_asset_elem = xml.find('video_asset')
if (
edxval_api and
video_asset_elem is not None and
'edx_video_id' in field_data
):
# Allow ValCannotCreateError to escape
edxval_api.import_from_xml(
video_asset_elem,
field_data['edx_video_id'],
course_id=course_id
)
# load license if it exists
field_data = LicenseMixin.parse_license_from_xml(field_data, xml)
return field_data
def index_dictionary(self):
xblock_body = super(VideoDescriptor, self).index_dictionary()
video_body = {
"display_name": self.display_name,
}
def _update_transcript_for_index(language=None):
""" Find video transcript - if not found, don't update index """
try:
transcripts = self.get_transcripts_info()
transcript = self.get_transcript(
transcripts, transcript_format='txt', lang=language
)[0].replace("\n", " ")
transcript_index_name = "transcript_{}".format(language if language else self.transcript_language)
video_body.update({transcript_index_name: transcript})
except NotFoundError:
pass
if self.sub:
_update_transcript_for_index()
# Check to see if there are transcripts in other languages besides default transcript
if self.transcripts:
for language in self.transcripts.keys():
_update_transcript_for_index(language)
if "content" in xblock_body:
xblock_body["content"].update(video_body)
else:
xblock_body["content"] = video_body
xblock_body["content_type"] = "Video"
return xblock_body
@property
def request_cache(self):
"""
Returns the request_cache from the runtime.
"""
return self.runtime.service(self, "request_cache")
@memoize_in_request_cache('request_cache')
def get_cached_val_data_for_course(self, video_profile_names, course_id):
"""
Returns the VAL data for the requested video profiles for the given course.
"""
return edxval_api.get_video_info_for_course_and_profiles(unicode(course_id), video_profile_names)
def student_view_data(self, context=None):
"""
Returns a JSON representation of the student_view of this XModule.
The contract of the JSON content is between the caller and the particular XModule.
"""
context = context or {}
# If the "only_on_web" field is set on this video, do not return the rest of the video's data
# in this json view, since this video is to be accessed only through its web view.
if self.only_on_web:
return {"only_on_web": True}
encoded_videos = {}
val_video_data = {}
# Check in VAL data first if edx_video_id exists
if self.edx_video_id:
video_profile_names = context.get("profiles", ["mobile_low"])
# get and cache bulk VAL data for course
val_course_data = self.get_cached_val_data_for_course(video_profile_names, self.location.course_key)
val_video_data = val_course_data.get(self.edx_video_id, {})
# Get the encoded videos if data from VAL is found
if val_video_data:
encoded_videos = val_video_data.get('profiles', {})
# If information for this edx_video_id is not found in the bulk course data, make a
# separate request for this individual edx_video_id, unless cache misses are disabled.
# This is useful/required for videos that don't have a course designated, such as the introductory video
# that is shared across many courses. However, this results in a separate database request so watch
# out for any performance hit if many such videos exist in a course. Set the 'allow_cache_miss' parameter
# to False to disable this fall back.
elif context.get("allow_cache_miss", "True").lower() == "true":
try:
val_video_data = edxval_api.get_video_info(self.edx_video_id)
# Unfortunately, the VAL API is inconsistent in how it returns the encodings, so remap here.
for enc_vid in val_video_data.pop('encoded_videos'):
if enc_vid['profile'] in video_profile_names:
encoded_videos[enc_vid['profile']] = {key: enc_vid[key] for key in ["url", "file_size"]}
except edxval_api.ValVideoNotFoundError:
pass
# Fall back to other video URLs in the video module if not found in VAL
if not encoded_videos:
video_url = self.html5_sources[0] if self.html5_sources else self.source
if video_url:
encoded_videos["fallback"] = {
"url": video_url,
"file_size": 0, # File size is unknown for fallback URLs
}
# Include the youtube link if there is no encoding for mobile -- i.e., only a fallback URL or no encodings at all.
# We include a fallback URL for older versions of the mobile app that don't handle Youtube urls.
if self.youtube_id_1_0:
encoded_videos["youtube"] = {
"url": self.create_youtube_url(self.youtube_id_1_0),
"file_size": 0, # File size is not relevant for external link
}
transcripts_info = self.get_transcripts_info()
transcripts = {
lang: self.runtime.handler_url(self, 'transcript', 'download', query="lang=" + lang, thirdparty=True)
for lang in self.available_translations(transcripts_info, verify_assets=False)
}
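# Illustrative shape of the returned dict (hypothetical values):
#   {
#       "only_on_web": False,
#       "duration": 120.5,
#       "transcripts": {"en": "/handler/transcript/download?lang=en"},
#       "encoded_videos": {"mobile_low": {"url": "https://cdn.example.com/v.mp4", "file_size": 12345}},
#   }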
return {
"only_on_web": self.only_on_web,
"duration": val_video_data.get('duration', None),
"transcripts": transcripts,
"encoded_videos": encoded_videos,
}
| agpl-3.0 |
shabab12/edx-platform | common/test/acceptance/pages/studio/overview.py | 9 | 35446 | """
Course Outline page in Studio.
"""
import datetime
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise
from selenium.webdriver import ActionChains
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.keys import Keys
from ..common.utils import click_css, confirm_prompt
from .course_page import CoursePage
from .container import ContainerPage
from .utils import set_input_value_and_save, set_input_value
class CourseOutlineItem(object):
"""
A mixin class for any :class:`PageObject` shown in a course outline.
"""
# Note there are a few pylint disable=no-member occurrences in this class, because
# it was written assuming it is going to be a mixin to a PageObject and will have functions
# such as self.wait_for_ajax, which doesn't exist on a generic `object`.
BODY_SELECTOR = None
EDIT_BUTTON_SELECTOR = '.xblock-field-value-edit'
NAME_SELECTOR = '.item-title'
NAME_INPUT_SELECTOR = '.xblock-field-input'
NAME_FIELD_WRAPPER_SELECTOR = '.xblock-title .wrapper-xblock-field'
STATUS_MESSAGE_SELECTOR = '> div[class$="status"] .status-message'
CONFIGURATION_BUTTON_SELECTOR = '.action-item .configure-button'
def __repr__(self):
# CourseOutlineItem is also used as a mixin for CourseOutlinePage, which doesn't have a locator
# Check for the existence of a locator so that errors when navigating to the course outline page don't show up
# as errors in the repr method instead.
try:
return "{}(<browser>, {!r})".format(self.__class__.__name__, self.locator) # pylint: disable=no-member
except AttributeError:
return "{}(<browser>)".format(self.__class__.__name__)
def _bounded_selector(self, selector):
"""
Returns `selector`, but limited to this particular `CourseOutlineItem` context
"""
# If the item doesn't have a body selector or locator, then it can't be bounded
# This happens in the context of the CourseOutlinePage
# pylint: disable=no-member
if self.BODY_SELECTOR and hasattr(self, 'locator'):
return '{}[data-locator="{}"] {}'.format(
self.BODY_SELECTOR,
self.locator,
selector
)
else:
return selector
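# Illustrative example (hypothetical locator): with BODY_SELECTOR = '.outline-unit' and
# self.locator = 'block-123', _bounded_selector('.unit-title') returns
# '.outline-unit[data-locator="block-123"] .unit-title'.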
@property
def name(self):
"""
Returns the display name of this object.
"""
name_element = self.q(css=self._bounded_selector(self.NAME_SELECTOR)).first # pylint: disable=no-member
if name_element:
return name_element.text[0]
else:
return None
@property
def has_status_message(self):
"""
Returns True if the item has a status message, False otherwise.
"""
return self.q(css=self._bounded_selector(self.STATUS_MESSAGE_SELECTOR)).first.visible # pylint: disable=no-member
@property
def status_message(self):
"""
Returns the status message of this item.
"""
return self.q(css=self._bounded_selector(self.STATUS_MESSAGE_SELECTOR)).text[0] # pylint: disable=no-member
@property
def has_staff_lock_warning(self):
""" Returns True if the 'Contains staff only content' message is visible """
return self.status_message == 'Contains staff only content' if self.has_status_message else False
@property
def is_staff_only(self):
""" Returns True if the visiblity state of this item is staff only (has a black sidebar) """
return "is-staff-only" in self.q(css=self._bounded_selector(''))[0].get_attribute("class") # pylint: disable=no-member
def edit_name(self):
"""
Puts the item's name into editable form.
"""
self.q(css=self._bounded_selector(self.EDIT_BUTTON_SELECTOR)).first.click() # pylint: disable=no-member
def enter_name(self, new_name):
"""
Enters new_name as the item's display name.
"""
set_input_value(self, self._bounded_selector(self.NAME_INPUT_SELECTOR), new_name)
def change_name(self, new_name):
"""
Changes the container's name.
"""
self.edit_name()
set_input_value_and_save(self, self._bounded_selector(self.NAME_INPUT_SELECTOR), new_name)
self.wait_for_ajax() # pylint: disable=no-member
def finalize_name(self):
"""
Presses ENTER, saving the value of the display name for this item.
"""
# pylint: disable=no-member
self.q(css=self._bounded_selector(self.NAME_INPUT_SELECTOR)).results[0].send_keys(Keys.ENTER)
self.wait_for_ajax()
def set_staff_lock(self, is_locked):
"""
Sets the explicit staff lock of item on the container page to is_locked.
"""
modal = self.edit()
modal.is_explicitly_locked = is_locked
modal.save()
def in_editable_form(self):
"""
Return whether this outline item's display name is in its editable form.
"""
# pylint: disable=no-member
return "is-editing" in self.q(
css=self._bounded_selector(self.NAME_FIELD_WRAPPER_SELECTOR)
)[0].get_attribute("class")
def edit(self):
"""
Puts the item into editable form.
"""
self.q(css=self._bounded_selector(self.CONFIGURATION_BUTTON_SELECTOR)).first.click() # pylint: disable=no-member
modal = CourseOutlineModal(self)
EmptyPromise(modal.is_shown, 'Modal is shown.').fulfill()
return modal
@property
def release_date(self):
"""
Returns the release date from the page. Date is "mm/dd/yyyy" string.
"""
element = self.q(css=self._bounded_selector(".status-release-value")) # pylint: disable=no-member
return element.first.text[0] if element.present else None
@property
def due_date(self):
"""
Returns the due date from the page. Date is "mm/dd/yyyy" string.
"""
element = self.q(css=self._bounded_selector(".status-grading-date")) # pylint: disable=no-member
return element.first.text[0] if element.present else None
@property
def policy(self):
"""
Returns the grading policy text from the page, if present; None otherwise.
"""
element = self.q(css=self._bounded_selector(".status-grading-value")) # pylint: disable=no-member
return element.first.text[0] if element.present else None
def publish(self):
"""
Publish the unit.
"""
click_css(self, self._bounded_selector('.action-publish'), require_notification=False)
modal = CourseOutlineModal(self)
EmptyPromise(modal.is_shown, 'Modal is shown.').fulfill()
modal.publish()
@property
def publish_action(self):
"""
Returns the link for publishing a unit.
"""
return self.q(css=self._bounded_selector('.action-publish')).first # pylint: disable=no-member
class CourseOutlineContainer(CourseOutlineItem):
"""
A mixin to a CourseOutline page object that adds the ability to load
a child page object by title or by index.
CHILD_CLASS must be a :class:`CourseOutlineChild` subclass.
"""
CHILD_CLASS = None
ADD_BUTTON_SELECTOR = '> .outline-content > .add-item a.button-new'
def child(self, title, child_class=None):
"""
:type self: object
"""
if not child_class:
child_class = self.CHILD_CLASS
# pylint: disable=no-member
return child_class(
self.browser,
self.q(css=child_class.BODY_SELECTOR).filter(
lambda el: title in [inner.text for inner in
el.find_elements_by_css_selector(child_class.NAME_SELECTOR)]
).attrs('data-locator')[0]
)
def children(self, child_class=None):
"""
Returns all the children page objects of class child_class.
"""
if not child_class:
child_class = self.CHILD_CLASS
# pylint: disable=no-member
return self.q(css=self._bounded_selector(child_class.BODY_SELECTOR)).map(
lambda el: child_class(self.browser, el.get_attribute('data-locator'))).results
def child_at(self, index, child_class=None):
"""
Returns the child at the specified index.
:type self: object
"""
if not child_class:
child_class = self.CHILD_CLASS
return self.children(child_class)[index]
def add_child(self, require_notification=True):
"""
Adds a child to this xblock, waiting for notifications.
"""
click_css(
self,
self._bounded_selector(self.ADD_BUTTON_SELECTOR),
require_notification=require_notification,
)
def expand_subsection(self):
"""
Toggle the expansion of this subsection.
"""
# pylint: disable=no-member
self.browser.execute_script("jQuery.fx.off = true;")
def subsection_expanded():
"""
Returns whether or not this subsection is expanded.
"""
self.wait_for_element_presence(
self._bounded_selector(self.ADD_BUTTON_SELECTOR), 'Toggle control is present'
)
add_button = self.q(css=self._bounded_selector(self.ADD_BUTTON_SELECTOR)).first.results
return add_button and add_button[0].is_displayed()
currently_expanded = subsection_expanded()
# Need to click slightly off-center in order for the click to be recognized.
ele = self.browser.find_element_by_css_selector(self._bounded_selector('.ui-toggle-expansion .fa'))
ActionChains(self.browser).move_to_element_with_offset(ele, 4, 4).click().perform()
self.wait_for_element_presence(self._bounded_selector(self.ADD_BUTTON_SELECTOR), 'Subsection is expanded')
EmptyPromise(
lambda: subsection_expanded() != currently_expanded,
"Check that the container {} has been toggled".format(self.locator)
).fulfill()
self.browser.execute_script("jQuery.fx.off = false;")
return self
@property
def is_collapsed(self):
"""
Return whether this outline item is currently collapsed.
"""
return "is-collapsed" in self.q(css=self._bounded_selector('')).first.attrs("class")[0] # pylint: disable=no-member
class CourseOutlineChild(PageObject, CourseOutlineItem):
"""
A page object that will be used as a child of :class:`CourseOutlineContainer`.
"""
url = None
BODY_SELECTOR = '.outline-item'
def __init__(self, browser, locator):
super(CourseOutlineChild, self).__init__(browser)
self.locator = locator
def is_browser_on_page(self):
return self.q(css='{}[data-locator="{}"]'.format(self.BODY_SELECTOR, self.locator)).present
def delete(self, cancel=False):
"""
Clicks the delete button, then cancels at the confirmation prompt if cancel is True.
"""
click_css(self, self._bounded_selector('.delete-button'), require_notification=False)
confirm_prompt(self, cancel)
def _bounded_selector(self, selector):
"""
Return `selector`, but limited to this particular `CourseOutlineChild` context
"""
return '{}[data-locator="{}"] {}'.format(
self.BODY_SELECTOR,
self.locator,
selector
)
@property
def name(self):
titles = self.q(css=self._bounded_selector(self.NAME_SELECTOR)).text
if titles:
return titles[0]
else:
return None
@property
def children(self):
"""
Will return any first-generation descendant items of this item.
"""
descendants = self.q(css=self._bounded_selector(self.BODY_SELECTOR)).map(
lambda el: CourseOutlineChild(self.browser, el.get_attribute('data-locator'))).results
# Now remove any non-direct descendants.
grandkids = []
for descendant in descendants:
grandkids.extend(descendant.children)
grand_locators = [grandkid.locator for grandkid in grandkids]
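# Illustrative example: if this item directly contains subsection 's1', which in turn
# contains unit 'u1', the css query above returns both; since 'u1' is among the
# children of 's1', only 's1' survives the final filter as a direct descendant.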
return [descendant for descendant in descendants if descendant.locator not in grand_locators]
class CourseOutlineUnit(CourseOutlineChild):
"""
PageObject that wraps a unit link on the Studio Course Outline page.
"""
url = None
BODY_SELECTOR = '.outline-unit'
NAME_SELECTOR = '.unit-title a'
def go_to(self):
"""
Open the container page linked to by this unit link, and return
an initialized :class:`.ContainerPage` for that unit.
"""
return ContainerPage(self.browser, self.locator).visit()
def is_browser_on_page(self):
return self.q(css=self.BODY_SELECTOR).present
def children(self):
return self.q(css=self._bounded_selector(self.BODY_SELECTOR)).map(
lambda el: CourseOutlineUnit(self.browser, el.get_attribute('data-locator'))).results
class CourseOutlineSubsection(CourseOutlineContainer, CourseOutlineChild):
"""
:class:`.PageObject` that wraps a subsection block on the Studio Course Outline page.
"""
url = None
BODY_SELECTOR = '.outline-subsection'
NAME_SELECTOR = '.subsection-title'
NAME_FIELD_WRAPPER_SELECTOR = '.subsection-header .wrapper-xblock-field'
CHILD_CLASS = CourseOutlineUnit
def unit(self, title):
"""
Return the :class:`.CourseOutlineUnit` with the title `title`.
"""
return self.child(title)
def units(self):
"""
Returns the units in this subsection.
"""
return self.children()
def unit_at(self, index):
"""
Returns the CourseOutlineUnit at the specified index.
"""
return self.child_at(index)
def add_unit(self):
"""
Adds a unit to this subsection
"""
self.q(css=self._bounded_selector(self.ADD_BUTTON_SELECTOR)).click()
class CourseOutlineSection(CourseOutlineContainer, CourseOutlineChild):
"""
:class:`.PageObject` that wraps a section block on the Studio Course Outline page.
"""
url = None
BODY_SELECTOR = '.outline-section'
NAME_SELECTOR = '.section-title'
NAME_FIELD_WRAPPER_SELECTOR = '.section-header .wrapper-xblock-field'
CHILD_CLASS = CourseOutlineSubsection
def subsection(self, title):
"""
Return the :class:`.CourseOutlineSubsection` with the title `title`.
"""
return self.child(title)
def subsections(self):
"""
Returns a list of the CourseOutlineSubsections of this section
"""
return self.children()
def subsection_at(self, index):
"""
Returns the CourseOutlineSubsection at the specified index.
"""
return self.child_at(index)
def add_subsection(self):
"""
Adds a subsection to this section
"""
self.add_child()
class ExpandCollapseLinkState(object):
"""
Represents the three states that the expand/collapse link can be in
"""
MISSING = 0
COLLAPSE = 1
EXPAND = 2
class CourseOutlinePage(CoursePage, CourseOutlineContainer):
"""
Course Outline page in Studio.
"""
url_path = "course"
CHILD_CLASS = CourseOutlineSection
EXPAND_COLLAPSE_CSS = '.button-toggle-expand-collapse'
BOTTOM_ADD_SECTION_BUTTON = '.outline > .add-section .button-new'
def is_browser_on_page(self):
return all([
self.q(css='body.view-outline').present,
self.q(css='.content-primary').present,
self.q(css='div.ui-loading.is-hidden').present
])
def view_live(self):
"""
Clicks the "View Live" link and switches to the new tab
"""
click_css(self, '.view-live-button', require_notification=False)
self.browser.switch_to_window(self.browser.window_handles[-1])
def section(self, title):
"""
Return the :class:`.CourseOutlineSection` with the title `title`.
"""
return self.child(title)
def section_at(self, index):
"""
Returns the :class:`.CourseOutlineSection` at the specified index.
"""
return self.child_at(index)
def click_section_name(self, parent_css=''):
"""
Find and click on first section name in course outline
"""
self.q(css='{} .section-name'.format(parent_css)).first.click()
def get_section_name(self, parent_css='', page_refresh=False):
"""
Get the list of names of all sections present
"""
if page_refresh:
self.browser.refresh()
return self.q(css='{} .section-name'.format(parent_css)).text
def section_name_edit_form_present(self, parent_css=''):
"""
Check that section name edit form present
"""
return self.q(css='{} .section-name input'.format(parent_css)).present
def change_section_name(self, new_name, parent_css=''):
"""
Change section name of first section present in course outline
"""
self.click_section_name(parent_css)
self.q(css='{} .section-name input'.format(parent_css)).first.fill(new_name)
self.q(css='{} .section-name .save-button'.format(parent_css)).first.click()
self.wait_for_ajax()
def sections(self):
"""
Returns the sections of this course outline page.
"""
return self.children()
def add_section_from_top_button(self):
"""
Clicks the button for adding a section which resides at the top of the screen.
"""
click_css(self, '.wrapper-mast nav.nav-actions .button-new')
def add_section_from_bottom_button(self, click_child_icon=False):
"""
Clicks the button for adding a section which resides at the bottom of the screen.
"""
element_css = self.BOTTOM_ADD_SECTION_BUTTON
if click_child_icon:
element_css += " .fa-plus"
click_css(self, element_css)
def toggle_expand_collapse(self):
"""
Toggles whether all sections are expanded or collapsed
"""
self.q(css=self.EXPAND_COLLAPSE_CSS).click()
def start_reindex(self):
"""
Starts course reindex by clicking reindex button
"""
self.reindex_button.click()
def open_subsection_settings_dialog(self, index=0):
"""
Clicks on the settings button of a subsection.
"""
self.q(css=".subsection-header-actions .configure-button").nth(index).click()
self.wait_for_element_presence('.course-outline-modal', 'Subsection settings modal is present.')
def change_problem_release_date(self):
"""
Sets a new start date
"""
self.q(css=".subsection-header-actions .configure-button").first.click()
self.q(css="#start_date").fill("01/01/2030")
self.q(css=".action-save").first.click()
self.wait_for_ajax()
def change_problem_due_date(self, date):
"""
Sets a new due date.
Expects date to be a string that will be accepted by the input (for example, '01/01/1970')
"""
self.q(css=".subsection-header-actions .configure-button").first.click()
self.q(css="#due_date").fill(date)
self.q(css=".action-save").first.click()
self.wait_for_ajax()
def select_advanced_tab(self):
"""
Select the advanced settings tab
"""
self.q(css=".settings-tab-button[data-tab='advanced']").first.click()
self.wait_for_element_presence('input.no_special_exam', 'Special exam settings fields not present.')
def make_exam_proctored(self):
"""
Makes a Proctored exam.
"""
self.q(css="input.proctored_exam").first.click()
self.q(css=".action-save").first.click()
self.wait_for_ajax()
def make_exam_timed(self, hide_after_due=False):
"""
Makes a timed exam.
"""
self.q(css="input.timed_exam").first.click()
if hide_after_due:
self.q(css='.field-hide-after-due input').first.click()
self.q(css=".action-save").first.click()
self.wait_for_ajax()
def select_none_exam(self):
"""
Choose "none" exam but do not press enter
"""
self.q(css="input.no_special_exam").first.click()
def select_timed_exam(self):
"""
Choose a timed exam but do not press enter
"""
self.q(css="input.timed_exam").first.click()
def select_proctored_exam(self):
"""
Choose a proctored exam but do not press enter
"""
self.q(css="input.proctored_exam").first.click()
def select_practice_exam(self):
"""
Choose a practice exam but do not press enter
"""
self.q(css="input.practice_exam").first.click()
def time_allotted_field_visible(self):
"""
Returns whether the time allotted field is visible
"""
return self.q(css=".field-time-limit").visible
def exam_review_rules_field_visible(self):
"""
Returns whether the review rules field is visible
"""
return self.q(css=".field-exam-review-rules").visible
def hide_after_due_field_visible(self):
"""
Returns whether the hide after due field is visible
"""
return self.q(css=".field-hide-after-due").visible
def proctoring_items_are_displayed(self):
"""
Returns True if all the items are found.
"""
# The None radio button
if not self.q(css="input.no_special_exam").present:
return False
# The Timed exam radio button
if not self.q(css="input.timed_exam").present:
return False
# The Proctored exam radio button
if not self.q(css="input.proctored_exam").present:
return False
# The Practice exam radio button
if not self.q(css="input.practice_exam").present:
return False
return True
def select_access_tab(self):
"""
Select the access settings tab.
"""
self.q(css=".settings-tab-button[data-tab='access']").first.click()
self.wait_for_element_visibility('#is_prereq', 'Gating settings fields are present.')
def make_gating_prerequisite(self):
"""
Makes a subsection a gating prerequisite.
"""
if not self.q(css="#is_prereq")[0].is_selected():
self.q(css='label[for="is_prereq"]').click()
self.q(css=".action-save").first.click()
self.wait_for_ajax()
def add_prerequisite_to_subsection(self, min_score):
"""
Adds a prerequisite to a subsection.
"""
Select(self.q(css="#prereq")[0]).select_by_index(1)
self.q(css="#prereq_min_score").fill(min_score)
self.q(css=".action-save").first.click()
self.wait_for_ajax()
def gating_prerequisite_checkbox_is_visible(self):
"""
Returns True if the gating prerequisite checkbox is visible.
"""
# The Prerequisite checkbox is visible
return self.q(css="#is_prereq").visible
def gating_prerequisite_checkbox_is_checked(self):
"""
Returns True if the gating prerequisite checkbox is checked.
"""
# The Prerequisite checkbox is checked
return self.q(css="#is_prereq:checked").present
def gating_prerequisites_dropdown_is_visible(self):
"""
Returns True if the gating prerequisites dropdown is visible.
"""
# The Prerequisites dropdown is visible
return self.q(css="#prereq").visible
def gating_prerequisite_min_score_is_visible(self):
"""
Returns True if the gating prerequisite minimum score input is visible.
"""
# The Prerequisite minimum score input is visible
return self.q(css="#prereq_min_score").visible
@property
def bottom_add_section_button(self):
"""
Returns the query representing the bottom add section button.
"""
return self.q(css=self.BOTTOM_ADD_SECTION_BUTTON).first
@property
def has_no_content_message(self):
"""
Returns true if a message informing the user that the course has no content is visible
"""
return self.q(css='.outline .no-content').is_present()
@property
def has_rerun_notification(self):
"""
Returns true iff the rerun notification is present on the page.
"""
return self.q(css='.wrapper-alert.is-shown').is_present()
def dismiss_rerun_notification(self):
"""
Clicks the dismiss button in the rerun notification.
"""
self.q(css='.dismiss-button').click()
@property
def expand_collapse_link_state(self):
"""
Returns the current state of the expand/collapse link
"""
link = self.q(css=self.EXPAND_COLLAPSE_CSS)[0]
if not link.is_displayed():
return ExpandCollapseLinkState.MISSING
elif "collapse-all" in link.get_attribute("class"):
return ExpandCollapseLinkState.COLLAPSE
else:
return ExpandCollapseLinkState.EXPAND
@property
def reindex_button(self):
"""
Returns reindex button.
"""
return self.q(css=".button.button-reindex")[0]
def expand_all_subsections(self):
"""
Expands all the subsections in this course.
"""
for section in self.sections():
if section.is_collapsed:
section.expand_subsection()
for subsection in section.subsections():
if subsection.is_collapsed:
subsection.expand_subsection()
@property
def xblocks(self):
"""
Return a list of xblocks loaded on the outline page.
"""
return self.children(CourseOutlineChild)
@property
def license(self):
"""
Returns the course license text, if present. Else returns None.
"""
return self.q(css=".license-value").first.text[0]
@property
def deprecated_warning_visible(self):
"""
Returns true if the deprecated warning is visible.
"""
return self.q(css='.wrapper-alert-error.is-shown').is_present()
@property
def warning_heading_text(self):
"""
Returns deprecated warning heading text.
"""
return self.q(css='.warning-heading-text').text[0]
@property
def components_list_heading(self):
"""
Returns deprecated warning component list heading text.
"""
return self.q(css='.components-list-heading-text').text[0]
@property
def modules_remove_text_shown(self):
"""
Returns True if deprecated warning advance modules remove text is visible.
"""
return self.q(css='.advance-modules-remove-text').visible
@property
def modules_remove_text(self):
"""
Returns deprecated warning advance modules remove text.
"""
return self.q(css='.advance-modules-remove-text').text[0]
@property
def components_visible(self):
"""
Returns True if components list visible.
"""
return self.q(css='.components-list').visible
@property
def components_display_names(self):
"""
Returns deprecated warning components display name list.
"""
return self.q(css='.components-list li>a').text
@property
def deprecated_advance_modules(self):
"""
Returns deprecated advance modules list.
"""
return self.q(css='.advance-modules-list li').text
class CourseOutlineModal(object):
"""
Page object specifically for a modal window on the course outline page.
"""
MODAL_SELECTOR = ".wrapper-modal-window"
def __init__(self, page):
self.page = page
def _bounded_selector(self, selector):
"""
Returns `selector`, but limited to this particular `CourseOutlineModal` context.
"""
return " ".join([self.MODAL_SELECTOR, selector])
def is_shown(self):
"""
Return whether or not the modal defined by self.MODAL_SELECTOR is shown.
"""
return self.page.q(css=self.MODAL_SELECTOR).present
def find_css(self, selector):
"""
Find the given css selector on the page.
"""
return self.page.q(css=self._bounded_selector(selector))
def click(self, selector, index=0):
"""
Perform a Click action on the given selector.
"""
self.find_css(selector).nth(index).click()
def save(self):
"""
Click the save action button, and wait for the ajax call to return.
"""
self.click(".action-save")
self.page.wait_for_ajax()
def publish(self):
"""
Click the publish action button, and wait for the ajax call to return.
"""
self.click(".action-publish")
self.page.wait_for_ajax()
def cancel(self):
"""
Click the cancel action button.
"""
self.click(".action-cancel")
def has_release_date(self):
"""
Check if the input box for the release date exists in the subsection's settings window
"""
return self.find_css("#start_date").present
def has_release_time(self):
"""
Check if the input box for the release time exists in the subsection's settings window
"""
return self.find_css("#start_time").present
def has_due_date(self):
"""
Check if the input box for the due date exists in the subsection's settings window
"""
return self.find_css("#due_date").present
def has_due_time(self):
"""
Check if the input box for the due time exists in the subsection's settings window
"""
return self.find_css("#due_time").present
def has_policy(self):
"""
Check if the input for the grading policy is present.
"""
return self.find_css("#grading_type").present
def set_date(self, property_name, input_selector, date):
"""
Set `date` value to input pointed by `selector` and `property_name`.
"""
month, day, year = map(int, date.split('/'))
self.click(input_selector)
if getattr(self, property_name):
current_month, current_year = map(int, getattr(self, property_name).split('/')[1:])
else: # Use default timepicker values, which are current month and year.
current_month, current_year = datetime.datetime.today().month, datetime.datetime.today().year
date_diff = 12 * (year - current_year) + month - current_month
selector = "a.ui-datepicker-{}".format('next' if date_diff > 0 else 'prev')
for __ in xrange(abs(date_diff)):
self.page.q(css=selector).click()
self.page.q(css="a.ui-state-default").nth(day - 1).click() # set day
self.page.wait_for_element_invisibility("#ui-datepicker-div", "datepicker should be closed")
EmptyPromise(
lambda: getattr(self, property_name) == u'{m}/{d}/{y}'.format(m=month, d=day, y=year),
"{} is updated in modal.".format(property_name)
).fulfill()
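# Worked example (hypothetical dates): with the picker showing 01/2030 and a target
# date of 03/2031, date_diff = 12 * (2031 - 2030) + 3 - 1 = 14, i.e. fourteen clicks
# on the 'next' arrow before selecting the day.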
def set_time(self, input_selector, time):
"""
Set `time` value to input pointed by `input_selector`
Not using the time picker to make sure it's not being rounded up
"""
self.page.q(css=input_selector).fill(time)
self.page.q(css=input_selector).results[0].send_keys(Keys.ENTER)
@property
def release_date(self):
"""
Returns the unit's release date. Date is "mm/dd/yyyy" string.
"""
return self.find_css("#start_date").first.attrs('value')[0]
@release_date.setter
def release_date(self, date):
"""
Sets the unit's release date to `date`. Date is "mm/dd/yyyy" string.
"""
self.set_date('release_date', "#start_date", date)
@property
def release_time(self):
"""
Returns the current value of the release time. Default is u'00:00'
"""
return self.find_css("#start_time").first.attrs('value')[0]
@release_time.setter
def release_time(self, time):
"""
Time is "HH:MM" string.
"""
self.set_time("#start_time", time)
@property
def due_date(self):
"""
Returns the due date from the page. Date is "mm/dd/yyyy" string.
"""
return self.find_css("#due_date").first.attrs('value')[0]
@due_date.setter
def due_date(self, date):
"""
Sets the due date for the unit. Date is "mm/dd/yyyy" string.
"""
self.set_date('due_date', "#due_date", date)
@property
def due_time(self):
"""
Returns the current value of the due time. Default is u''
"""
return self.find_css("#due_time").first.attrs('value')[0]
@due_time.setter
def due_time(self, time):
"""
Time is "HH:MM" string.
"""
self.set_time("#due_time", time)
@property
def policy(self):
"""
Returns the grading format currently selected in the drop-down list.
"""
element = self.find_css('#grading_type')[0]
return self.get_selected_option_text(element)
@policy.setter
def policy(self, grading_label):
"""
Select the grading format with `value` in the drop-down list.
"""
element = self.find_css('#grading_type')[0]
select = Select(element)
select.select_by_visible_text(grading_label)
EmptyPromise(
lambda: self.policy == grading_label,
"Grading label is updated.",
).fulfill()
@property
def is_explicitly_locked(self):
"""
Returns true if the explicit staff lock checkbox is checked, false otherwise.
"""
return self.find_css('#staff_lock')[0].is_selected()
@is_explicitly_locked.setter
def is_explicitly_locked(self, value):
"""
Checks the explicit staff lock box if value is true, otherwise unchecks the box.
"""
if value != self.is_explicitly_locked:
self.find_css('label[for="staff_lock"]').click()
EmptyPromise(lambda: value == self.is_explicitly_locked, "Explicit staff lock is updated").fulfill()
def shows_staff_lock_warning(self):
"""
Returns true iff the staff lock warning is visible.
"""
return self.find_css('.staff-lock .tip-warning').visible
def get_selected_option_text(self, element):
"""
Returns the text of the first selected option for the element.
"""
if element:
select = Select(element)
return select.first_selected_option.text
else:
return None
| agpl-3.0 |
highweb-project/highweb-webcl-html5spec | build/android/pylib/local/device/local_device_environment.py | 4 | 4412 | # Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import datetime
import logging
import os
import shutil
import tempfile
import threading
from devil.android import device_blacklist
from devil.android import device_errors
from devil.android import device_utils
from devil.android import logcat_monitor
from devil.utils import file_utils
from devil.utils import parallelizer
from pylib import constants
from pylib.base import environment
def _DeviceCachePath(device):
file_name = 'device_cache_%s.json' % device.adb.GetDeviceSerial()
return os.path.join(constants.GetOutDirectory(), file_name)
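# For example, a device with (hypothetical) serial "0123456789abcdef" gets
# its cache at <out_dir>/device_cache_0123456789abcdef.json.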
class LocalDeviceEnvironment(environment.Environment):
def __init__(self, args, _error_func):
super(LocalDeviceEnvironment, self).__init__()
self._blacklist = (device_blacklist.Blacklist(args.blacklist_file)
if args.blacklist_file
else None)
self._device_serial = args.test_device
self._devices_lock = threading.Lock()
self._devices = []
self._max_tries = 1 + args.num_retries
self._tool_name = args.tool
self._enable_device_cache = args.enable_device_cache
self._concurrent_adb = args.enable_concurrent_adb
self._logcat_output_dir = args.logcat_output_dir
self._logcat_output_file = args.logcat_output_file
self._logcat_monitors = []
#override
def SetUp(self):
available_devices = device_utils.DeviceUtils.HealthyDevices(
self._blacklist, enable_device_files_cache=self._enable_device_cache)
if not available_devices:
raise device_errors.NoDevicesError
if self._device_serial:
self._devices = [d for d in available_devices
if d.adb.GetDeviceSerial() == self._device_serial]
if not self._devices:
raise device_errors.DeviceUnreachableError(
'Could not find device %r' % self._device_serial)
else:
self._devices = available_devices
if self._enable_device_cache:
for d in self._devices:
cache_path = _DeviceCachePath(d)
if os.path.exists(cache_path):
logging.info('Using device cache: %s', cache_path)
with open(cache_path) as f:
d.LoadCacheData(f.read())
# Delete cached file so that any exceptions cause it to be cleared.
os.unlink(cache_path)
if self._logcat_output_file:
self._logcat_output_dir = tempfile.mkdtemp()
if self._logcat_output_dir:
for d in self._devices:
logcat_file = os.path.join(
self._logcat_output_dir,
'%s_%s' % (d.adb.GetDeviceSerial(),
datetime.datetime.utcnow().strftime('%Y%m%dT%H%M%S')))
monitor = logcat_monitor.LogcatMonitor(
d.adb, clear=True, output_file=logcat_file)
self._logcat_monitors.append(monitor)
monitor.Start()
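# The log file name follows the serial/timestamp pattern above, e.g. (values
# hypothetical) <logcat_output_dir>/0123456789abcdef_20160102T030405.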
@property
def devices(self):
if not self._devices:
raise device_errors.NoDevicesError()
return self._devices
@property
def concurrent_adb(self):
return self._concurrent_adb
@property
def parallel_devices(self):
return parallelizer.SyncParallelizer(self.devices)
@property
def max_tries(self):
return self._max_tries
@property
def tool(self):
return self._tool_name
#override
def TearDown(self):
# Write the cache even when not using it so that it will be ready the first
# time that it is enabled. Writing it every time is also necessary so that
# an invalid cache can be flushed just by disabling it for one run.
for d in self._devices:
cache_path = _DeviceCachePath(d)
with open(cache_path, 'w') as f:
f.write(d.DumpCacheData())
logging.info('Wrote device cache: %s', cache_path)
for m in self._logcat_monitors:
m.Stop()
m.Close()
if self._logcat_output_file:
file_utils.MergeFiles(
self._logcat_output_file,
[m.output_file for m in self._logcat_monitors])
shutil.rmtree(self._logcat_output_dir)
def BlacklistDevice(self, device, reason='local_device_failure'):
device_serial = device.adb.GetDeviceSerial()
if self._blacklist:
self._blacklist.Extend([device_serial], reason=reason)
with self._devices_lock:
self._devices = [d for d in self._devices if str(d) != device_serial]
| bsd-3-clause |
kntem/webdeposit | modules/webaccess/lib/external_authentication_ldap.py | 25 | 9330 | # -*- coding: utf-8 -*-
##
## This file is part of Invenio.
## Copyright (C) 2007, 2008, 2009, 2010, 2011 CERN.
##
## Invenio is free software; you can redistribute it and/or
## modify it under the terms of the GNU General Public License as
## published by the Free Software Foundation; either version 2 of the
## License, or (at your option) any later version.
##
## Invenio is distributed in the hope that it will be useful, but
## WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with Invenio; if not, write to the Free Software Foundation, Inc.,
## 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""External user authentication for EPFL's LDAP instance.
This LDAP external authentication system relies on a collaborative LDAP
organized like this:
o=EPFL, c=CH
|
|
+--ou=groups
| |
| |
| +--- cn=xxx
| displayName= name of the group
| uniqueIdentifier= some local id for groups
|
|
|
+--ou=users
| |
| |
| +---uid= some local id for users (ex: grfavre)
| uniqueIdentifier= another local id (ex: 128933)
| mail=xxx@xxx.xx
| memberOf= id of a group
| memberOf= id of another group
|
+
This example of an LDAP authentication should help you develop yours in your
specific installation.
"""
__revision__ = \
"$Id$"
import ldap
from invenio.external_authentication import ExternalAuth, \
InvenioWebAccessExternalAuthError
CFG_EXTERNAL_AUTH_LDAP_SERVERS = ['ldap://scoldap.epfl.ch']
CFG_EXTERNAL_AUTH_LDAP_CONTEXT = "o=EPFL,c=CH"
CFG_EXTERNAL_AUTH_LDAP_USER_UID = ["uid", "uniqueIdentifier", "mail"]
CFG_EXTERNAL_AUTH_LDAP_MAIL_ENTRY = 'mail'
CFG_EXTERNAL_AUTH_LDAP_GROUP_MEMBERSHIP = 'memberOf'
CFG_EXTERNAL_AUTH_LDAP_GROUP_UID = 'uniqueIdentifier'
CFG_EXTERNAL_AUTH_LDAP_GROUP_NAME = 'displayName'
CFG_EXTERNAL_AUTH_LDAP_HIDDEN_GROUPS = ['EPFL-unit', 'users']
class ExternalAuthLDAP(ExternalAuth):
"""
External authentication example for a custom LDAP-based
authentication service.
"""
def __init__(self):
"""Initialize stuff here"""
ExternalAuth.__init__(self)
self.enforce_external_nicknames = True
def _ldap_try (self, command):
""" Try to run the specified command on the first LDAP server that
is not down."""
for server in CFG_EXTERNAL_AUTH_LDAP_SERVERS:
try:
connection = ldap.initialize(server)
return command(connection)
except ldap.SERVER_DOWN, error_message:
continue
raise InvenioWebAccessExternalAuthError
def auth_user(self, username, password, req=None):
"""
Check USERNAME and PASSWORD against the LDAP system.
Return (None, None) if authentication failed, or the (email address, user_dn) of the
person if the authentication was successful.
Raise InvenioWebAccessExternalAuthError in case of external troubles.
Note: for SSO the parameters are discarded and overridden by Shibboleth
variables.
"""
if not password:
return None, None
query = '(|' + ''.join (['(%s=%s)' % (attrib, username)
for attrib in
CFG_EXTERNAL_AUTH_LDAP_USER_UID]) \
+ ')'
def _check (connection):
users = connection.search_s(CFG_EXTERNAL_AUTH_LDAP_CONTEXT,
ldap.SCOPE_SUBTREE,
query)
# We pick the first result, as all the data we are interested
# in should be the same in all the entries.
if len(users):
user_dn, user_info = users [0]
else:
return None, None
try:
connection.simple_bind_s(user_dn, password)
except ldap.INVALID_CREDENTIALS:
# It is enough to fail on one server to consider the credential
# to be invalid
return None, None
return user_info[CFG_EXTERNAL_AUTH_LDAP_MAIL_ENTRY][0], user_dn
return self._ldap_try(_check)
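# On success auth_user returns the (mail, user_dn) pair, e.g. (hypothetical):
#   ("jane.doe@epfl.ch", "uid=jdoe,ou=users,o=EPFL,c=CH")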
def user_exists(self, email, req=None):
"""Check the external authentication system for existance of email.
@return: True if the user exists, False otherwise
"""
query = '(%s=%s)' % (CFG_EXTERNAL_AUTH_LDAP_MAIL_ENTRY, email)
def _check (connection):
users = connection.search_s(CFG_EXTERNAL_AUTH_LDAP_CONTEXT,
ldap.SCOPE_SUBTREE,
query)
return len(users) != 0
return self._ldap_try(_check)
def fetch_user_nickname(self, username, password=None, req=None):
"""Given a username and a password, returns the right nickname belonging
to that user (username could be an email).
"""
query = '(|' + ''.join (['(%s=%s)' % (attrib, username)
for attrib in
CFG_EXTERNAL_AUTH_LDAP_USER_UID]) \
+ ')'
def _get_nickname(connection):
users = connection.search_s(CFG_EXTERNAL_AUTH_LDAP_CONTEXT,
ldap.SCOPE_SUBTREE,
query)
# We pick the first result, as all the data we are interested
# in should be the same in all the entries.
if len(users):
user_dn, user_info = users [0]
else:
return None
emails = user_info[CFG_EXTERNAL_AUTH_LDAP_MAIL_ENTRY]
if len(emails):
email = emails[0]
else:
return None
(left_part, right_part) = email.split('@')
nickname = left_part.replace('.', ' ').title()
if right_part != 'epfl.ch':
nickname += ' - ' + right_part
return nickname
return self._ldap_try(_get_nickname)
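# Sketch of the derivation above (addresses hypothetical):
#   "jane.doe@epfl.ch"     -> "Jane Doe"
#   "jane.doe@example.org" -> "Jane Doe - example.org"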
def fetch_user_groups_membership(self, username, password=None, req=None):
"""Given a username and a password, returns a dictionary of groups
and their description to which the user is subscribed.
Raise InvenioWebAccessExternalAuthError in case of troubles.
"""
query_person = '(|' + ''.join (['(%s=%s)' % (attrib, username)
for attrib in
CFG_EXTERNAL_AUTH_LDAP_USER_UID]) \
+ ')'
def _get_groups(connection):
users = connection.search_s(CFG_EXTERNAL_AUTH_LDAP_CONTEXT,
ldap.SCOPE_SUBTREE,
query_person)
if len(users):
user_dn, user_info = users [0]
else:
return {}
groups = {}
group_ids = user_info[CFG_EXTERNAL_AUTH_LDAP_GROUP_MEMBERSHIP]
for group_id in group_ids:
query_group = '(%s=%s)' % (CFG_EXTERNAL_AUTH_LDAP_GROUP_UID,
group_id)
ldap_group = connection.search_s(CFG_EXTERNAL_AUTH_LDAP_CONTEXT,
ldap.SCOPE_SUBTREE,
query_group)
if len(ldap_group):
group_dn, group_infos = ldap_group[0]
group_name = group_infos[CFG_EXTERNAL_AUTH_LDAP_GROUP_NAME][0]
if group_name in CFG_EXTERNAL_AUTH_LDAP_HIDDEN_GROUPS:
continue
groups[group_id] = group_name
return groups
return self._ldap_try(_get_groups)
def fetch_user_preferences(self, username, password=None, req=None):
"""Given a username and a password, returns a dictionary of keys and
values, corresponding to external infos and settings.
userprefs = {"telephone": "2392489",
"address": "10th Downing Street"}
(WEBUSER WILL erase all prefs that start with EXTERNAL_ and will
store e.g. "EXTERNAL_telephone"; all internal preferences can use whatever
name as long as it starts with EXTERNAL). If a pref begins with HIDDEN_
it will be ignored.
"""
query = '(|' + ''.join (['(%s=%s)' % (attrib, username)
for attrib in
CFG_EXTERNAL_AUTH_LDAP_USER_UID]) \
+ ')'
def _get_personal_infos(connection):
users = connection.search_s(CFG_EXTERNAL_AUTH_LDAP_CONTEXT,
ldap.SCOPE_SUBTREE,
query)
if len(users):
user_dn, user_info = users [0]
return user_info
else:
return {}
return self._ldap_try(_get_personal_infos)
| gpl-2.0 |
EducationforKids/e4k | node_modules/gulp-sass/node_modules/node-sass/node_modules/pangyp/gyp/pylib/gyp/flock_tool.py | 1835 | 1748 | #!/usr/bin/env python
# Copyright (c) 2011 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""These functions are executed via gyp-flock-tool when using the Makefile
generator. Used on systems that don't have a built-in flock."""
import fcntl
import os
import struct
import subprocess
import sys
def main(args):
executor = FlockTool()
executor.Dispatch(args)
class FlockTool(object):
"""This class emulates the 'flock' command."""
def Dispatch(self, args):
"""Dispatches a string command to a method."""
if len(args) < 1:
raise Exception("Not enough arguments")
method = "Exec%s" % self._CommandifyName(args[0])
getattr(self, method)(*args[1:])
def _CommandifyName(self, name_string):
"""Transforms a tool name like copy-info-plist to CopyInfoPlist"""
return name_string.title().replace('-', '')
def ExecFlock(self, lockfile, *cmd_list):
"""Emulates the most basic behavior of Linux's flock(1)."""
# Rely on exception handling to report errors.
# Note that the stock python on SunOS has a bug
# where fcntl.flock(fd, LOCK_EX) always fails
# with EBADF, that's why we use this F_SETLK
# hack instead.
fd = os.open(lockfile, os.O_WRONLY|os.O_NOCTTY|os.O_CREAT, 0666)
if sys.platform.startswith('aix'):
# Python on AIX is compiled with LARGEFILE support, which changes the
# struct size.
op = struct.pack('hhIllqq', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
else:
op = struct.pack('hhllhhl', fcntl.F_WRLCK, 0, 0, 0, 0, 0, 0)
fcntl.fcntl(fd, fcntl.F_SETLK, op)
return subprocess.call(cmd_list)
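# Hypothetical invocation, matching Dispatch's name mangling
# ("flock" -> ExecFlock):
#   gyp-flock-tool flock /tmp/build.lock make -j4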
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| mit |
droark/bitcoin | test/functional/wallet_fallbackfee.py | 42 | 1487 | #!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test wallet replace-by-fee capabilities in conjunction with the fallbackfee."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_raises_rpc_error
class WalletRBFTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 1
self.setup_clean_chain = True
def skip_test_if_missing_module(self):
self.skip_if_no_wallet()
def run_test(self):
self.nodes[0].generate(101)
# sending a transaction without fee estimations must be possible by default on regtest
self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1)
# test sending a tx with disabled fallback fee (must fail)
self.restart_node(0, extra_args=["-fallbackfee=0"])
assert_raises_rpc_error(-4, "Fee estimation failed", lambda: self.nodes[0].sendtoaddress(self.nodes[0].getnewaddress(), 1))
assert_raises_rpc_error(-4, "Fee estimation failed", lambda: self.nodes[0].fundrawtransaction(self.nodes[0].createrawtransaction([], {self.nodes[0].getnewaddress(): 1})))
assert_raises_rpc_error(-6, "Fee estimation failed", lambda: self.nodes[0].sendmany("", {self.nodes[0].getnewaddress(): 1}))
if __name__ == '__main__':
WalletRBFTest().main()
| mit |
didrocks/quickly | data/templates/ubuntu-application/test.py | 1 | 1301 | #!/usr/bin/python
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
# Copyright 2011 Tony Byrne
#
# This file is part of Quickly ubuntu-application template
#
#This program is free software: you can redistribute it and/or modify it
#under the terms of the GNU General Public License version 3, as published
#by the Free Software Foundation.
#This program is distributed in the hope that it will be useful, but
#WITHOUT ANY WARRANTY; without even the implied warranties of
#MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
#PURPOSE. See the GNU General Public License for more details.
#You should have received a copy of the GNU General Public License along
#with this program. If not, see <http://www.gnu.org/licenses/>.
import sys
import subprocess
import gettext
from gettext import gettext as _
gettext.textdomain('quickly')
from quickly import templatetools
def usage():
templatetools.print_usage('quickly test')
def help():
print _("""This command tests your project using the contents of the tests directory""")
templatetools.handle_additional_parameters(sys.argv, help, usage=usage)
#search and find all tests
command = ["nosetests"]
command.extend(sys.argv[1:])
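# Hypothetical example: assuming quickly has already consumed its own
# arguments, "quickly test -v" would forward "-v", executing
# ["nosetests", "-v"].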
return_code = subprocess.call(command)
sys.exit(return_code)
| gpl-3.0 |
noironetworks/nova | nova/tests/unit/api/openstack/compute/test_extensions.py | 27 | 8620 | # Copyright 2013 IBM Corp.
# Copyright 2014 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
import stevedore
import webob.exc
from nova.api import openstack
from nova.api.openstack import compute
from nova.api.openstack.compute import extension_info
from nova.api.openstack import extensions
from nova import exception
from nova import test
CONF = cfg.CONF
class fake_bad_extension(object):
name = "fake_bad_extension"
alias = "fake-bad"
class fake_stevedore_enabled_extensions(object):
def __init__(self, namespace, check_func, invoke_on_load=False,
invoke_args=(), invoke_kwds=None):
self.extensions = []
def map(self, func, *args, **kwds):
pass
def __iter__(self):
return iter(self.extensions)
class fake_loaded_extension_info(object):
def __init__(self):
self.extensions = {}
def register_extension(self, ext):
self.extensions[ext] = ext
return True
def get_extensions(self):
return {'core1': None, 'core2': None, 'noncore1': None}
class ExtensionLoadingTestCase(test.NoDBTestCase):
def _set_v21_core(self, core_extensions):
openstack.API_V21_CORE_EXTENSIONS = core_extensions
def test_extensions_loaded(self):
app = compute.APIRouterV21()
self.assertIn('servers', app._loaded_extension_info.extensions)
def test_check_bad_extension(self):
loaded_ext_info = extension_info.LoadedExtensionInfo()
self.assertFalse(loaded_ext_info._check_extension(fake_bad_extension))
def test_extensions_blacklist(self):
app = compute.APIRouterV21()
self.assertIn('os-hosts', app._loaded_extension_info.extensions)
CONF.set_override('extensions_blacklist', ['os-hosts'], 'osapi_v21')
app = compute.APIRouterV21()
self.assertNotIn('os-hosts', app._loaded_extension_info.extensions)
@mock.patch('nova.api.openstack.APIRouterV21._register_resources_list')
def test_extensions_inherit(self, mock_register):
app = compute.APIRouterV21()
self.assertIn('servers', app._loaded_extension_info.extensions)
self.assertIn('os-volumes', app._loaded_extension_info.extensions)
mock_register.assert_called_with(mock.ANY, mock.ANY)
ext_no_inherits = mock_register.call_args_list[0][0][0]
ext_has_inherits = mock_register.call_args_list[1][0][0]
# os-volumes inherits from servers
name_list = [ext.obj.alias for ext in ext_has_inherits]
self.assertIn('os-volumes', name_list)
name_list = [ext.obj.alias for ext in ext_no_inherits]
self.assertIn('servers', name_list)
def test_extensions_whitelist_accept(self):
# NOTE(maurosr): just to avoid to get an exception raised for not
# loading all core api.
v21_core = openstack.API_V21_CORE_EXTENSIONS
openstack.API_V21_CORE_EXTENSIONS = set(['servers'])
self.addCleanup(self._set_v21_core, v21_core)
app = compute.APIRouterV21()
self.assertIn('os-hosts', app._loaded_extension_info.extensions)
CONF.set_override('extensions_whitelist', ['servers', 'os-hosts'],
'osapi_v21')
app = compute.APIRouterV21()
self.assertIn('os-hosts', app._loaded_extension_info.extensions)
def test_extensions_whitelist_block(self):
# NOTE(maurosr): just to avoid to get an exception raised for not
# loading all core api.
v21_core = openstack.API_V21_CORE_EXTENSIONS
openstack.API_V21_CORE_EXTENSIONS = set(['servers'])
self.addCleanup(self._set_v21_core, v21_core)
app = compute.APIRouterV21()
self.assertIn('os-hosts', app._loaded_extension_info.extensions)
CONF.set_override('extensions_whitelist', ['servers'], 'osapi_v21')
app = compute.APIRouterV21()
self.assertNotIn('os-hosts', app._loaded_extension_info.extensions)
def test_blacklist_overrides_whitelist(self):
# NOTE(maurosr): just to avoid to get an exception raised for not
# loading all core api.
v21_core = openstack.API_V21_CORE_EXTENSIONS
openstack.API_V21_CORE_EXTENSIONS = set(['servers'])
self.addCleanup(self._set_v21_core, v21_core)
app = compute.APIRouterV21()
self.assertIn('os-hosts', app._loaded_extension_info.extensions)
CONF.set_override('extensions_whitelist', ['servers', 'os-hosts'],
'osapi_v21')
CONF.set_override('extensions_blacklist', ['os-hosts'], 'osapi_v21')
app = compute.APIRouterV21()
self.assertNotIn('os-hosts', app._loaded_extension_info.extensions)
self.assertIn('servers', app._loaded_extension_info.extensions)
self.assertEqual(1, len(app._loaded_extension_info.extensions))
def test_get_missing_core_extensions(self):
v21_core = openstack.API_V21_CORE_EXTENSIONS
openstack.API_V21_CORE_EXTENSIONS = set(['core1', 'core2'])
self.addCleanup(self._set_v21_core, v21_core)
self.assertEqual(0, len(
compute.APIRouterV21.get_missing_core_extensions(
['core1', 'core2', 'noncore1'])))
missing_core = compute.APIRouterV21.get_missing_core_extensions(
['core1'])
self.assertEqual(1, len(missing_core))
self.assertIn('core2', missing_core)
missing_core = compute.APIRouterV21.get_missing_core_extensions([])
self.assertEqual(2, len(missing_core))
self.assertIn('core1', missing_core)
self.assertIn('core2', missing_core)
missing_core = compute.APIRouterV21.get_missing_core_extensions(
['noncore1'])
self.assertEqual(2, len(missing_core))
self.assertIn('core1', missing_core)
self.assertIn('core2', missing_core)
def test_core_extensions_present(self):
self.stubs.Set(stevedore.enabled, 'EnabledExtensionManager',
fake_stevedore_enabled_extensions)
self.stubs.Set(extension_info, 'LoadedExtensionInfo',
fake_loaded_extension_info)
v21_core = openstack.API_V21_CORE_EXTENSIONS
openstack.API_V21_CORE_EXTENSIONS = set(['core1', 'core2'])
self.addCleanup(self._set_v21_core, v21_core)
# if no core API extensions are missing then an exception will
# not be raised when creating an instance of compute.APIRouterV21
compute.APIRouterV21()
def test_core_extensions_missing(self):
self.stubs.Set(stevedore.enabled, 'EnabledExtensionManager',
fake_stevedore_enabled_extensions)
self.stubs.Set(extension_info, 'LoadedExtensionInfo',
fake_loaded_extension_info)
self.assertRaises(exception.CoreAPIMissing, compute.APIRouterV21)
def test_extensions_expected_error(self):
@extensions.expected_errors(404)
def fake_func():
raise webob.exc.HTTPNotFound()
self.assertRaises(webob.exc.HTTPNotFound, fake_func)
def test_extensions_expected_error_from_list(self):
@extensions.expected_errors((404, 403))
def fake_func():
raise webob.exc.HTTPNotFound()
self.assertRaises(webob.exc.HTTPNotFound, fake_func)
def test_extensions_unexpected_error(self):
@extensions.expected_errors(404)
def fake_func():
raise webob.exc.HTTPConflict()
self.assertRaises(webob.exc.HTTPInternalServerError, fake_func)
def test_extensions_unexpected_error_from_list(self):
@extensions.expected_errors((404, 413))
def fake_func():
raise webob.exc.HTTPConflict()
self.assertRaises(webob.exc.HTTPInternalServerError, fake_func)
def test_extensions_unexpected_policy_not_authorized_error(self):
@extensions.expected_errors(404)
def fake_func():
raise exception.PolicyNotAuthorized(action="foo")
self.assertRaises(exception.PolicyNotAuthorized, fake_func)
| apache-2.0 |
borysiasty/inasafe | safe/gui/widgets/test/test_dock_regressions.py | 1 | 8222 | from unittest import TestCase
# this import required to enable PyQt API v2
# noinspection PyUnresolvedReferences
import qgis # pylint: disable=unused-import
from qgis.core import QgsMapLayerRegistry
from PyQt4 import QtCore
from safe.test.utilities import get_qgis_app
QGIS_APP, CANVAS, IFACE, PARENT = get_qgis_app()
from safe.impact_functions import register_impact_functions
from safe.test.utilities import (
test_data_path,
load_layer,
set_canvas_crs,
GEOCRS,
setup_scenario)
from safe.utilities.keyword_io import KeywordIO
from safe.gui.widgets.dock import Dock
# noinspection PyArgumentList
class TestDockRegressions(TestCase):
"""Regression tests for the InaSAFE GUI."""
@classmethod
def setUpClass(cls):
cls.dock = Dock(IFACE)
def setUp(self):
"""Fixture run before all tests.
These tests require that you manually load the layers you need.
"""
register_impact_functions()
self.dock.show_only_visible_layers_flag = True
self.dock.cboHazard.setCurrentIndex(0)
self.dock.cboExposure.setCurrentIndex(0)
self.dock.cboFunction.setCurrentIndex(0)
self.dock.run_in_thread_flag = False
self.dock.show_only_visible_layers_flag = False
self.dock.set_layer_from_title_flag = False
self.dock.zoom_to_impact_flag = False
self.dock.hide_exposure_flag = False
self.dock.show_intermediate_layers = False
self.dock.user_extent = None
self.dock.user_extent_crs = None
# For these tests we will generally use explicit overlap
# between hazard, exposure and view, so make that default
# see also safe/test/utilities.py where this is globally
# set to HazardExposure
settings = QtCore.QSettings()
settings.setValue('inasafe/analysis_extents_mode', 'HazardExposure')
QgsMapLayerRegistry.instance().removeAllMapLayers()
self.dock.cboHazard.clear()
self.dock.cboExposure.clear()
# noinspection PyUnusedLocal
def test_regression_2553_no_resample(self):
"""Test for regression 2553 (no resampling).
see :
https://github.com/inasafe/inasafe/issues/2553
We want to verify that the population analysis with resampling produces
a result within a reasonable range of the same analysis done without
resampling.
"""
hazard_path = test_data_path(
'hazard', 'continuous_flood_unaligned_big_size.tif')
exposure_path = test_data_path(
'exposure', 'people_allow_resampling_false.tif')
hazard_layer, hazard_layer_purpose = load_layer(hazard_path)
# Check if there is a regression about keywords being updated from
# another layer - see #2605
keywords = KeywordIO(hazard_layer)
self.assertIn('flood unaligned', keywords.to_message().to_text())
exposure_layer, exposure_layer_purpose = load_layer(
exposure_path)
keywords = KeywordIO(exposure_layer)
self.assertIn(
'*Allow resampling*, false------',
keywords.to_message().to_text())
QgsMapLayerRegistry.instance().addMapLayers(
[hazard_layer, exposure_layer])
# Count the total value of all exposure pixels
# counter-intuitively, width() actually returns the height here
height = exposure_layer.width()
# counter-intuitively, height() actually returns the width here
width = exposure_layer.height()
provider = exposure_layer.dataProvider()
# Bands count from 1!
block = provider.block(1, provider.extent(), height, width)
# Enable on-the-fly reprojection
set_canvas_crs(GEOCRS, True)
# This is the nicer way, but weirdly it gets nan for every cell
total_population = 0.0
cell_count = 0
row = 0
# Iterate down each column to match the layout produced by r.stats
while row < width:
column = 0
while column < height:
cell_count += 1
value = block.value(row, column)
if value > 0:
total_population += value
column += 1
row += 1
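# (Assumption: the value > 0 guard is there to skip nodata cells, which are
# commonly encoded as negative values or nan in these rasters.)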
print "Total value of all cells is: %d" % total_population
print "Number of cells counted: %d" % cell_count
# 131 computed using r.sum
self.assertAlmostEqual(total_population, 131.0177006121)
result, message = setup_scenario(
self.dock,
hazard='flood unaligned',
exposure='People never resample',
function='Need evacuation',
function_id='FloodEvacuationRasterHazardFunction')
self.assertTrue(result, message)
# Press RUN
self.dock.accept()
safe_layer = self.dock.impact_function.impact
keywords = safe_layer.get_keywords()
evacuated = float(keywords['evacuated'])
self.assertLess(evacuated, total_population)
expected_evacuated = 131.0
self.assertEqual(evacuated, expected_evacuated)
# noinspection PyUnusedLocal
def test_regression_2553_with_resample(self):
"""Test for regression 2553 (with resampling).
see :
https://github.com/inasafe/inasafe/issues/2553
We want to verify that the population analysis with resampling produces
a result within a reasonable range of the same analysis done without
resampling.
"""
hazard_path = test_data_path(
'hazard', 'continuous_flood_unaligned_big_size.tif')
exposure_path = test_data_path(
'exposure', 'people_allow_resampling_true.tif')
hazard_layer, hazard_layer_purpose = load_layer(hazard_path)
# Check if there is a regression about keywords being updated from
# another layer - see #2605
keywords = KeywordIO(hazard_layer)
self.assertIn('flood unaligned', keywords.to_message().to_text())
# check we have the right layer properties
exposure_layer, exposure_layer_purpose = load_layer(
exposure_path)
keywords = KeywordIO(exposure_layer)
self.assertNotIn(
'*Allow resampling*, false------',
keywords.to_message().to_text())
QgsMapLayerRegistry.instance().addMapLayers(
[hazard_layer, exposure_layer])
# Count the total value of all exposure pixels
# counter-intuitively, width() actually returns the height here
height = exposure_layer.width()
# counter-intuitively, height() actually returns the width here
width = exposure_layer.height()
provider = exposure_layer.dataProvider()
# Bands count from 1!
block = provider.block(1, provider.extent(), height, width)
# Enable on-the-fly reprojection
set_canvas_crs(GEOCRS, True)
# This is the nicer way, but weirdly it gets nan for every cell
total_population = 0.0
cell_count = 0
row = 0
# Iterate down each column to match the layout produced by r.stats
while row < width:
column = 0
while column < height:
cell_count += 1
value = block.value(row, column)
if value > 0:
total_population += value
column += 1
row += 1
print "Total value of all cells is: %d" % total_population
print "Number of cells counted: %d" % cell_count
result, message = setup_scenario(
self.dock,
hazard='flood unaligned',
exposure='People allow resampling',
function='Need evacuation',
function_id='FloodEvacuationRasterHazardFunction')
self.assertTrue(result, message)
# Press RUN
self.dock.accept()
safe_layer = self.dock.impact_function.impact
keywords = safe_layer.get_keywords()
evacuated = float(keywords['evacuated'])
self.assertLess(evacuated, total_population)
expected_evacuated = 127.0
self.assertEqual(evacuated, expected_evacuated)
| gpl-3.0 |
cfg2015/EPT-2015-2 | addons/base_import_module/models/ir_module.py | 238 | 4795 | import logging
import os
import sys
import zipfile
from os.path import join as opj
import openerp
from openerp.osv import osv
from openerp.tools import convert_file
from openerp.tools.translate import _
_logger = logging.getLogger(__name__)
MAX_FILE_SIZE = 100 * 1024 * 1024  # 100 MiB, expressed in bytes
class view(osv.osv):
_inherit = "ir.module.module"
def import_module(self, cr, uid, module, path, force=False, context=None):
known_mods = self.browse(cr, uid, self.search(cr, uid, []))
known_mods_names = dict([(m.name, m) for m in known_mods])
installed_mods = [m.name for m in known_mods if m.state == 'installed']
terp = openerp.modules.load_information_from_description_file(module, mod_path=path)
values = self.get_values_from_terp(terp)
unmet_dependencies = set(terp['depends']).difference(installed_mods)
if unmet_dependencies:
msg = _("Unmet module dependencies: %s")
raise osv.except_osv(_('Error !'), msg % ', '.join(unmet_dependencies))
mod = known_mods_names.get(module)
if mod:
self.write(cr, uid, mod.id, dict(state='installed', **values))
mode = 'update' if not force else 'init'
else:
assert terp.get('installable', True), "Module not installable"
self.create(cr, uid, dict(name=module, state='installed', **values))
mode = 'init'
for kind in ['data', 'init_xml', 'update_xml']:
for filename in terp[kind]:
_logger.info("module %s: loading %s", module, filename)
noupdate = False
if filename.endswith('.csv') and kind in ('init', 'init_xml'):
noupdate = True
pathname = opj(path, filename)
idref = {}
convert_file(cr, module, filename, idref, mode=mode, noupdate=noupdate, kind=kind, pathname=pathname)
path_static = opj(path, 'static')
ir_attach = self.pool['ir.attachment']
if os.path.isdir(path_static):
for root, dirs, files in os.walk(path_static):
for static_file in files:
full_path = opj(root, static_file)
with open(full_path, 'r') as fp:
data = fp.read().encode('base64')
url_path = '/%s%s' % (module, full_path.split(path)[1].replace(os.path.sep, '/'))
url_path = url_path.decode(sys.getfilesystemencoding())
filename = os.path.split(url_path)[1]
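# Hypothetical illustration: for module "my_module" and file
# static/src/js/widget.js, url_path becomes
# "/my_module/static/src/js/widget.js" and filename "widget.js".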
values = dict(
name=filename,
datas_fname=filename,
url=url_path,
res_model='ir.ui.view',
type='binary',
datas=data,
)
att_id = ir_attach.search(cr, uid, [('url', '=', url_path), ('type', '=', 'binary'), ('res_model', '=', 'ir.ui.view')], context=context)
if att_id:
ir_attach.write(cr, uid, att_id, values, context=context)
else:
ir_attach.create(cr, uid, values, context=context)
return True
def import_zipfile(self, cr, uid, module_file, force=False, context=None):
if not module_file:
raise Exception("No file sent.")
if not zipfile.is_zipfile(module_file):
raise osv.except_osv(_('Error !'), _('File is not a zip file!'))
success = []
errors = dict()
module_names = []
with zipfile.ZipFile(module_file, "r") as z:
for zf in z.filelist:
if zf.file_size > MAX_FILE_SIZE:
msg = _("File '%s' exceed maximum allowed file size")
raise osv.except_osv(_('Error !'), msg % zf.filename)
with openerp.tools.osutil.tempdir() as module_dir:
z.extractall(module_dir)
dirs = [d for d in os.listdir(module_dir) if os.path.isdir(opj(module_dir, d))]
for mod_name in dirs:
module_names.append(mod_name)
try:
# assert mod_name.startswith('theme_')
path = opj(module_dir, mod_name)
self.import_module(cr, uid, mod_name, path, force=force, context=context)
success.append(mod_name)
except Exception, e:
errors[mod_name] = str(e)
r = ["Successfully imported module '%s'" % mod for mod in success]
for mod, error in errors.items():
r.append("Error while importing module '%s': %r" % (mod, error))
return '\n'.join(r), module_names
| agpl-3.0 |
aoakeson/home-assistant | homeassistant/components/switch/vera.py | 3 | 2616 | """
Support for Vera switches.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/switch.vera/
"""
import logging
import homeassistant.util.dt as dt_util
from homeassistant.components.switch import SwitchDevice
from homeassistant.const import (
ATTR_ARMED, ATTR_BATTERY_LEVEL, ATTR_LAST_TRIP_TIME, ATTR_TRIPPED,
STATE_OFF, STATE_ON)
from homeassistant.components.vera import (
VeraDevice, VERA_DEVICES, VERA_CONTROLLER)
DEPENDENCIES = ['vera']
_LOGGER = logging.getLogger(__name__)
def setup_platform(hass, config, add_devices_callback, discovery_info=None):
"""Find and return Vera switches."""
add_devices_callback(
VeraSwitch(device, VERA_CONTROLLER) for
device in VERA_DEVICES['switch'])
class VeraSwitch(VeraDevice, SwitchDevice):
"""Representation of a Vera Switch."""
def __init__(self, vera_device, controller):
"""Initialize the Vera device."""
self._state = False
VeraDevice.__init__(self, vera_device, controller)
@property
def device_state_attributes(self):
"""Return the state attributes of the device."""
attr = {}
if self.vera_device.has_battery:
attr[ATTR_BATTERY_LEVEL] = self.vera_device.battery_level + '%'
if self.vera_device.is_armable:
armed = self.vera_device.is_armed
attr[ATTR_ARMED] = 'True' if armed else 'False'
if self.vera_device.is_trippable:
last_tripped = self.vera_device.last_trip
if last_tripped is not None:
utc_time = dt_util.utc_from_timestamp(int(last_tripped))
attr[ATTR_LAST_TRIP_TIME] = dt_util.datetime_to_str(
utc_time)
else:
attr[ATTR_LAST_TRIP_TIME] = None
tripped = self.vera_device.is_tripped
attr[ATTR_TRIPPED] = 'True' if tripped else 'False'
attr['Vera Device Id'] = self.vera_device.vera_device_id
return attr
def turn_on(self, **kwargs):
"""Turn device on."""
self.vera_device.switch_on()
self._state = STATE_ON
self.update_ha_state()
def turn_off(self, **kwargs):
"""Turn device off."""
self.vera_device.switch_off()
self._state = STATE_OFF
self.update_ha_state()
@property
def is_on(self):
"""Return true if device is on."""
return self._state
def update(self):
"""Called by the vera device callback to update state."""
self._state = self.vera_device.is_switched_on()
| mit |