code
stringlengths 3
1.05M
| repo_name
stringlengths 5
104
| path
stringlengths 4
251
| language
stringclasses 1
value | license
stringclasses 15
values | size
int64 3
1.05M
|
|---|---|---|---|---|---|
from __future__ import absolute_import
class Ngram(object):
    """A single n-gram token with its occurrence count and follower stats.

    ``after`` is a list of dicts: ``after[i]`` maps a follower token to the
    number of times it was observed at distance ``i + 1`` after this n-gram.
    """

    def __init__(self, token):
        self.token = token   # the n-gram's text
        self.count = 1       # occurrences seen so far
        self.after = []      # per-distance follower counts (see class docstring)

    def __str__(self):
        return str({
            'after': self.after,
            'count': self.count
        })

    def __repr__(self):
        return str({
            'after': self.after,
            'count': self.count
        })

    def __len__(self):
        # Length of the underlying token string, not the follower list.
        return len(self.token)

    def __eq__(self, other):
        # Equal iff same concrete type and all attributes
        # (token, count, after) match.
        if type(self) is type(other):
            return self.__dict__ == other.__dict__
        return False

    def add_after(self, token, reach):
        """Record that ``token`` was seen ``reach`` positions (1-based) after
        this n-gram.

        Bug fix: the original ``if len(self.after) < reach`` appended at most
        ONE new slot, so any ``reach`` more than one past ``len(self.after)``
        raised IndexError on ``self.after[reach - 1]``. Grow with a loop
        until the slot exists.
        """
        while len(self.after) < reach:
            self.after.append({})
        target_dict = self.after[reach - 1]
        if token in target_dict:
            target_dict[token] += 1
        else:
            target_dict[token] = 1
|
pennetti/voicebox
|
server/src/voicebox/ngram.py
|
Python
|
mit
| 878
|
class Pair:
    """A simple (x, y) pair illustrating ``__repr__`` versus ``__str__``."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __repr__(self):
        # Unambiguous, eval-able form, e.g. Pair(1, 2)
        return 'Pair({!r}, {!r})'.format(self.x, self.y)

    def __str__(self):
        # Human-friendly tuple-like form, e.g. (1, 2)
        return '({}, {})'.format(self.x, self.y)
|
tuanavu/python-cookbook-3rd
|
src/8/changing_the_string_representation_of_instances/example.py
|
Python
|
mit
| 226
|
#!/usr/bin/env python
# Copyright (c) 2017-2021 F5 Networks, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from mock import MagicMock
import pytest
import f5_cccl.resource.ltm.monitor.udp_monitor as target
@pytest.fixture
def udp_config():
    """Full UDP monitor configuration used by the tests below."""
    config = {}
    config['name'] = "test_monitor"
    config['partition'] = "Test"
    config['interval'] = 1
    config['timeout'] = 10
    config['send'] = "GET /\r\n"
    config['recv'] = "SERVER"
    return config
@pytest.fixture
def bigip():
    """A mock BIG-IP connection object."""
    return MagicMock()
def test_create_w_defaults(udp_config):
    """Omitting optional settings must fall back to the module defaults."""
    monitor = target.UDPMonitor(
        name=udp_config['name'],
        partition=udp_config['partition'])
    assert monitor
    assert monitor.name == "test_monitor"
    assert monitor.partition == "Test"
    data = monitor.data
    # Defaults applied when interval/timeout/send/recv are not supplied.
    expected_defaults = {'interval': 5, 'timeout': 16, 'send': "", 'recv': ""}
    for key, value in expected_defaults.items():
        assert data.get(key) == value
def test_create_w_config(udp_config):
    """All explicitly supplied settings must be reflected in monitor.data."""
    monitor = target.UDPMonitor(**udp_config)
    assert monitor
    assert monitor.name == "test_monitor"
    assert monitor.partition == "Test"
    data = monitor.data
    # Every configured value should survive round-trip into monitor.data.
    for key in ('interval', 'timeout', 'send', 'recv'):
        assert data.get(key) == udp_config[key]
def test_get_uri_path(bigip, udp_config):
    """_uri_path must resolve to the BIG-IP udp monitor collection."""
    monitor = target.UDPMonitor(**udp_config)
    expected = bigip.tm.ltm.monitor.udps.udp
    assert monitor._uri_path(bigip) == expected
def test_create_icr_monitor(udp_config):
    """IcrUDPMonitor instances are UDPMonitor instances."""
    assert isinstance(target.IcrUDPMonitor(**udp_config), target.UDPMonitor)
def test_create_api_monitor(udp_config):
    """ApiUDPMonitor instances are UDPMonitor instances."""
    assert isinstance(target.ApiUDPMonitor(**udp_config), target.UDPMonitor)
def test_create_monitors_invalid(udp_config):
    """With interval larger than timeout the ICR monitor is still created,
    but the API monitor must reject the config with ValueError."""
    udp_config['interval'] = 30
    icr_monitor = target.IcrUDPMonitor(**udp_config)
    assert isinstance(icr_monitor, target.IcrUDPMonitor)
    with pytest.raises(ValueError):
        target.ApiUDPMonitor(**udp_config)
|
f5devcentral/f5-cccl
|
f5_cccl/resource/ltm/monitor/test/test_udp_monitor.py
|
Python
|
apache-2.0
| 2,642
|
# -*- coding: utf-8 -*-
#
# bitme documentation build configuration file, created by
# sphinx-quickstart on Sun Oct 27 20:01:55 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
pyscript_dir = os.path.dirname(os.path.realpath(__file__))
# Make the ``bitme`` package importable for sphinx.ext.autodoc regardless of
# the directory sphinx-build is invoked from.
sys.path.insert(0, os.path.join(pyscript_dir, '..', '..', 'bitme'))

# -- General configuration -----------------------------------------------------

# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'

# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.intersphinx', 'sphinx.ext.ifconfig']

# 'both': concatenate the class docstring and the __init__ docstring in
# autodoc class documentation.
autoclass_content = 'both'

# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']

# The suffix of source filenames.
source_suffix = '.rst'

# The encoding of source files.
#source_encoding = 'utf-8-sig'

# The master toctree document.
master_doc = 'index'

# General information about the project.
project = u'bitme'
copyright = u'2013, Erik Gregg'

# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.0.7'
# The full version, including alpha/beta/rc tags.
release = '0.0.7'

# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None

# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'

# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []

# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None

# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True

# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True

# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False

# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'

# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []

# -- Options for HTML output ---------------------------------------------------

# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'

# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}

# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []

# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None

# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None

# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None

# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None

# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']

# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'

# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True

# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}

# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}

# If false, no module index is generated.
#html_domain_indices = True

# If false, no index is generated.
#html_use_index = True

# If true, the index is split into individual pages for each letter.
#html_split_index = False

# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True

# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True

# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True

# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''

# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None

# Output file base name for HTML help builder.
htmlhelp_basename = 'bitmedoc'

# -- Options for LaTeX output --------------------------------------------------

latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',

# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',

# Additional stuff for the LaTeX preamble.
#'preamble': '',
}

# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
    ('index', 'bitme.tex', u'bitme Documentation',
     u'Erik Gregg', 'manual'),
]

# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None

# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False

# If true, show page references after internal links.
#latex_show_pagerefs = False

# If true, show URL addresses after external links.
#latex_show_urls = False

# Documents to append as an appendix to all manuals.
#latex_appendices = []

# If false, no module index is generated.
#latex_domain_indices = True

# -- Options for manual page output --------------------------------------------

# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
    ('index', 'bitme', u'bitme Documentation',
     [u'Erik Gregg'], 1)
]

# If true, show URL addresses after external links.
#man_show_urls = False

# -- Options for Texinfo output ------------------------------------------------

# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
#  dir menu entry, description, category)
texinfo_documents = [
    ('index', 'bitme', u'bitme Documentation',
     u'Erik Gregg', 'bitme', 'One line description of project.',
     'Miscellaneous'),
]

# Documents to append as an appendix to all manuals.
#texinfo_appendices = []

# If false, no module index is generated.
#texinfo_domain_indices = True

# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'

# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/': None}
|
bitme/python-bitme
|
doc/source/conf.py
|
Python
|
mit
| 8,009
|
# -*- coding: utf-8 -*-
# Part of Odoo. See LICENSE file for full copyright and licensing details.
# Odoo addon manifest: metadata consumed by the Odoo module loader.
{
    'name': 'Manufacturing Expiry',
    'version': '1.0',
    'category': 'Manufacturing/Manufacturing',
    'summary': 'Manufacturing Expiry',
    'description': """
Technical module.
""",
    # Bridges mrp (manufacturing) with product_expiry (lot expiration dates).
    'depends': ['mrp', 'product_expiry'],
    'data': [
        'wizard/confirm_expiry_view.xml',
    ],
    'installable': True,
    # auto_install: activated automatically once all 'depends' are installed.
    'auto_install': True,
    'application': False,
}
|
ygol/odoo
|
addons/mrp_product_expiry/__manifest__.py
|
Python
|
agpl-3.0
| 480
|
# Copyright (c) 2003-2006 LOGILAB S.A. (Paris, FRANCE).
# http://www.logilab.fr/ -- mailto:contact@logilab.fr
#
# This program is free software; you can redistribute it and/or modify it under
# the terms of the GNU General Public License as published by the Free Software
# Foundation; either version 2 of the License, or (at your option) any later
# version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""check for signs of poor design
see http://intranet.logilab.fr/jpl/view?rql=Any%20X%20where%20X%20eid%201243
FIXME: missing 13, 15, 16
"""
from logilab.astng import Function, If, InferenceError
from pylint.interfaces import IASTNGChecker
from pylint.checkers import BaseChecker
import re
# Regexp matching argument names to exclude from the max-args count
# (by default, names beginning with an underscore).
IGNORED_ARGUMENT_NAMES = re.compile('_.*')
def class_is_abstract(klass):
    """Return true if the given class node should be considered as an
    abstract class, i.e. it defines at least one abstract method.
    """
    # Short-circuits on the first abstract Function member, exactly like
    # the explicit loop it replaces.
    return any(isinstance(member, Function)
               and member.is_abstract(pass_is_abstract=False)
               for member in klass.values())
# Message table: message id -> (format string, help text). The %s/%s pairs
# are filled with (measured value, configured limit).
MSGS = {
    'R0901': ('Too many ancestors (%s/%s)',
              'Used when class has too many parent classes, try to reduce \
this to get a more simple (and so easier to use) class.'),
    'R0902': ('Too many instance attributes (%s/%s)',
              'Used when class has too many instance attributes, try to reduce \
this to get a more simple (and so easier to use) class.'),
    'R0903': ('Too few public methods (%s/%s)',
              'Used when class has too few public methods, so be sure it\'s \
really worth it.'),
    'R0904': ('Too many public methods (%s/%s)',
              'Used when class has too many public methods, try to reduce \
this to get a more simple (and so easier to use) class.'),
    'R0911': ('Too many return statements (%s/%s)',
              'Used when a function or method has too many return statement, \
making it hard to follow.'),
    'R0912': ('Too many branches (%s/%s)',
              'Used when a function or method has too many branches, \
making it hard to follow.'),
    'R0913': ('Too many arguments (%s/%s)',
              'Used when a function or method takes too many arguments.'),
    'R0914': ('Too many local variables (%s/%s)',
              'Used when a function or method has too many local variables.'),
    'R0915': ('Too many statements (%s/%s)',
              'Used when a function or method has too many statements. You \
should then split it in smaller functions / methods.'),
    'R0921': ('Abstract class not referenced',
              'Used when an abstract class is not used as ancestor anywhere.'),
    'R0922': ('Abstract class is only referenced %s times',
              'Used when an abstract class is used less than X times as \
ancestor.'),
    'R0923': ('Interface not implemented',
              'Used when an interface class is not implemented anywhere.'),
    }
class MisdesignChecker(BaseChecker):
    """checks for sign of poor/misdesign:
    * number of methods, attributes, local variables...
    * size, complexity of functions, methods

    Counters are kept as stacks (one entry per nested function) so that
    nested functions are measured independently of their enclosing scope.
    """

    __implements__ = (IASTNGChecker,)

    # configuration section name
    name = 'design'
    # messages
    msgs = MSGS
    priority = -2
    # configuration options
    options = (('max-args',
                {'default' : 5, 'type' : 'int', 'metavar' : '<int>',
                 'help': 'Maximum number of arguments for function / method'}
                ),
               ('ignored-argument-names',
                {'default' : IGNORED_ARGUMENT_NAMES,
                 'type' :'regexp', 'metavar' : '<regexp>',
                 'help' : 'Argument names that match this expression will be '
                          'ignored. Default to name with leading underscore'}
                ),
               ('max-locals',
                {'default' : 15, 'type' : 'int', 'metavar' : '<int>',
                 'help': 'Maximum number of locals for function / method body'}
                ),
               ('max-returns',
                {'default' : 6, 'type' : 'int', 'metavar' : '<int>',
                 'help': 'Maximum number of return / yield for function / '
                         'method body'}
                ),
               ('max-branchs',
                {'default' : 12, 'type' : 'int', 'metavar' : '<int>',
                 'help': 'Maximum number of branch for function / method body'}
                ),
               ('max-statements',
                {'default' : 50, 'type' : 'int', 'metavar' : '<int>',
                 'help': 'Maximum number of statements in function / method '
                         'body'}
                ),
               ('max-parents',
                {'default' : 7,
                 'type' : 'int',
                 'metavar' : '<num>',
                 'help' : 'Maximum number of parents for a class (see R0901).'}
                ),
               ('max-attributes',
                {'default' : 7,
                 'type' : 'int',
                 'metavar' : '<num>',
                 'help' : 'Maximum number of attributes for a class \
(see R0902).'}
                ),
               ('min-public-methods',
                {'default' : 2,
                 'type' : 'int',
                 'metavar' : '<num>',
                 'help' : 'Minimum number of public methods for a class \
(see R0903).'}
                ),
               ('max-public-methods',
                {'default' : 20,
                 'type' : 'int',
                 'metavar' : '<num>',
                 'help' : 'Maximum number of public methods for a class \
(see R0904).'}
                ),
               )

    def __init__(self, linter=None):
        BaseChecker.__init__(self, linter)
        # All per-run state is (re)initialized in open().
        self.stats = None
        self._returns = None          # stack: return count per nested function
        self._branchs = None          # stack: branch count per nested function
        self._used_abstracts = None   # abstract class node -> use count
        self._used_ifaces = None      # interface class node -> 1 if referenced
        self._abstracts = None        # abstract class nodes seen
        self._ifaces = None           # interface class nodes seen
        self._stmts = 0               # statement count of the current function

    def open(self):
        """initialize visit variables"""
        self.stats = self.linter.add_stats()
        self._returns = []
        self._branchs = []
        self._used_abstracts = {}
        self._used_ifaces = {}
        self._abstracts = []
        self._ifaces = []

    def close(self):
        """check that abstract/interface classes are used"""
        for abstract in self._abstracts:
            if not abstract in self._used_abstracts:
                self.add_message('R0921', node=abstract)
            elif self._used_abstracts[abstract] < 2:
                self.add_message('R0922', node=abstract,
                                 args=self._used_abstracts[abstract])
        for iface in self._ifaces:
            if not iface in self._used_ifaces:
                self.add_message('R0923', node=iface)

    def visit_class(self, node):
        """check size of inheritance hierarchy and number of instance attributes
        """
        self._inc_branch()
        # Is the total inheritance hierarchy is 7 or less?
        nb_parents = len(list(node.ancestors()))
        if nb_parents > self.config.max_parents:
            self.add_message('R0901', node=node,
                             args=(nb_parents, self.config.max_parents))
        # Does the class contain less than 20 attributes for
        # non-GUI classes (40 for GUI)?
        # FIXME detect gui classes
        if len(node.instance_attrs) > self.config.max_attributes:
            self.add_message('R0902', node=node,
                             args=(len(node.instance_attrs),
                                   self.config.max_attributes))
        # update abstract / interface classes structures
        if class_is_abstract(node):
            self._abstracts.append(node)
        elif node.type == 'interface' and node.name != 'Interface':
            self._ifaces.append(node)
            # direct ancestors of an interface are themselves used interfaces
            for parent in node.ancestors(False):
                if parent.name == 'Interface':
                    continue
                self._used_ifaces[parent] = 1
        try:
            for iface in node.interfaces():
                self._used_ifaces[iface] = 1
        except InferenceError:
            # XXX log ?
            pass
        for parent in node.ancestors():
            try:
                self._used_abstracts[parent] += 1
            except KeyError:
                self._used_abstracts[parent] = 1

    def leave_class(self, node):
        """check number of public methods"""
        nb_public_methods = 0
        for method in node.methods():
            if not method.name.startswith('_'):
                nb_public_methods += 1
        # Does the class contain less than 20 public methods ?
        if nb_public_methods > self.config.max_public_methods:
            self.add_message('R0904', node=node,
                             args=(nb_public_methods,
                                   self.config.max_public_methods))
        # stop here for exception, metaclass and interface classes
        if node.type != 'class':
            return
        # Does the class contain more than 5 public methods ?
        if nb_public_methods < self.config.min_public_methods:
            self.add_message('R0903', node=node,
                             args=(nb_public_methods,
                                   self.config.min_public_methods))

    def visit_function(self, node):
        """check function name, docstring, arguments, redefinition,
        variable names, max locals
        """
        self._inc_branch()
        # init branch and returns counters
        self._returns.append(0)
        self._branchs.append(0)
        # check number of arguments
        args = node.args.args
        if args is not None:
            # arguments matching ignored-argument-names don't count
            ignored_args_num = len(
                [arg for arg in args
                 if self.config.ignored_argument_names.match(arg.name)])
            argnum = len(args) - ignored_args_num
            if argnum > self.config.max_args:
                self.add_message('R0913', node=node,
                                 args=(len(args), self.config.max_args))
        else:
            ignored_args_num = 0
        # check number of local variables
        locnum = len(node.locals) - ignored_args_num
        if locnum > self.config.max_locals:
            self.add_message('R0914', node=node,
                             args=(locnum, self.config.max_locals))
        # init statements counter
        self._stmts = 1

    def leave_function(self, node):
        """most of the work is done here on close:
        checks for max returns, branch, return in __init__
        """
        returns = self._returns.pop()
        if returns > self.config.max_returns:
            self.add_message('R0911', node=node,
                             args=(returns, self.config.max_returns))
        branchs = self._branchs.pop()
        if branchs > self.config.max_branchs:
            self.add_message('R0912', node=node,
                             args=(branchs, self.config.max_branchs))
        # check number of statements
        if self._stmts > self.config.max_statements:
            self.add_message('R0915', node=node,
                             args=(self._stmts, self.config.max_statements))

    def visit_return(self, _):
        """count number of returns"""
        if not self._returns:
            return # return outside function, reported by the base checker
        self._returns[-1] += 1

    def visit_default(self, node):
        """default visit method -> increments the statements counter if
        necessary
        """
        if node.is_statement:
            self._stmts += 1

    def visit_tryexcept(self, node):
        """increments the branchs counter"""
        # one branch per handler, plus one for the optional else clause
        branchs = len(node.handlers)
        if node.orelse:
            branchs += 1
        self._inc_branch(branchs)
        self._stmts += branchs

    def visit_tryfinally(self, _):
        """increments the branchs counter"""
        self._inc_branch(2)
        self._stmts += 2

    def visit_if(self, node):
        """increments the branchs counter"""
        branchs = 1
        # don't double count If nodes coming from some 'elif'
        if node.orelse and (len(node.orelse)>1 or
                            not isinstance(node.orelse[0], If)):
            branchs += 1
        self._inc_branch(branchs)
        self._stmts += branchs

    def visit_while(self, node):
        """increments the branchs counter"""
        branchs = 1
        if node.orelse:
            branchs += 1
        self._inc_branch(branchs)

    # 'for' loops are counted exactly like 'while' loops
    visit_for = visit_while

    def _inc_branch(self, branchsnum=1):
        """increments the branchs counter of every function on the stack
        (a branch inside a nested function counts for the enclosing ones too)
        """
        branchs = self._branchs
        # NOTE: xrange -- this module targets Python 2
        for i in xrange(len(branchs)):
            branchs[i] += branchsnum

    # FIXME: make a nice report...
def register(linter):
    """required method to auto register this checker """
    checker = MisdesignChecker(linter)
    linter.register_checker(checker)
|
isohybrid/dotfile
|
vim/bundle/git:--github.com-klen-python-mode/pylibs/pylint/checkers/design_analysis.py
|
Python
|
bsd-2-clause
| 13,656
|
import logging
import anyjson as json
from django.conf import settings as django_settings
from django.http import HttpResponse, HttpResponseBadRequest
from redis_utils import redis_client, RedisError
# Module-level logger used by the handlers below to report redis failures.
log = logging.getLogger(__name__)
def get_builds(request):
    '''
    url handler that returns all known build uids
    request params:
        :date - restrict result to builds on a particular day (optional)
                format YYYY-MM-DD
        :hour - restrict to builds in a particular hour (optional)
                format HH, valid iff the date is provided
        :changes - if true, restrict to builds with source changes
                (optional), valid iff date or date and time provided
    '''
    date_string = request.GET.get('date', None)
    hour_string = request.GET.get('hour', None)
    changesets = request.GET.get('changes', None)
    # an hour on its own is ambiguous -- it needs a date to anchor it
    if hour_string and not date_string:
        return HttpResponseBadRequest(
            json.serialize({'message': 'hour option requires date'}),
            content_type="application/json")
    if date_string:
        if hour_string:
            date_string = "%s.%s" % (date_string, hour_string)
        prefix = "change:" if changesets else "build:"
        redis_key = prefix + date_string
    else:
        # no filters: fall back to the full set of known hashes
        redis_key = "metrics.hashes"
    try:
        hashes = redis_client('default').smembers(redis_key)
    except RedisError as e:
        log.error('redis error: %s', e)
        hashes = set()
    result = [{'type': kind, 'uid': uid}
              for kind, uid in (h.split(':') for h in hashes)]
    return HttpResponse(json.serialize(result),
                        content_type="application/json")
def get_build_jobs(request, uid=None):
    """
    url handler that returns a list of jobs spawned by a specific build uid
    uid - the 32 digit alphanumeric build uid
    """
    if not uid:
        return HttpResponseBadRequest(
            json.serialize({'message': 'valid uid required'}),
            content_type="application/json")
    try:
        hashes = redis_client('default').smembers("build:%s" % (uid))
    except RedisError as e:
        log.error('redis error: %s', e)
        hashes = set()
    result = []
    for build_hash in hashes:
        # each hash is "<type>:<uid>.<master>.<build_number>"
        job_type, job_info = build_hash.split(":")
        job_uid, master, build_number = job_info.split(".")
        result.append({'type': job_type,
                       'uid': job_uid,
                       'master': master,
                       'build_number': build_number})
    return HttpResponse(json.serialize(result),
                        content_type="application/json")
def get_changeset_info(request, uid=None):
    """
    url handler that returns an object with changeset information for a
    particular build uid
    uid - the 32 digit alphanumeric build uid
    """
    if not uid:
        return HttpResponseBadRequest(
            json.serialize({'message': 'valid uid required'}),
            content_type="application/json")
    try:
        changeset_info = redis_client('default').hgetall("change:%s" % (uid))
    except RedisError as e:
        log.error('redis error: %s', e)
        changeset_info = {}
    return HttpResponse(json.serialize(changeset_info),
                        content_type="application/json")
def get_machine_events(request, event_type=None):
    """
    url handler that returns machine events and counts for how often they
    have occured
    event_type - restrict event types to one of `connect`, `disconnect`,
                 `build`
    """
    if event_type not in ('connect', 'disconnect', 'build'):
        return HttpResponseBadRequest(
            json.serialize({'message': ('event_type must be one of',
                '`connect`, `disconnect`, or `builds`')}),
            content_type="application/json")
    redis_key = "metrics:%s" % (event_type)
    try:
        r = redis_client('default')
        events = r.hgetall(redis_key)
    except RedisError as e:
        log.error('redis error: %s', e)
        events = {}
    metrics = []
    # items() instead of iteritems() works on both Python 2 and 3
    for k, v in events.items():
        event, machine = k.rsplit(':', 1)
        # Bug fix: the original used "event_type is 'build'" -- an identity
        # comparison against a string literal that only works by accident of
        # CPython small-string interning. Use equality.
        if event_type == 'build':
            t, event = event.split(':')
        else:
            t = "machine"
        metrics.append({'type': t, 'event': event, 'count': v,
                        'machine_name': machine})
    return HttpResponse(json.serialize(metrics),
                        content_type="application/json")
def get_job_info(request, uid=None, master=None, build_number=None):
    """
    url handler that returns all information about a job, pulled from the Pulse
    stream
    uid - the build uid for the job
    master - the master managing the jobs
    build_number - the build number
    """
    # all three identifiers are required to address a single job
    if not (uid and master and build_number):
        return HttpResponseBadRequest(
            json.serialize({"message": ("uid, master, and buildnumber are",
                "required")}), content_type="application/json")
    redis_key = "job:%s.%s.%s" % (uid, master, build_number)
    try:
        job = redis_client('default').hgetall(redis_key)
    except RedisError as e:
        log.error('redis error: %s', e)
        job = {}
    return HttpResponse(json.serialize(job), content_type="application/json")
|
peterbe/bramble
|
bramble/base/api.py
|
Python
|
mpl-2.0
| 5,364
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import obci_log_model
class DummyLogModel(obci_log_model.LogModel):
    """Fake log source alternating 'amplifier' and 'mx' log lines."""

    def __init__(self):
        super(DummyLogModel, self).__init__()
        self._ind = 0
        # 'logs' keyed by peer id
        self._peers_log = {
            'amplifier': {'peer_id': 'amplifier', 'logs': []},
            'mx': {'peer_id': 'mx', 'logs': []},
        }

    def next_log(self):
        """Return the next (peer_id, message) pair, throttled to ~20 Hz."""
        time.sleep(0.05)
        self._ind += 1
        is_even = self._ind % 2 == 0
        peer = 'amplifier' if is_even else 'mx'
        prefix = 'AMP ' if is_even else 'MX '
        return peer, prefix + str(self._ind)

    def post_run(self):
        """Nothing to clean up for the dummy model."""
        pass
|
BrainTech/openbci
|
obci/control/gui/obci_log_model_dummy.py
|
Python
|
gpl-3.0
| 732
|
""" Basic tests for table plot visualization """
import os
from click.testing import CliRunner
import perun.cli as cli
import perun.vcs as vcs
import perun.testing.utils as test_utils
import perun.testing.asserts as asserts
# Directory holding the reference outputs the table tests compare against.
TABLE_TEST_DIR = os.path.join(os.path.split(__file__)[0], 'references', "table_files")

__author__ = 'Tomas Fiedor'
def output_to_list(output):
    """Normalize output lines for order-insensitive comparison.

    :param list output: list of lines
    :return: sorted list of lines without newlines and filtered out empty lines
    """
    stripped = (line.rstrip() for line in output)
    return sorted(line for line in stripped if line)
def assert_files_match(lhs, rhs):
    """Asserts that two file handles have matching content, ignoring line
    order, trailing whitespace and empty lines.

    :param handle lhs: left file handle
    :param handle rhs: right file handle
    """
    lhs_lines = output_to_list(lhs.readlines())
    rhs_lines = output_to_list(rhs.readlines())
    assert lhs_lines == rhs_lines
def assert_files_match_output(result, rhs):
    """Asserts that captured stdout and a reference file match, ignoring
    line order, trailing whitespace and empty lines.

    :param list result: left stdout (click invocation result)
    :param handle rhs: right file handle
    """
    stdout_lines = output_to_list(result.output.split('\n'))
    assert stdout_lines == output_to_list(rhs.readlines())
def test_table_cli(pcs_full, postprocess_profiles):
    """Test outputing profiles as tables

    Exercises `perun show ... tableof` end-to-end: default output, custom
    headers, latex format, output to file, sorting and filtering, and the
    error paths for invalid headers / sort keys / filter keys. Each case is
    compared against a reference file in TABLE_TEST_DIR.
    """
    runner = CliRunner()
    # basic resources table to stdout
    result = runner.invoke(cli.show, [
        '0@i', 'tableof', '--to-stdout', 'resources'
    ])
    asserts.predicate_from_cli(result, result.exit_code == 0)
    with open(os.path.join(TABLE_TEST_DIR, 'table_resources_ref_basic'), 'r') as trb:
        assert_files_match_output(result, trb)

    # register a models profile so the 'models' table can be shown
    models_profile = test_utils.profile_filter(postprocess_profiles, 'complexity-models.perf', return_type='name')
    added = test_utils.prepare_profile(
        pcs_full.get_job_directory(), models_profile, vcs.get_minor_head()
    )
    result = runner.invoke(cli.add, ['--keep-profile', '{}'.format(added)])
    asserts.predicate_from_cli(result, result.exit_code == 0)

    # basic models table
    result = runner.invoke(cli.show, [
        '0@p', 'tableof', '--to-stdout', 'models'
    ])
    asserts.predicate_from_cli(result, result.exit_code == 0)
    with open(os.path.join(TABLE_TEST_DIR, 'table_models_ref_basic'), 'r') as trb:
        assert_files_match_output(result, trb)

    # pruned headers
    result = runner.invoke(cli.show, [
        '0@p', 'tableof', '--to-stdout', 'models', '-h', 'uid', '-h', 'model', '-h', 'coeffs'
    ])
    asserts.predicate_from_cli(result, result.exit_code == 0)
    with open(os.path.join(TABLE_TEST_DIR, 'table_models_ref_pruned'), 'r') as trb:
        assert_files_match_output(result, trb)

    # invalid header is rejected with exit code 2
    result = runner.invoke(cli.show, [
        '0@p', 'tableof', '--to-stdout', 'models', '-h', 'non-existant', '-h', 'model', '-h', 'coeffs'
    ])
    asserts.predicate_from_cli(result, result.exit_code == 2)
    asserts.predicate_from_cli(result, "invalid choice for table header: non-existant" in result.output)

    # Test different format
    result = runner.invoke(cli.show, [
        '0@p', 'tableof', '--to-stdout', '-f', 'latex', 'models', '-h', 'uid', '-h', 'model', '-h', 'coeffs'
    ])
    asserts.predicate_from_cli(result, result.exit_code == 0)
    with open(os.path.join(TABLE_TEST_DIR, 'table_models_ref_latex'), 'r') as trb:
        assert_files_match_output(result, trb)

    # Test output to file
    result = runner.invoke(cli.show, [
        '0@p', 'tableof', '--output-file', 'test_output', 'models', '-h', 'uid', '-h', 'model', '-h', 'coeffs'
    ])
    output_file = os.path.join(os.getcwd(), 'test_output')
    asserts.predicate_from_cli(result, result.exit_code == 0)
    assert os.path.exists(output_file)
    with open(os.path.join(TABLE_TEST_DIR, 'table_models_ref_pruned'), 'r') as trb:
        with open(output_file, 'r') as of:
            assert_files_match(trb, of)

    # Test sorts and filters
    result = runner.invoke(cli.show, [
        '0@p', 'tableof', '--to-stdout', 'models', '--sort-by', 'r_square', '--filter-by', 'model', 'linear', '--filter-by', 'model', 'quadratic'
    ])
    asserts.predicate_from_cli(result, result.exit_code == 0)
    with open(os.path.join(TABLE_TEST_DIR, 'table_models_ref_sorted_filtered'), 'r') as trb:
        assert_files_match_output(result, trb)

    # invalid sort key is rejected
    result = runner.invoke(cli.show, [
        '0@p', 'tableof', '--to-stdout', 'models', '--sort-by', 'class'
    ])
    asserts.predicate_from_cli(result, "Error: invalid key choice for sorting the table: class " in str(result.output))
    asserts.predicate_from_cli(result, result.exit_code == 2)

    # invalid filter key is rejected
    result = runner.invoke(cli.show, [
        '0@p', 'tableof', '--to-stdout', 'models', '--filter-by', 'class', 'linear'
    ])
    asserts.predicate_from_cli(result, "Error: invalid key choice for filtering: class" in str(result.output))
    asserts.predicate_from_cli(result, result.exit_code == 2)

    # Test sorts and filters
    result = runner.invoke(cli.show, [
        '0@p', 'tableof', '--to-stdout', 'models', '--filter-by', 'r_square', '0', '--filter-by', 'model', 'linear'
    ])
    asserts.predicate_from_cli(result, result.exit_code == 0)
    with open(os.path.join(TABLE_TEST_DIR, 'table_models_ref_empty'), 'r') as trb:
        assert_files_match_output(result, trb)
|
tfiedor/perun
|
tests/test_table.py
|
Python
|
gpl-3.0
| 5,117
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2012-2021 SoftBank Robotics. All rights reserved.
# Use of this source code is governed by a BSD-style license (see the COPYING file).
""" Remove the given build config. """
from __future__ import absolute_import
from __future__ import unicode_literals
from __future__ import print_function
import qisys.parsers
import qibuild.worktree
def configure_parser(parser):
    """ Configure parser for this action: standard worktree options plus the
    name of the build config to remove. """
    qisys.parsers.worktree_parser(parser)
    # Positional argument: the build config name to delete.
    parser.add_argument("name")
def do(args):
    """ Main Entry Point.

    Remove the build config named ``args.name`` from the global qibuild
    configuration, and clear it as the default config of any worktree
    that pointed at it, so no worktree is left referencing a config that
    no longer exists. Raises KeyError if no such config exists.
    """
    # Explicit import: the module level only imports qibuild.worktree, and
    # relying on it to pull in qibuild.config transitively is fragile.
    import qibuild.config
    name = args.name
    qibuild_cfg = qibuild.config.QiBuildConfig()
    qibuild_cfg.read()
    # KeyError here is the "no such config" error path.
    del qibuild_cfg.configs[name]
    # Also remove default config from global qibuild.xml file, so
    # that we don't get a default config pointing to a non-existing
    # config
    for worktree in qibuild_cfg.worktrees.values():
        if worktree.defaults.config == name:
            qibuild_cfg.set_default_config_for_worktree(worktree.path, None)
    qibuild_cfg.write()
|
aldebaran/qibuild
|
python/qibuild/actions/rm_config.py
|
Python
|
bsd-3-clause
| 1,053
|
from osgeo import ogr
import os

# Print the centroid (as WKT) of every feature in the states shapefile.
shapefile = "states.shp"
driver = ogr.GetDriverByName("ESRI Shapefile")
# Open read-only (second argument 0 = read mode).
dataSource = driver.Open(shapefile, 0)
if dataSource is None:
    # driver.Open returns None instead of raising when the file is missing
    # or unreadable; fail loudly rather than with an AttributeError below.
    raise RuntimeError("Could not open shapefile: %s" % shapefile)
layer = dataSource.GetLayer()
for feature in layer:
    geom = feature.GetGeometryRef()
    # print(...) with a single argument works on both Python 2 and 3.
    print(geom.Centroid().ExportToWkt())
# Release the GDAL/OGR data source handle (OGR convention: drop the
# reference so the underlying file is closed).
dataSource = None
|
roscoeZA/GeoGigSync
|
ogr2ogr_convert.py
|
Python
|
cc0-1.0
| 272
|
import pickle
from django.core.signing import JSONSerializer as BaseJSONSerializer
class PickleSerializer:
    """
    Pickle-backed serializer exposing the dumps/loads interface expected
    by signing.dumps and signing.loads.

    NOTE: unpickling executes arbitrary code; this serializer must only
    ever be used on trusted (e.g. cryptographically signed) payloads.
    """

    def dumps(self, obj):
        # Highest protocol: most compact and fastest representation.
        return pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)

    def loads(self, data):
        # Inverse of dumps(); returns the original Python object.
        return pickle.loads(data)
JSONSerializer = BaseJSONSerializer
|
edmorley/django
|
django/contrib/sessions/serializers.py
|
Python
|
bsd-3-clause
| 394
|
# -*- coding: utf-8 -*-
from django.db import migrations, models
class Migration(migrations.Migration):
    # Applies on top of the app's initial schema.
    dependencies = [
        ('outreach', '0001_initial'),
    ]
    operations = [
        # Redefine OutreachEvent.description as a nullable/blank CharField
        # with max_length=800 (see migration filename: widening the field).
        migrations.AlterField(
            model_name='outreachevent',
            name='description',
            field=models.CharField(max_length=800, null=True, blank=True),
        ),
    ]
|
sfu-fas/coursys
|
outreach/migrations/0002_make_description_longer.py
|
Python
|
gpl-3.0
| 389
|
# -*- coding: utf-8 -*
import string
DESCRIPTION = ""\
"""
_ __ _ ___
/ \| \ / \|_ _|
( o ) o ) o || |
\_/|__/|_n_||_|
-------------------------------------------
_ __ _ ___
/ \ | \ / \ |_ _|
( o ) o ) o | | |
\_/racle |__/atabase |_n_|ttacking |_|ool
-------------------------------------------
By Quentin Hardy (quentin.hardy@protonmail.com or quentin.hardy@bt.com)
"""
CURRENT_VERSION = "Version 5.1 - 2021/03/12"
DEFAULT_SID_MIN_SIZE = 1
DEFAULT_SID_MAX_SIZE = 2
MAX_HELP_POSITION=22
MAX_SUB_HELP_POSITION=45
MAX_SPECIAL_SUB_HELP_POSITION = 60
MAX_HELP_WIDTH = 150
DEFAULT_SID_FILE = "resources/sids.txt"
DEFAULT_SERVICE_NAME_FILE = "resources/sids.txt"
DEFAULT_ACCOUNT_FILE = "accounts/accounts.txt"
DEFAULT_LOGINS_FILE = "accounts/logins.txt"
DEFAULT_PWDS_FILE = "accounts/pwds.txt"
DEFAULT_TIME_SLEEP = 0
DEFAULT_SID_CHARSET = string.ascii_uppercase
EXIT_NO_SIDS = 100
EXIT_NO_ACCOUNTS = 101
EXIT_BAD_CONNECTION = 102
EXIT_BAD_CMD_PARAMETER = 103
EXIT_MISS_ARGUMENT = 104
EXIT_MISS_MODULE = 105
ALL_IS_OK=0
TIMEOUT_VALUE = 5
PASSWORD_EXTENSION_FILE = ".odat.save"
CHALLENGE_EXT_FILE = ".odat.challenge"
MAX_WIDTH_TEXTTABLES = 120
DEFAULT_ENCODING = 'utf8'
TIMEOUT_TNS_CMD = 30
DEFAULT_LOCAL_LISTENING_PORT_TNS_POISON = 1522
DEFAULT_SLEEPING_TIME_TNS_POISON = 10
MAX_TIMEOUT_VALUE_TNS_POISON = 10
#SEARCH module
PATTERNS_COLUMNS_WITH_PWDS = [
'%mdp%',
'%pwd%',
'%pass%',
"%contraseña%",
"%clave%",
"%chiave%",
"%пароль%",
"%wachtwoord%",
"%hasło%",
"%senha%",
]
EXPLOITABLE_SYSTEM_PRIVILEGES = [
'CREATE ANY PROCEDURE',
'ANALYZE ANY',
'CREATE ANY TRIGGER',
'CREATE ANY INDEX',
]
|
quentinhardy/odat
|
Constants.py
|
Python
|
lgpl-3.0
| 1,740
|
import os
import importlib
from django.conf import settings
from geotrek.common.parsers import Parser
if 'geotrek.zoning' in settings.INSTALLED_APPS:
import geotrek.zoning.parsers # noqa
if 'geotrek.sensitivity' in settings.INSTALLED_APPS:
import geotrek.sensitivity.parsers # noqa
def subclasses(cls):
    """Return every descendant class of *cls* (direct and transitive),
    depth-first in ``__subclasses__`` order."""
    result = []
    for direct in cls.__subclasses__():
        # Each direct subclass is followed immediately by its own subtree.
        result += [direct] + subclasses(direct)
    return result
def create_tmp_destination(name):
    """Return ``(save_dir, file_path)`` for an upload named *name*.

    ``save_dir`` is a subdirectory of ``settings.TMP_DIR`` named after the
    file's stem, and ``file_path`` is the destination path for the file
    inside it. Both directories are created if missing.
    """
    save_dir = os.path.join(settings.TMP_DIR, os.path.splitext(name)[0])
    # makedirs(exist_ok=True) creates TMP_DIR and the subdirectory in one
    # call and avoids the check-then-create race of exists() + mkdir().
    os.makedirs(save_dir, exist_ok=True)
    return save_dir, os.path.join(save_dir, name)
parsers_module = None
def discover_available_parsers(user):
    """Return ``(choices, choices_url, classes)`` of parsers *user* may run.

    ``choices`` holds (index, label) pairs for file-based parsers and
    ``choices_url`` for URL-based ones; indexes reference ``classes``, the
    full list of Parser subclasses.
    """
    global parsers_module
    choices = []
    choices_url = []
    # Load the site-specific parsers file once and cache it module-wide so
    # its Parser subclasses get registered via __subclasses__.
    if not parsers_module:
        module_path = os.path.join(settings.VAR_DIR, 'conf/parsers.py')
        spec = importlib.util.spec_from_file_location('parsers', module_path)
        parsers_module = importlib.util.module_from_spec(spec)
        try:
            spec.loader.exec_module(parsers_module)
        except FileNotFoundError:
            # No custom parsers file: only built-in geotrek parsers apply.
            pass
    classes = subclasses(Parser)
    for index, cls in enumerate(classes):
        # Only expose parsers defined in the custom module or in geotrek.
        if cls.__module__.startswith('parsers') or cls.__module__.startswith('geotrek'):
            # Abstract/incomplete parsers (no label or target model) are hidden.
            if not cls.label or not cls.model:
                continue
            # Per-model import permission, e.g. "trekking.import_trek".
            codename = '{}.import_{}'.format(cls.model._meta.app_label, cls.model._meta.model_name)
            if not user.has_perm(codename):
                continue
            # Parsers without any URL expect a local file upload; the rest
            # fetch their data remotely.
            if not getattr(cls, 'url', None) and not getattr(cls, 'base_url', None):
                choices.append((index, cls.label))
            else:
                choices_url.append((index, cls.label))
    # Sort alphabetically by label; the stored indexes still point into
    # the unsorted ``classes`` list.
    choices = sorted(choices, key=lambda x: x[1])
    choices_url = sorted(choices_url, key=lambda x: x[1])
    return choices, choices_url, classes
|
GeotrekCE/Geotrek-admin
|
geotrek/common/utils/import_celery.py
|
Python
|
bsd-2-clause
| 2,058
|
# -*- coding: utf-8 -*-
'''
Genesis Add-on
Copyright (C) 2015 lambda
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import re,urllib
from resources.lib.libraries import client
from resources.lib.libraries import jsunpack
def resolve(url):
    """Resolve a videowood.tv page/embed URL to a direct media URL.

    Returns the playable URL with User-Agent/Referer headers appended in
    Kodi's ``url|headers`` convention, or None when resolution fails.
    """
    try:
        # Headers appended to the final URL (Kodi convention: url|headers).
        headers = '|%s' % urllib.urlencode({'User-Agent': client.agent(), 'Referer': url})
        # Extract the video id from .../video/<id> or .../embed/<id>.
        url = re.compile('//.+?/(?:video|embed)/([0-9a-zA-Z-_]+)').findall(url)[0]
        url = 'http://videowood.tv/embed/%s' % url
        result = client.request(url)
        # The player setup is hidden in a packed eval(...) blob; take the
        # last one on the page and unpack it.
        result = re.compile('(eval.*?)\n').findall(result)[-1]
        result = jsunpack.unpack(result)
        # Candidate sources: <embed src=...> plus any 'file': '...' entries.
        url = client.parseDOM(result, 'embed', ret='src')
        url += re.compile("[\'|\"]file[\'|\"] *: *[\'|\"](.+?)[\'|\"]").findall(result)
        # Drop subtitle/poster entries and strip escaping backslashes.
        url = [i.replace('\\', '') for i in url if not i.endswith(('.srt', '.png', '.jpg'))]
        # Normalize the scheme of the first remaining candidate.
        url = 'http://' + url[0].split('://', 1)[-1]
        url += headers
        return url
    except:
        # NOTE(review): bare except returns None on any failure -- the
        # deliberate best-effort pattern of these resolvers, but it also
        # hides programming errors.
        return
|
AMOboxTV/AMOBox.LegoBuild
|
plugin.video.titan/resources/lib/resolvers/videowood.py
|
Python
|
gpl-2.0
| 1,605
|
# __all__ must list importable attribute names (the package's submodules),
# not file names: with ".py" suffixes, "from motif import *" raises
# AttributeError because no attribute is named e.g. "autoregulation.py".
# "__init__.py" is dropped too -- the package itself is not a submodule.
__all__ = [
    "autoregulation",
    "feedforward_loop",
    "multi_input",
    "simple_regulation",
    "single_input",
]
|
kietjohn/network_motif
|
motif/__init__.py
|
Python
|
mit
| 141
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tests for google.appengine.tools.devappserver2.shutdown."""
import os
import signal
import time
import unittest
import google
import mox
from google.appengine.tools.devappserver2 import shutdown
class ShutdownTest(unittest.TestCase):
  """Tests for the devappserver2 shutdown module's quit/terminate helpers."""

  def setUp(self):
    # Stub os.abort so tests can record expectations on (and survive) it.
    self.mox = mox.Mox()
    self.mox.StubOutWithMock(os, 'abort')
    # Reset module-level state so each test starts from a clean slate.
    shutdown._shutting_down = False
    shutdown._num_terminate_requests = 0
    # Remember the original signal handlers so tearDown can restore them.
    self._sigint_handler = signal.getsignal(signal.SIGINT)
    self._sigterm_handler = signal.getsignal(signal.SIGTERM)

  def tearDown(self):
    self.mox.UnsetStubs()
    # Restore handlers possibly replaced by install_signal_handlers().
    signal.signal(signal.SIGINT, self._sigint_handler)
    signal.signal(signal.SIGTERM, self._sigterm_handler)

  def test_async_quit(self):
    # No stubbed calls expected: async_quit must not abort the process.
    self.mox.ReplayAll()
    shutdown.async_quit()
    self.assertTrue(shutdown._shutting_down)
    self.mox.VerifyAll()

  def test_async_terminate(self):
    # Two terminate requests: still a graceful shutdown, no os.abort().
    self.mox.ReplayAll()
    shutdown._async_terminate()
    self.assertTrue(shutdown._shutting_down)
    shutdown._async_terminate()
    self.mox.VerifyAll()

  def test_async_terminate_abort(self):
    # Record phase: expect exactly one os.abort() call, triggered by the
    # third terminate request below.
    os.abort()
    self.mox.ReplayAll()
    shutdown._async_terminate()
    self.assertTrue(shutdown._shutting_down)
    shutdown._async_terminate()
    shutdown._async_terminate()
    self.mox.VerifyAll()

  def test_install_signal_handlers(self):
    shutdown.install_signal_handlers()
    # Both SIGINT and SIGTERM must route to the async terminate handler.
    self.assertEqual(shutdown._async_terminate, signal.getsignal(signal.SIGINT))
    self.assertEqual(shutdown._async_terminate,
                     signal.getsignal(signal.SIGTERM))

  def test_wait_until_shutdown(self):
    # First sleep triggers async_quit, so the wait loop exits promptly.
    self.mox.StubOutWithMock(time, 'sleep')
    time.sleep(1).WithSideEffects(lambda _: shutdown.async_quit())
    self.mox.ReplayAll()
    shutdown.wait_until_shutdown()
    self.mox.VerifyAll()

  def test_wait_until_shutdown_raise_interrupted_io(self):
    # The wait loop must tolerate IOError raised from an interrupted sleep.
    def quit_and_raise(*_):
      shutdown.async_quit()
      raise IOError
    self.mox.StubOutWithMock(time, 'sleep')
    time.sleep(1).WithSideEffects(quit_and_raise)
    self.mox.ReplayAll()
    shutdown.wait_until_shutdown()
    self.mox.VerifyAll()
# Run the test suite when executed directly.
if __name__ == '__main__':
  unittest.main()
|
ychen820/microblog
|
y/google-cloud-sdk/platform/google_appengine/google/appengine/tools/devappserver2/shutdown_test.py
|
Python
|
bsd-3-clause
| 2,746
|
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Convolutional-recurrent layers.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.contrib.keras.python.keras import activations
from tensorflow.contrib.keras.python.keras import backend as K
from tensorflow.contrib.keras.python.keras import constraints
from tensorflow.contrib.keras.python.keras import initializers
from tensorflow.contrib.keras.python.keras import regularizers
from tensorflow.contrib.keras.python.keras.engine import InputSpec
from tensorflow.contrib.keras.python.keras.layers.recurrent import Recurrent
from tensorflow.contrib.keras.python.keras.utils import conv_utils
from tensorflow.python.framework import tensor_shape
class ConvRecurrent2D(Recurrent):
  """Abstract base class for convolutional recurrent layers.

  Do not use in a model -- it's not a functional layer!

  Arguments:
      filters: Integer, the dimensionality of the output space
          (i.e. the number output of filters in the convolution).
      kernel_size: An integer or tuple/list of n integers, specifying the
          dimensions of the convolution window.
      strides: An integer or tuple/list of n integers,
          specifying the strides of the convolution.
          Specifying any stride value != 1 is incompatible with specifying
          any `dilation_rate` value != 1.
      padding: One of `"valid"` or `"same"` (case-insensitive).
      data_format: A string,
          one of `channels_last` (default) or `channels_first`.
          The ordering of the dimensions in the inputs.
          `channels_last` corresponds to inputs with shape
          `(batch, time, ..., channels)`
          while `channels_first` corresponds to
          inputs with shape `(batch, time, channels, ...)`.
          It defaults to the `image_data_format` value found in your
          Keras config file at `~/.keras/keras.json`.
          If you never set it, then it will be "channels_last".
      dilation_rate: An integer or tuple/list of n integers, specifying
          the dilation rate to use for dilated convolution.
          Currently, specifying any `dilation_rate` value != 1 is
          incompatible with specifying any `strides` value != 1.
      return_sequences: Boolean. Whether to return the last output
          in the output sequence, or the full sequence.
      go_backwards: Boolean (default False).
          If True, process the input sequence backwards.
      stateful: Boolean (default False). If True, the last state
          for each sample at index i in a batch will be used as initial
          state for the sample of index i in the following batch.

  Input shape:
      5D tensor with shape `(num_samples, timesteps, channels, rows, cols)`.

  Output shape:
      - if `return_sequences`: 5D tensor with shape
          `(num_samples, timesteps, channels, rows, cols)`.
      - else, 4D tensor with shape `(num_samples, channels, rows, cols)`.

  # Masking
      This layer supports masking for input data with a variable number
      of timesteps. To introduce masks to your data,
      use an `Embedding` layer with the `mask_zero` parameter
      set to `True`.
      **Note:** for the time being, masking is only supported with Theano.

  # Note on using statefulness in RNNs
      You can set RNN layers to be 'stateful', which means that the states
      computed for the samples in one batch will be reused as initial states
      for the samples in the next batch.
      This assumes a one-to-one mapping between
      samples in different successive batches.
      To enable statefulness:
          - specify `stateful=True` in the layer constructor.
          - specify a fixed batch size for your model, by passing
              a `batch_input_size=(...)` to the first layer in your model.
              This is the expected shape of your inputs *including the batch
              size*.
              It should be a tuple of integers, e.g. `(32, 10, 100)`.
      To reset the states of your model, call `.reset_states()` on either
      a specific layer, or on your entire model.
  """

  def __init__(self,
               filters,
               kernel_size,
               strides=(1, 1),
               padding='valid',
               data_format=None,
               dilation_rate=(1, 1),
               return_sequences=False,
               go_backwards=False,
               stateful=False,
               **kwargs):
    super(ConvRecurrent2D, self).__init__(**kwargs)
    self.filters = filters
    # Normalize user-facing arguments into canonical 2-tuples/strings.
    self.kernel_size = conv_utils.normalize_tuple(kernel_size, 2, 'kernel_size')
    self.strides = conv_utils.normalize_tuple(strides, 2, 'strides')
    self.padding = conv_utils.normalize_padding(padding)
    self.data_format = conv_utils.normalize_data_format(data_format)
    self.dilation_rate = conv_utils.normalize_tuple(dilation_rate, 2,
                                                    'dilation_rate')
    self.return_sequences = return_sequences
    self.go_backwards = go_backwards
    self.stateful = stateful
    # Inputs are rank-5: (samples, time, rows, cols, channels) or the
    # channels_first equivalent.
    self.input_spec = [InputSpec(ndim=5)]
    self.state_spec = None

  def _compute_output_shape(self, input_shape):
    """Compute this layer's output shape from the rank-5 input shape."""
    if isinstance(input_shape, list):
      input_shape = input_shape[0]
    input_shape = tensor_shape.TensorShape(input_shape).as_list()
    if self.data_format == 'channels_first':
      rows = input_shape[3]
      cols = input_shape[4]
    elif self.data_format == 'channels_last':
      rows = input_shape[2]
      cols = input_shape[3]
    # Standard convolution output arithmetic for each spatial dimension.
    rows = conv_utils.conv_output_length(
        rows,
        self.kernel_size[0],
        padding=self.padding,
        stride=self.strides[0],
        dilation=self.dilation_rate[0])
    cols = conv_utils.conv_output_length(
        cols,
        self.kernel_size[1],
        padding=self.padding,
        stride=self.strides[1],
        dilation=self.dilation_rate[1])
    if self.return_sequences:
      if self.data_format == 'channels_first':
        return tensor_shape.TensorShape(
            [input_shape[0], input_shape[1], self.filters, rows, cols])
      elif self.data_format == 'channels_last':
        return tensor_shape.TensorShape(
            [input_shape[0], input_shape[1], rows, cols, self.filters])
    else:
      if self.data_format == 'channels_first':
        return tensor_shape.TensorShape(
            [input_shape[0], self.filters, rows, cols])
      elif self.data_format == 'channels_last':
        return tensor_shape.TensorShape(
            [input_shape[0], rows, cols, self.filters])

  def get_config(self):
    """Return the layer config for serialization (see `Layer.get_config`)."""
    config = {
        'filters': self.filters,
        'kernel_size': self.kernel_size,
        'strides': self.strides,
        'padding': self.padding,
        'data_format': self.data_format,
        'dilation_rate': self.dilation_rate,
        'return_sequences': self.return_sequences,
        'go_backwards': self.go_backwards,
        'stateful': self.stateful
    }
    base_config = super(ConvRecurrent2D, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))
class ConvLSTM2D(ConvRecurrent2D):
  """Convolutional LSTM.

  It is similar to an LSTM layer, but the input transformations
  and recurrent transformations are both convolutional.

  Arguments:
      filters: Integer, the dimensionality of the output space
          (i.e. the number output of filters in the convolution).
      kernel_size: An integer or tuple/list of n integers, specifying the
          dimensions of the convolution window.
      strides: An integer or tuple/list of n integers,
          specifying the strides of the convolution.
          Specifying any stride value != 1 is incompatible with specifying
          any `dilation_rate` value != 1.
      padding: One of `"valid"` or `"same"` (case-insensitive).
      data_format: A string,
          one of `channels_last` (default) or `channels_first`.
          The ordering of the dimensions in the inputs.
          `channels_last` corresponds to inputs with shape
          `(batch, time, ..., channels)`
          while `channels_first` corresponds to
          inputs with shape `(batch, time, channels, ...)`.
          It defaults to the `image_data_format` value found in your
          Keras config file at `~/.keras/keras.json`.
          If you never set it, then it will be "channels_last".
      dilation_rate: An integer or tuple/list of n integers, specifying
          the dilation rate to use for dilated convolution.
          Currently, specifying any `dilation_rate` value != 1 is
          incompatible with specifying any `strides` value != 1.
      activation: Activation function to use.
          If you don't specify anything, no activation is applied
          (ie. "linear" activation: `a(x) = x`).
      recurrent_activation: Activation function to use
          for the recurrent step.
      use_bias: Boolean, whether the layer uses a bias vector.
      kernel_initializer: Initializer for the `kernel` weights matrix,
          used for the linear transformation of the inputs..
      recurrent_initializer: Initializer for the `recurrent_kernel`
          weights matrix,
          used for the linear transformation of the recurrent state..
      bias_initializer: Initializer for the bias vector.
      unit_forget_bias: Boolean.
          If True, add 1 to the bias of the forget gate at initialization.
          Use in combination with `bias_initializer="zeros"`.
          This is recommended in [Jozefowicz et
            al.](http://www.jmlr.org/proceedings/papers/v37/jozefowicz15.pdf)
      kernel_regularizer: Regularizer function applied to
          the `kernel` weights matrix.
      recurrent_regularizer: Regularizer function applied to
          the `recurrent_kernel` weights matrix.
      bias_regularizer: Regularizer function applied to the bias vector.
      activity_regularizer: Regularizer function applied to
          the output of the layer (its "activation")..
      kernel_constraint: Constraint function applied to
          the `kernel` weights matrix.
      recurrent_constraint: Constraint function applied to
          the `recurrent_kernel` weights matrix.
      bias_constraint: Constraint function applied to the bias vector.
      return_sequences: Boolean. Whether to return the last output
          in the output sequence, or the full sequence.
      go_backwards: Boolean (default False).
          If True, process the input sequence backwards.
      stateful: Boolean (default False). If True, the last state
          for each sample at index i in a batch will be used as initial
          state for the sample of index i in the following batch.
      dropout: Float between 0 and 1.
          Fraction of the units to drop for
          the linear transformation of the inputs.
      recurrent_dropout: Float between 0 and 1.
          Fraction of the units to drop for
          the linear transformation of the recurrent state.

  Input shape:
      - if data_format='channels_first'
          5D tensor with shape:
          `(samples,time, channels, rows, cols)`
      - if data_format='channels_last'
          5D tensor with shape:
          `(samples,time, rows, cols, channels)`

  Output shape:
      - if `return_sequences`
          - if data_format='channels_first'
              5D tensor with shape:
              `(samples, time, filters, output_row, output_col)`
          - if data_format='channels_last'
              5D tensor with shape:
              `(samples, time, output_row, output_col, filters)`
      - else
          - if data_format ='channels_first'
              4D tensor with shape:
              `(samples, filters, output_row, output_col)`
          - if data_format='channels_last'
              4D tensor with shape:
              `(samples, output_row, output_col, filters)`
          where o_row and o_col depend on the shape of the filter and
          the padding

  Raises:
      ValueError: in case of invalid constructor arguments.

  References:
      - [Convolutional LSTM Network: A Machine Learning Approach for
        Precipitation Nowcasting](http://arxiv.org/abs/1506.04214v1)
      The current implementation does not include the feedback loop on the
      cells output
  """

  def __init__(self,
               filters,
               kernel_size,
               strides=(1, 1),
               padding='valid',
               data_format=None,
               dilation_rate=(1, 1),
               activation='tanh',
               recurrent_activation='hard_sigmoid',
               use_bias=True,
               kernel_initializer='glorot_uniform',
               recurrent_initializer='orthogonal',
               bias_initializer='zeros',
               unit_forget_bias=True,
               kernel_regularizer=None,
               recurrent_regularizer=None,
               bias_regularizer=None,
               activity_regularizer=None,
               kernel_constraint=None,
               recurrent_constraint=None,
               bias_constraint=None,
               return_sequences=False,
               go_backwards=False,
               stateful=False,
               dropout=0.,
               recurrent_dropout=0.,
               **kwargs):
    super(ConvLSTM2D, self).__init__(
        filters,
        kernel_size,
        strides=strides,
        padding=padding,
        data_format=data_format,
        dilation_rate=dilation_rate,
        return_sequences=return_sequences,
        go_backwards=go_backwards,
        stateful=stateful,
        **kwargs)
    # Resolve string identifiers to callables/objects via the Keras
    # registries.
    self.activation = activations.get(activation)
    self.recurrent_activation = activations.get(recurrent_activation)
    self.use_bias = use_bias
    self.kernel_initializer = initializers.get(kernel_initializer)
    self.recurrent_initializer = initializers.get(recurrent_initializer)
    self.bias_initializer = initializers.get(bias_initializer)
    self.unit_forget_bias = unit_forget_bias
    self.kernel_regularizer = regularizers.get(kernel_regularizer)
    self.recurrent_regularizer = regularizers.get(recurrent_regularizer)
    self.bias_regularizer = regularizers.get(bias_regularizer)
    self.activity_regularizer = regularizers.get(activity_regularizer)
    self.kernel_constraint = constraints.get(kernel_constraint)
    self.recurrent_constraint = constraints.get(recurrent_constraint)
    self.bias_constraint = constraints.get(bias_constraint)
    # Clamp dropout fractions into [0, 1].
    self.dropout = min(1., max(0., dropout))
    self.recurrent_dropout = min(1., max(0., recurrent_dropout))
    # Two rank-4 states: hidden state h and cell state c.
    self.state_spec = [InputSpec(ndim=4), InputSpec(ndim=4)]

  def build(self, input_shape):
    """Create the layer's weights: fused 4-gate kernels plus bias."""
    if isinstance(input_shape, list):
      input_shape = input_shape[0]
    input_shape = tuple(tensor_shape.TensorShape(input_shape).as_list())
    batch_size = input_shape[0] if self.stateful else None
    self.input_spec[0] = InputSpec(shape=(batch_size, None) + input_shape[2:])
    if self.stateful:
      self.reset_states()
    else:
      # initial states: 2 all-zero tensor of shape (filters)
      self.states = [None, None]
    if self.data_format == 'channels_first':
      channel_axis = 2
    else:
      channel_axis = -1
    if input_shape[channel_axis] is None:
      raise ValueError('The channel dimension of the inputs '
                       'should be defined. Found `None`.')
    input_dim = input_shape[channel_axis]
    state_shape = [None] * 4
    state_shape[channel_axis] = input_dim
    state_shape = tuple(state_shape)
    self.state_spec = [
        InputSpec(shape=state_shape),
        InputSpec(shape=state_shape)
    ]
    # One fused kernel holds all four gates (i, f, c, o), hence filters * 4.
    kernel_shape = self.kernel_size + (input_dim, self.filters * 4)
    self.kernel_shape = kernel_shape
    recurrent_kernel_shape = self.kernel_size + (self.filters, self.filters * 4)
    self.kernel = self.add_weight(
        shape=kernel_shape,
        initializer=self.kernel_initializer,
        name='kernel',
        regularizer=self.kernel_regularizer,
        constraint=self.kernel_constraint)
    self.recurrent_kernel = self.add_weight(
        shape=recurrent_kernel_shape,
        initializer=self.recurrent_initializer,
        name='recurrent_kernel',
        regularizer=self.recurrent_regularizer,
        constraint=self.recurrent_constraint)
    if self.use_bias:
      self.bias = self.add_weight(
          shape=(self.filters * 4,),
          initializer=self.bias_initializer,
          name='bias',
          regularizer=self.bias_regularizer,
          constraint=self.bias_constraint)
      if self.unit_forget_bias:
        # Initialize the forget-gate slice of the bias to 1 (Jozefowicz
        # et al. recommendation, see class docstring).
        bias_value = np.zeros((self.filters * 4,))
        bias_value[self.filters:self.filters * 2] = 1.
        K.set_value(self.bias, bias_value)
    else:
      self.bias = None
    # Per-gate views into the fused kernels/bias, in i, f, c, o order.
    self.kernel_i = self.kernel[:, :, :, :self.filters]
    self.recurrent_kernel_i = self.recurrent_kernel[:, :, :, :self.filters]
    self.kernel_f = self.kernel[:, :, :, self.filters:self.filters * 2]
    self.recurrent_kernel_f = self.recurrent_kernel[:, :, :, self.filters:
                                                    self.filters * 2]
    self.kernel_c = self.kernel[:, :, :, self.filters * 2:self.filters * 3]
    self.recurrent_kernel_c = self.recurrent_kernel[:, :, :, self.filters * 2:
                                                    self.filters * 3]
    self.kernel_o = self.kernel[:, :, :, self.filters * 3:]
    self.recurrent_kernel_o = self.recurrent_kernel[:, :, :, self.filters * 3:]
    if self.use_bias:
      self.bias_i = self.bias[:self.filters]
      self.bias_f = self.bias[self.filters:self.filters * 2]
      self.bias_c = self.bias[self.filters * 2:self.filters * 3]
      self.bias_o = self.bias[self.filters * 3:]
    else:
      self.bias_i = None
      self.bias_f = None
      self.bias_c = None
      self.bias_o = None
    self.built = True

  def get_initial_state(self, inputs):
    """Build all-zero initial states [h, c] matching the output geometry."""
    # (samples, timesteps, rows, cols, filters)
    initial_state = K.zeros_like(inputs)
    # (samples, rows, cols, filters)
    initial_state = K.sum(initial_state, axis=1)
    # Convolve with a zero kernel so the result has the layer's output
    # spatial shape and `filters` channels.
    shape = list(self.kernel_shape)
    shape[-1] = self.filters
    initial_state = self.input_conv(
        initial_state, K.zeros(tuple(shape)), padding=self.padding)
    initial_states = [initial_state for _ in range(2)]
    return initial_states

  def reset_states(self):
    """Zero out (or create) the persistent states of a stateful layer."""
    if not self.stateful:
      raise RuntimeError('Layer must be stateful.')
    input_shape = self.input_spec[0].shape
    output_shape = self._compute_output_shape(input_shape)
    if not input_shape[0]:
      raise ValueError('If a RNN is stateful, a complete '
                       'input_shape must be provided '
                       '(including batch size). '
                       'Got input shape: ' + str(input_shape))
    # NOTE(review): this unpacking assumes channels_last ordering
    # (rows, cols, filters); verify behavior under channels_first.
    if self.return_sequences:
      out_row, out_col, out_filter = output_shape[2:]
    else:
      out_row, out_col, out_filter = output_shape[1:]
    if hasattr(self, 'states'):
      K.set_value(self.states[0],
                  np.zeros((input_shape[0], out_row, out_col, out_filter)))
      K.set_value(self.states[1],
                  np.zeros((input_shape[0], out_row, out_col, out_filter)))
    else:
      self.states = [
          K.zeros((input_shape[0], out_row, out_col, out_filter)),
          K.zeros((input_shape[0], out_row, out_col, out_filter))
      ]

  def get_constants(self, inputs, training=None):
    """Return per-gate dropout masks: [input masks, recurrent masks]."""
    constants = []
    if self.implementation == 0 and 0 < self.dropout < 1:
      # Build a ones tensor shaped like one timestep of the input, then
      # derive four independent dropout masks (one per gate).
      ones = K.zeros_like(inputs)
      ones = K.sum(ones, axis=1)
      ones += 1

      def dropped_inputs():
        return K.dropout(ones, self.dropout)

      dp_mask = [
          K.in_train_phase(dropped_inputs, ones, training=training)
          for _ in range(4)
      ]
      constants.append(dp_mask)
    else:
      # No dropout: masks are the scalar 1 (identity).
      constants.append([K.cast_to_floatx(1.) for _ in range(4)])
    if 0 < self.recurrent_dropout < 1:
      # Recurrent masks must match the hidden-state geometry, so push the
      # ones tensor through a zero input convolution first.
      shape = list(self.kernel_shape)
      shape[-1] = self.filters
      ones = K.zeros_like(inputs)
      ones = K.sum(ones, axis=1)
      ones = self.input_conv(ones, K.zeros(shape), padding=self.padding)
      ones += 1.

      def dropped_inputs():  # pylint: disable=function-redefined
        return K.dropout(ones, self.recurrent_dropout)

      rec_dp_mask = [
          K.in_train_phase(dropped_inputs, ones, training=training)
          for _ in range(4)
      ]
      constants.append(rec_dp_mask)
    else:
      constants.append([K.cast_to_floatx(1.) for _ in range(4)])
    return constants

  def input_conv(self, x, w, b=None, padding='valid'):
    """Input-to-hidden convolution with the layer's strides/dilation."""
    conv_out = K.conv2d(
        x,
        w,
        strides=self.strides,
        padding=padding,
        data_format=self.data_format,
        dilation_rate=self.dilation_rate)
    if b is not None:
      conv_out = K.bias_add(conv_out, b, data_format=self.data_format)
    return conv_out

  def reccurent_conv(self, x, w):
    """Hidden-to-hidden convolution: stride 1, 'same' padding so the
    state's spatial shape is preserved across timesteps."""
    conv_out = K.conv2d(
        x, w, strides=(1, 1), padding='same', data_format=self.data_format)
    return conv_out

  def step(self, inputs, states):
    """One LSTM timestep; returns (output h, new states [h, c])."""
    # states: [h_tm1, c_tm1, input dropout masks, recurrent dropout masks]
    assert len(states) == 4
    h_tm1 = states[0]
    c_tm1 = states[1]
    dp_mask = states[2]
    rec_dp_mask = states[3]
    # Input contributions for the four gates (dropout applied per gate).
    x_i = self.input_conv(
        inputs * dp_mask[0], self.kernel_i, self.bias_i, padding=self.padding)
    x_f = self.input_conv(
        inputs * dp_mask[1], self.kernel_f, self.bias_f, padding=self.padding)
    x_c = self.input_conv(
        inputs * dp_mask[2], self.kernel_c, self.bias_c, padding=self.padding)
    x_o = self.input_conv(
        inputs * dp_mask[3], self.kernel_o, self.bias_o, padding=self.padding)
    # Recurrent contributions from the previous hidden state.
    h_i = self.reccurent_conv(h_tm1 * rec_dp_mask[0], self.recurrent_kernel_i)
    h_f = self.reccurent_conv(h_tm1 * rec_dp_mask[1], self.recurrent_kernel_f)
    h_c = self.reccurent_conv(h_tm1 * rec_dp_mask[2], self.recurrent_kernel_c)
    h_o = self.reccurent_conv(h_tm1 * rec_dp_mask[3], self.recurrent_kernel_o)
    # Standard LSTM gate equations.
    i = self.recurrent_activation(x_i + h_i)
    f = self.recurrent_activation(x_f + h_f)
    c = f * c_tm1 + i * self.activation(x_c + h_c)
    o = self.recurrent_activation(x_o + h_o)
    h = o * self.activation(c)
    return h, [h, c]

  def get_config(self):
    """Return the layer config for serialization (see `Layer.get_config`)."""
    config = {
        'activation':
            activations.serialize(self.activation),
        'recurrent_activation':
            activations.serialize(self.recurrent_activation),
        'use_bias':
            self.use_bias,
        'kernel_initializer':
            initializers.serialize(self.kernel_initializer),
        'recurrent_initializer':
            initializers.serialize(self.recurrent_initializer),
        'bias_initializer':
            initializers.serialize(self.bias_initializer),
        'unit_forget_bias':
            self.unit_forget_bias,
        'kernel_regularizer':
            regularizers.serialize(self.kernel_regularizer),
        'recurrent_regularizer':
            regularizers.serialize(self.recurrent_regularizer),
        'bias_regularizer':
            regularizers.serialize(self.bias_regularizer),
        'activity_regularizer':
            regularizers.serialize(self.activity_regularizer),
        'kernel_constraint':
            constraints.serialize(self.kernel_constraint),
        'recurrent_constraint':
            constraints.serialize(self.recurrent_constraint),
        'bias_constraint':
            constraints.serialize(self.bias_constraint),
        'dropout':
            self.dropout,
        'recurrent_dropout':
            self.recurrent_dropout
    }
    base_config = super(ConvLSTM2D, self).get_config()
    return dict(list(base_config.items()) + list(config.items()))
|
npuichigo/ttsflow
|
third_party/tensorflow/tensorflow/contrib/keras/python/keras/layers/convolutional_recurrent.py
|
Python
|
apache-2.0
| 24,296
|
'''
Created on Jan 10, 2017
@author: tamsyn
'''
import basefilter
import rblquery
import filterfactory
class RBLFilter(basefilter.BaseFilter):
    '''
    RBL (realtime blackhole list) filter: looks up the IP given in
    params['ip'] and stores the lookup result on the instance.
    '''
    def __init__(self, msg, params):
        '''
        Constructor.

        msg: message object handed to the base filter.
        params: dict; must contain 'ip' (and may contain 'timeout').
        '''
        # Old-style explicit base-class calls (no super()): register the
        # filter name, then attach the message and parameters.
        basefilter.BaseFilter.__init__(self, "RBL")
        basefilter.BaseFilter.set_object(self, msg)
        basefilter.BaseFilter.set_param(self, params)
        self.filter = rblquery.Query(params['ip'])
    def Execute(self):
        # Run the Spamhaus DNSBL lookup and keep the result on the instance.
        self.result = self.filter.spamhaus_lookup()
if __name__ == '__main__':
    # Manual smoke test when run directly: query a fixed IP.
    param = {}
    param['ip'] = '123.27.90.88'
    param['timeout'] = 2
    rbl = RBLFilter("", param)
    rbl.Execute()
    res = rbl.get_result()
    print("Execute result : %s" % res)
elif __name__ == 'rblfilter':
    # When imported under the module name 'rblfilter', self-register with
    # the filter factory instead of running the smoke test.
    filterfactory.factory.register_filter(RBLFilter)
|
tamsynlin/dragon-master
|
sample/rblfilter.py
|
Python
|
mit
| 903
|
import cutil
import logging
from selenium import webdriver
from web_wrapper.web import Web
from web_wrapper.selenium_utils import SeleniumUtils
logger = logging.getLogger(__name__)
class DriverSeleniumPhantomJS(Web, SeleniumUtils):
    """PhantomJS-backed selenium web driver wrapper.

    Manages a PhantomJS webdriver session whose headers, cookies and proxy
    can be changed at runtime. PhantomJS cannot change headers or proxy on
    a live process, so those setters recreate the underlying webdriver.

    Fixes over the original:
      * ``update_cookies`` called ``list.expand`` (no such method; it is
        ``extend``), so every call raised AttributeError.
      * ``default_service_args`` aliased ``driver_args['service_args']``,
        so proxy flags accumulated across ``set_proxy`` calls and
        ``reset()`` could never restore the true defaults; the default is
        now kept as an independent copy.
      * A stray debug ``print(cookie)`` is now a ``logger.debug`` call.
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.driver_type = 'selenium_phantomjs'
        # Keep a pristine, independent copy of the user-supplied service
        # args so proxy settings can always be reset back to them.
        self.default_service_args = list(self.driver_args.get('service_args', []))
        self.driver_args['service_args'] = list(self.default_service_args)
        self.dcap = dict(webdriver.DesiredCapabilities.PHANTOMJS)
        self.set_headers(self.current_headers, update=False)
        self.set_proxy(self.current_proxy, update=False)
        self._create_session()

    # Headers Set/Get
    def set_headers(self, headers, update=True):
        """Replace all custom headers; recreates the driver when update=True."""
        logger.debug("Set phantomjs headers")
        self.current_headers = headers
        # Rebuild capabilities from scratch so removed headers do not linger.
        self.dcap = dict(webdriver.DesiredCapabilities.PHANTOMJS)
        for key, value in headers.items():
            self.dcap['phantomjs.page.customHeaders.{}'.format(key)] = value
        if update is True:
            # Recreate webdriver with new header
            self._update()

    def get_headers(self):
        """Return the headers last pushed to the driver (local cache)."""
        # TODO: Try and get from phantom directly to be accurate
        return self.current_headers

    def update_headers(self, headers, update=True):
        """Merge *headers* into the current set and re-apply them."""
        self.current_headers.update(headers)
        self.set_headers(self.current_headers, update=update)

    # Cookies Set/Get
    def get_cookies(self):
        """Return the cookies currently held by the live driver."""
        return self.driver.get_cookies()

    def set_cookies(self, cookies):
        """Replace all driver cookies with *cookies*."""
        # TODO: Does not seem to actually set them correctly
        self.driver.delete_all_cookies()
        for cookie in cookies:
            logger.debug("Setting cookie: %s", cookie)
            # Only pass through the keys selenium's add_cookie understands.
            self.driver.add_cookie({k: cookie[k] for k in ('name', 'value', 'path', 'expirationDate', 'expiry', 'domain') if k in cookie})

    def update_cookies(self, cookies):
        """Append *cookies* to the cached list and push the result."""
        # Bug fix: the original called .expand(), which lists do not have.
        self.current_cookies.extend(cookies)
        self.set_cookies(self.current_cookies)

    # Proxy Set/Get
    def set_proxy(self, proxy, update=True):
        """
        Set proxy for requests session.

        Rebuilds service_args from the pristine defaults every time so
        repeated proxy switches do not accumulate stale --proxy flags.
        """
        update_web_driver = False
        if self.current_proxy != proxy:
            # Did we change proxies?
            update_web_driver = True
        self.current_proxy = proxy
        service_args = list(self.default_service_args)
        if proxy is not None:
            proxy_parts = cutil.get_proxy_parts(proxy)
            service_args.extend(['--proxy={host}:{port}'.format(**proxy_parts),
                                 '--proxy-type={schema}'.format(**proxy_parts),
                                 ])
            if proxy_parts.get('user') is not None:
                service_args.append('--proxy-auth={user}:{password}'.format(**proxy_parts))
        self.driver_args['service_args'] = service_args
        # Recreate webdriver with new proxy settings
        if update is True and update_web_driver is True:
            self._update()

    def get_proxy(self):
        """Return the proxy string currently in effect (or None)."""
        return self.current_proxy

    # Session
    def _create_session(self):
        """
        Creates a fresh session with no/default headers and proxies
        """
        logger.debug("Create new phantomjs web driver")
        self.driver = webdriver.PhantomJS(desired_capabilities=self.dcap,
                                          **self.driver_args)
        self.set_cookies(self.current_cookies)
        self.driver.set_window_size(1920, 1080)

    def _update(self):
        """
        Re create the web driver with the new proxy or header settings
        """
        logger.debug("Update phantomjs web driver")
        self.quit()
        self._create_session()

    def reset(self):
        """
        Kills old session and creates a new one with no proxies or headers
        """
        # Kill old connection
        self.quit()
        # Clear proxy data (independent copy of the defaults)
        self.driver_args['service_args'] = list(self.default_service_args)
        # Clear headers
        self.dcap = dict(webdriver.DesiredCapabilities.PHANTOMJS)
        # Create new web driver
        self._create_session()

    def quit(self):
        """
        Generic function to close distroy and session data
        """
        if self.driver is not None:
            self.driver.quit()
            self.driver = None
|
xtream1101/web-wrapper
|
web_wrapper/driver_selenium_phantomjs.py
|
Python
|
mit
| 4,415
|
#
# Code by Alexander Pruss and under the MIT license
#
# mengersponge [levels [options]]
# levels is a level count, up to 5
# options is a string of characters containing possibly the options 's' for 'slice' (cut off a diagonal slice) and 'c' for 'color'
#
from mine import *
import mcpi.settings as settings
import sys
# Stained-glass palette used in color mode: deleteCubes indexes it by
# recursion depth, one color per level (supports up to 6 levels).
RAINBOW = (block.STAINED_GLASS_RED,block.STAINED_GLASS_ORANGE,block.STAINED_GLASS_YELLOW,
            block.STAINED_GLASS_GREEN,block.STAINED_GLASS_BLUE,block.STAINED_GLASS_PURPLE)
def deleteCubes(x0, y0, z0, length, colorIndex=None):
    """Recursively carve a Menger sponge out of the solid cube at (x0, y0, z0).

    Splits the cube into a 3x3x3 grid of sub-cubes; the six face centers
    and the middle cube are hollowed out (or filled with colored glass
    when colorIndex is given), the remaining 20 recurse one level deeper.

    x0, y0, z0 -- minimum corner of the cube.
    length     -- edge length of the cube; the script always passes a
                  power of 3, so floor division below is exact.
    colorIndex -- None for plain carving, or an index into RAINBOW that
                  advances by one per recursion level.
    """
    nextColorIndex = colorIndex + 1 if colorIndex is not None else None
    # Bug fix: '/=' yields a float on Python 3, producing fractional block
    # coordinates; floor division keeps them integral (identical result on
    # Python 2, where int / int already floored).
    length //= 3
    if length < 1:
        return
    for x in range(3):
        for y in range(3):
            for z in range(3):
                posX = x0 + x * length
                posY = y0 + y * length
                posZ = z0 + z * length
                if (x == 1 and y == 1) or (x == 1 and z == 1) or (y == 1 and z == 1):
                    # Center-of-face or center-of-cube cell: remove it,
                    # or fill with this level's glass color.
                    fill = RAINBOW[colorIndex] if colorIndex is not None else block.AIR
                    mc.setBlocks(posX, posY, posZ,
                                 posX + length - 1, posY + length - 1, posZ + length - 1,
                                 fill)
                else:
                    deleteCubes(posX, posY, posZ, length, nextColorIndex)
def slice(x0, y0, z0, length):
    """Cut a diagonal slice off the cube at (x0, y0, z0).

    Clears every block whose offsets satisfy dx + dy + dz >= 1.5 * length,
    i.e. the corner of the cube farthest from the origin corner.
    (The name shadows the builtin ``slice`` but is kept for callers.)
    """
    threshold = 1.5 * length
    for dx in range(length):
        for dy in range(length):
            for dz in range(length):
                if dx + dy + dz >= threshold:
                    mc.setBlock(x0 + dx, y0 + dy, z0 + dz, block.AIR)
# Script body: connect, pick a size, build a solid cube at the player,
# then carve the sponge (and optionally slice it).
mc = Minecraft()
playerPos = mc.player.getPos()
# Pocket Edition worlds are small; default to a smaller sponge there.
if settings.isPE:
    length = 3*3*3
else:
    length = 3*3*3*3
# argv[1]: level count -- edge length is 3**levels.
if len(sys.argv) > 1:
    length = 3**int(sys.argv[1])
colorIndex = None
# argv[2]: option string; 'c' enables colored glass, 's' enables slicing.
if len(sys.argv) > 2:
    colorIndex = 0 if 'c' in sys.argv[2] else None
mc.setBlocks(playerPos.x,playerPos.y,playerPos.z,
             playerPos.x+length-1,playerPos.y+length-1,playerPos.z+length-1,block.WOOL_PURPLE)
deleteCubes(playerPos.x,playerPos.y,playerPos.z,length,colorIndex=colorIndex)
if len(sys.argv)>2 and 's' in sys.argv[2]:
    mc.postToChat("Slicing")
    slice(playerPos.x,playerPos.y,playerPos.z,length)
|
arpruss/raspberryjam-pe
|
p2/scripts3/mengersponge.py
|
Python
|
mit
| 2,297
|
from copy import deepcopy
from common.serializers.serialization import pool_state_serializer
from plenum.common.constants import TARGET_NYM, DATA, ALIAS, SERVICES
from plenum.common.ledger import Ledger
from plenum.server.pool_req_handler import PoolRequestHandler as PHandler
from indy_common.auth import Authoriser
from indy_common.constants import NODE
from indy_node.persistence.idr_cache import IdrCache
from state.state import State
class PoolRequestHandler(PHandler):
    """Pool-ledger request handler with Indy-specific authorisation.

    Extends plenum's PoolRequestHandler so that steward/trustee role
    lookups go through the identity cache (IdrCache) and NODE-transaction
    field changes are vetted by the Authoriser rules.
    """
    def __init__(self, ledger: Ledger, state: State,
                 domainState: State, idrCache: IdrCache):
        super().__init__(ledger, state, domainState)
        self.stateSerializer = pool_state_serializer
        self.idrCache = idrCache
    def isSteward(self, nym, isCommitted: bool=True):
        # Role check is delegated to the identity cache.
        return self.idrCache.hasSteward(nym, isCommitted)
    def authErrorWhileUpdatingNode(self, request):
        """Return an error string if *request* may not update a node, else None.

        Trustees skip the base-class ownership check; everyone else must
        pass it first. Then every changed field of the node data is run
        through Authoriser.authorised for the sender's role.
        """
        origin = request.identifier
        isTrustee = self.idrCache.hasTrustee(origin, isCommitted=False)
        if not isTrustee:
            # Non-trustees must satisfy the base-class (steward/ownership)
            # checks before field-level authorisation is considered.
            error = super().authErrorWhileUpdatingNode(request)
            if error:
                return error
        origin = request.identifier
        operation = request.operation
        nodeNym = operation.get(TARGET_NYM)
        data = operation.get(DATA, {})
        error = self.dataErrorWhileValidatingUpdate(data, nodeNym)
        if error:
            return error
        isStewardOfNode = self.isStewardOfNode(
            origin, nodeNym, isCommitted=False)
        actorRole = self.idrCache.getRole(origin, isCommitted=False)
        nodeInfo = self.getNodeData(nodeNym, isCommitted=False)
        # Work on a copy: ALIAS is the node's identity and is never
        # treated as an updatable field.
        data = deepcopy(data)
        data.pop(ALIAS, None)
        vals = []
        msgs = []
        for k in data:
            oldVal = nodeInfo.get(k, None) if nodeInfo else None
            newVal = data[k]
            if k == SERVICES:
                # Normalise falsy service lists so [] vs None does not
                # register as a change.
                if not oldVal:
                    oldVal = []
                if not newVal:
                    newVal = []
            if oldVal != newVal:
                r, msg = Authoriser.authorised(NODE, actorRole,
                                               field=k,
                                               oldVal=oldVal,
                                               newVal=newVal,
                                               isActorOwnerOfSubject=isStewardOfNode)
                vals.append(r)
                msgs.append(msg)
        # NOTE(review): msgs collects the message for every changed field,
        # including authorised ones; this assumes Authoriser returns
        # join-able strings (not None) in the success case -- confirm.
        msg = None if all(vals) else '\n'.join(msgs)
        return msg
|
spivachuk/sovrin-node
|
indy_node/server/pool_req_handler.py
|
Python
|
apache-2.0
| 2,510
|
#MenuTitle: HT LetterSpacer UI
#
# Letterspacer, an auto-spacing tool
# Copyright (C) 2009 - 2018, The Letterspacer Project Authors
#
# Version 1.1
# Glyphs menu script: launches the Letterspacer dialog.
import HT_LetterSpacer_script
# Python 3 moved reload() into importlib; on Python 2 the builtin reload
# is already available, so the failed import is simply ignored.
try:
    from importlib import reload
except:
    pass
# Re-import so edits to the script module take effect without restarting
# the host application.
reload(HT_LetterSpacer_script)
HT_LetterSpacer_script.HTLetterspacerScript(ui=True)
|
huertatipografica/HTLetterspacer
|
HT_LetterSpacer_UI.py
|
Python
|
gpl-3.0
| 313
|
import math
class NaiveBayesClassifier(object):
    """Naive Bayes classifier implemented from scratch.

    Two likelihood models are supported:
      * 'regular'  -- empirical frequency of each attribute value per class.
      * 'gaussian' -- normal density from the per-class mean / sample stdev.

    Fixes over the original: predictClass called ``classProb`` without
    ``self`` (NameError); 'regular' probabilities used integer division
    (always 0 on Python 2); ``zip``/``filter`` results are one-shot
    iterators on Python 3, so the training data is now materialized; the
    Python-2-only ``print`` statements are now function calls.
    """
    def __init__(self, x, y):
        """x: sequence of feature tuples; y: parallel sequence of labels."""
        self.classes = set(y)
        self.class_count = len(set(y))
        # Materialized so the training pairs can be iterated repeatedly.
        self.train = list(zip(x, y))
    def classProb(self, clss, input, method='regular'):
        """Return P(clss) * prod_i P(input[i] | clss) under *method*.

        Returns just the class prior when no attribute likelihood could be
        computed (e.g. every gaussian stdev is 0); on unexpected errors the
        exception is printed and None returned (original contract).
        """
        try:
            clss_instances = [u for u in self.train if u[1] == clss]
            clss_probability = len(clss_instances) / float(len(self.train))
            x_probab = []
            if method == 'regular':
                for i in range(len(input)):
                    x_occurrence = len([u for u in clss_instances
                                        if u[0][i] == input[i]])
                    # float() guards against integer truncation on Python 2.
                    x_probab.append(x_occurrence / float(len(clss_instances)))
            if method == 'gaussian':
                x_mean, x_stdev = self._classMean(clss), self._classStd(clss)
                for i in range(len(input)):
                    x, mean, stdev = input[i], x_mean[i], x_stdev[i]
                    if stdev == 0:
                        # Degenerate attribute: skip rather than divide by 0.
                        continue
                    exponent = math.exp(-(math.pow(x - mean, 2) /
                                          float(2 * math.pow(stdev, 2))))
                    x_probab.append((1.0 / (math.sqrt(2 * math.pi) * stdev)) *
                                    exponent)
            result = clss_probability
            for p in x_probab:
                result *= p
            return result
        except Exception as e:
            print(e)
    def _classMean(self, clss):
        """Per-attribute mean over the training rows labelled *clss*."""
        clss_instances = [u for u in self.train if u[1] == clss]
        X = [u[0] for u in clss_instances]
        return tuple(sum(attr) / float(len(attr)) for attr in zip(*X))
    def _classStd(self, clss):
        """Per-attribute sample standard deviation for class *clss*.

        NOTE: raises ZeroDivisionError when the class has one training row
        (n - 1 == 0), matching the original behaviour.
        """
        def stdev(values):
            avg = sum(values) / float(len(values))
            variance = sum(pow(v - avg, 2) for v in values) / float(len(values) - 1)
            return math.sqrt(variance)
        clss_instances = [u for u in self.train if u[1] == clss]
        X = [u[0] for u in clss_instances]
        return tuple(stdev(attr) for attr in zip(*X))
    def predictClass(self, input, method='regular'):
        """Print and return the most probable class label for *input*.

        Fixes the original NameError (``classProb`` was called without
        ``self``); also returns the prediction so callers need not parse
        stdout (previous return value was always None).
        """
        probabilityMap = {}
        for clss in self.classes:
            probabilityMap[clss] = self.classProb(clss, input, method)
        prediction = max(probabilityMap, key=probabilityMap.get)
        print('Prediction:', prediction)
        return prediction
|
meet-vora/mlp-classifier
|
models/naiveScratch.py
|
Python
|
mit
| 1,876
|
# $Id: __init__.py,v 1.1.1.1 2005/10/29 18:20:48 provos Exp $
from dpkt import *
import ip, ah, aim, arp, asn1, cdp, dhcp, dns, dtp, esp, ethernet, gre, hsrp, \
http, icmp, icmp6, igmp, ip6, ipx, loopback, netbios, netflow, ospf, \
pcap, pim, rpc, smb, stp, stun, tcp, telnet, tftp, tns, udp, \
vrrp, yahoo
|
Banjong1990/honey
|
dpkt/dpkt/__init__.py
|
Python
|
gpl-2.0
| 330
|
import numpy as np
import requests
import unicornhat as hat
# TfL unified API endpoint for tube + overground + DLR line status.
URL = 'https://api.tfl.gov.uk/Line/Mode/tube,overground,dlr/Status'
# TfL line identifiers. The trailing '#' markers appear to track which
# lines have been wired up to the display -- TODO confirm with author.
LINES = [
    'bakerloo', #
    'central', #
    'circle', #
    'district',
    'dlr',
    'hammersmith-city',
    'jubilee', #
    'metropolitan', #
    'northern', #
    'london-overground', #
    'piccadilly', #
    'victoria', #
    'waterloo-city',
]
# Map TfL status descriptions to a coarse grade; anything unlisted is 'BAD'.
STATUSES = {'Good Service': 'GOOD',
            'Minor Delays': 'OK'} # Otherwise status is 'BAD'
def update():
    """Fetch current TfL line statuses and grade each as GOOD/OK/BAD.

    Returns a dict mapping line id -> 'GOOD' | 'OK' | 'BAD'.
    """
    requests.packages.urllib3.disable_warnings()
    payload = requests.get(URL).json()
    descriptions = {}
    for line in payload:
        descriptions[line['id']] = line['lineStatuses'][0]['statusSeverityDescription']
    return {line_id: STATUSES.get(desc, 'BAD')
            for line_id, desc in descriptions.items()}
def reset():
    # Turn every pixel on the unicornhat off.
    hat.off()
def reset_for_coloring(x=0, y=0, r=255, g=255, b=255):
    """Dim the display and light the pixel at (x, y) with color (r, g, b).

    Bug fix: the original body referenced x, y, r, g, b without defining
    them, so every call raised NameError. They are now parameters with
    defaults, which keeps the original zero-argument call working.
    """
    hat.brightness(0.1)
    hat.set_pixel(x, y, r, g, b)
    hat.show()
def main():
    # NOTE(review): fetches the statuses but never displays them, and
    # met_status is popped and unused -- this looks unfinished. TODO confirm.
    status = update()
    met_status = status.pop('metropolitan')
if __name__ == '__main__':
    main()
|
noelevans/sandpit
|
rpi/all_tube_status.py
|
Python
|
mit
| 1,045
|
import time
from java.lang import Thread, Runnable
from java.awt import Canvas, Dimension
from java.awt.event import KeyListener, KeyEvent, ComponentListener
from java.awt.image import MemoryImageSource
from synchronize import make_synchronized
import jgl.GL
import jgl.GLU
import jgl.GLUT
from Game import Game
import gl_draw
class JGLBabalCanvas(Canvas, Runnable, KeyListener, ComponentListener):
    """Jython AWT canvas that renders the Babal game through jGL.

    Runs the game loop on its own thread (Runnable), draws via an
    off-screen MemoryImageSource, and forwards keyboard / resize events
    to the Game object. Methods that touch shared state are wrapped with
    make_synchronized to serialize access across the AWT and game threads.
    """
    def __init__(self, w, h, map_file=None):
        # w, h: initial canvas size; map_file: optional level file for Game.
        self.initSize = Dimension(w, h)
        self.GL = jgl.GL()
        self.GLU = jgl.GLU(self.GL)
        self.GL.glXMakeCurrent(self, 0, 0)
        self.game = Game(map_file)
        self._resized = 0
        self._image = None
        self._image_source = None
        self.addKeyListener(self)
        self.addComponentListener(self)
        gl_draw.init(self.GL, self.GLU, self.game)
    def start(self):
        # Begin the render/update loop on a fresh thread.
        self.game.reset_timer()
        self.thread = Thread(self)
        self.thread.start()
    def stop(self):
        # Clearing the reference makes run()'s identity check fail,
        # ending the loop cooperatively.
        self.thread = None
    def preferredSize(self):
        return self.initSize
    def minimumSize(self):
        return self.initSize
    def update(self, g):
        # Skip AWT's default background clear to avoid flicker.
        self.paint(g)
    def paint(self, g):
        # Blit the last rendered frame, if any.
        #print 'START paint'
        if self._image:
            g.drawImage(self._image, 0, 0, self)
        #print 'END paint'
    paint = make_synchronized(paint)
    def _draw(self):
        """Advance the game one tick and render a frame off-screen."""
        #print 'START draw'
        self.game.update()
        gl_draw.draw()
        if self._resized:
            # Canvas size changed: rebuild the animated image source.
            self._image_source = self.GL.glXGetImageSource()
            self._image_source.setAnimated(1)
            self._image = self.createImage(self._image_source)
            self._resized = 0
            self.repaint()
        elif self._image_source:
            # Same-size frame: just push the new pixels.
            self._image_source.newPixels()
        #print 'END draw'
    _draw = make_synchronized(_draw)
    def run(self):
        # Game loop: low priority, ~1 ms sleep between ticks; exits when
        # stop() replaces self.thread.
        # NOTE(review): InterruptedException is not imported from
        # java.lang here -- an actual interrupt would raise NameError.
        # TODO confirm.
        me = Thread.currentThread( );
        me.setPriority(Thread.MIN_PRIORITY);
        while self.thread == Thread.currentThread():
            try:
                Thread.sleep(1)
            except InterruptedException:
                return
            self._draw()
    def keyPressed(self, e):
        # Arrow keys steer/accelerate, space jumps (press starts action).
        code = e.getKeyCode()
        if code == KeyEvent.VK_LEFT:
            self.game.begin_left()
        elif code == KeyEvent.VK_RIGHT:
            self.game.begin_right()
        elif code == KeyEvent.VK_UP:
            self.game.begin_accel()
        elif code == KeyEvent.VK_DOWN:
            self.game.begin_decel()
        elif code == KeyEvent.VK_SPACE:
            self.game.begin_jump()
    keyPressed = make_synchronized(keyPressed)
    def keyReleased(self, e):
        # Key release ends the corresponding action.
        code = e.getKeyCode()
        if code == KeyEvent.VK_LEFT:
            self.game.end_left()
        elif code == KeyEvent.VK_RIGHT:
            self.game.end_right()
        elif code == KeyEvent.VK_UP:
            self.game.end_accel()
        elif code == KeyEvent.VK_DOWN:
            self.game.end_decel()
        elif code == KeyEvent.VK_SPACE:
            self.game.end_jump()
    keyReleased = make_synchronized(keyReleased)
    def keyTyped(self, e):
        # 'p' toggles pause.
        if e.getKeyChar() == 'p' or e.getKeyChar() == 'P':
            self.game.toggle_pause()
    keyTyped = make_synchronized(keyTyped)
    def componentResized(self, e):
        # Record new size; _draw() rebuilds the image source next frame.
        size = self.getSize()
        gl_draw.reshape(size.width, size.height)
        self._resized = 1
    componentResized = make_synchronized(componentResized)
    def componentMoved(self, e):
        pass # required for ComponentListener interface
    def componentHidden(self, e):
        pass # required for ComponentListener interface
    def componentShown(self, e):
        pass # required for ComponentListener interface
|
borsboom/babal
|
src/JGLBabalCanvas.py
|
Python
|
gpl-2.0
| 3,751
|
# coding=utf8
#
# Copyright 2013 Dreamlab Onet.pl
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation;
# version 3.0.
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, visit
#
# http://www.gnu.org/licenses/lgpl.txt
#
import _pythonpath
from nose.tools import assert_equals
from nose.tools import assert_true
from rmock.core import RmockData
def test_rmockdata_calls():
    """register_call records calls in order; get_calls filters by name and
    exposes funcname/args/kwargs fields on each recorded call."""
    rmock_data = RmockData()
    rmock_data.register_call('func', (1, 2, 3), {'a': 1})
    rmock_data.register_call('func', (3,), {'a': 1})
    assert_equals(rmock_data.get_calls('func2'), [])
    assert_equals(rmock_data.get_calls('func'),
                  [((1, 2, 3), {'a': 1}),
                   ((3,), {'a': 1})])
    assert_equals(rmock_data.get_calls(),
                  [('func', (1, 2, 3), {'a': 1}),
                   ('func', (3,), {'a': 1})])
    call = rmock_data.get_calls()[0]
    assert_equals(call.funcname, 'func')
    assert_equals(call.args, (1, 2, 3))
    assert_equals(call.kwargs, {'a': 1})
def test_rmockdata_results():
    """Parameterized results take precedence over generic ones; unknown
    functions yield None."""
    rmock_data = RmockData()
    rmock_data.set_result('func', 'func_result')
    assert_equals(rmock_data.get_result('func'), 'func_result')
    assert_equals(rmock_data.get_result('func', (20,), dict(a=30)), 'func_result')
    rmock_data.set_result_with_params('func', 'func_result_spec', (20,), {'a': 30})
    assert_equals(rmock_data.get_result('func', (20,), dict(a=30)), 'func_result_spec')
    assert_equals(rmock_data.get_result('func22', (20,), dict(a=30)), None)
    assert_equals(rmock_data.get_result('func22', (20,), dict(a=30)), None)
    rmock_data.set_result('func3', 'generic')
    rmock_data.set_result_with_params('func3', 'no args', (), {}, )
    rmock_data.set_result_with_params('func3', 'one arg', (), {'a': 10}, )
    assert_equals(rmock_data.get_result('func3'), 'no args')
    assert_equals(rmock_data.get_result('func3', (), dict(a=10)), 'one arg')
    assert_equals(rmock_data.get_result('func3', (), dict(a=10, b=20)), 'generic')
def test_rmockdata_result_replace():
    """Re-registering a result (generic or parameterized) replaces the old
    one rather than adding a second entry."""
    rmock_data = RmockData()
    rmock_data.set_result('func', 'func_result1')
    assert_equals(rmock_data.get_result('func'), 'func_result1')
    rmock_data.set_result('func', 'func_result2')
    assert_equals(rmock_data.get_result('func'), 'func_result2')
    assert_equals(len(rmock_data.get_all_results('func')), 1)
    rmock_data.set_result_with_params('func2', 'func2_result1', (), {'a': 10})
    assert_equals(rmock_data.get_result('func2', None, {'a': 10}), 'func2_result1')
    rmock_data.set_result_with_params('func2', 'func2_result2', (), {'a': 10})
    assert_equals(rmock_data.get_result('func2', None, {'a': 10}), 'func2_result2')
    assert_equals(len(rmock_data.get_all_results('func2')), 1)
def test_rmockdata_calls_and_results():
    """register_call_and_get_result both records the call and returns the
    matching (parameterized if available) result."""
    rmock_data = RmockData()
    rmock_data.set_result('myf', 123)
    rmock_data.set_result_with_params('myf', 213, ('s',), {'z': 11})
    assert_equals(rmock_data.register_call_and_get_result('myf', ('x',), {}),
                  123)
    assert_equals(rmock_data.register_call_and_get_result('myf', ('s',), {'z': 11}),
                  213)
    assert_equals(rmock_data.get_calls('myf'), [(('x',), {}),
                                                (('s',), {'z': 11})])
def test_rmockdata_default_return_value():
    """The default result is returned for unknown functions but never
    overrides an explicitly registered result."""
    rmock_data = RmockData()
    assert_equals(rmock_data.get_result('func'), None)
    rmock_data.set_default_result(123)
    assert_equals(rmock_data.get_result('func'), 123)
    rmock_data.set_default_result(None)
    assert_equals(rmock_data.get_result('func'), None)
    rmock_data.set_default_result(123)
    rmock_data.set_result('func', 444)
    assert_equals(rmock_data.get_result('func'), 444)
    assert_equals(rmock_data.get_result('func2'), 123)
|
tikan/rmock
|
tests/unit_tests/test_rmock_data.py
|
Python
|
lgpl-3.0
| 4,347
|
from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Downgrade pybindgen wrapper-generation errors to warnings.

    Returning True tells pybindgen to skip the offending wrapper and
    continue generating the rest of the module.
    """
    def handle_error(self, wrapper, exception, traceback_):
        warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
        return True
# Install the lenient handler for the whole generation run.
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Create the root pybindgen Module for the ns3 virtual-net-device bindings."""
    root_module = Module('ns.virtual_net_device', cpp_namespace='::ns3')
    return root_module
def register_types(module):
root_module = module.get_root()
## address.h (module 'network'): ns3::Address [class]
module.add_class('Address', import_from_module='ns.network')
## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
module.add_class('AttributeConstructionList', import_from_module='ns.core')
## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator', u'ns3::AttributeConstructionList::CIterator')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator*', u'ns3::AttributeConstructionList::CIterator*')
typehandlers.add_type_alias(u'std::list< ns3::AttributeConstructionList::Item > const_iterator&', u'ns3::AttributeConstructionList::CIterator&')
## buffer.h (module 'network'): ns3::Buffer [class]
module.add_class('Buffer', import_from_module='ns.network')
## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
## packet.h (module 'network'): ns3::ByteTagIterator [class]
module.add_class('ByteTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
module.add_class('ByteTagList', import_from_module='ns.network')
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
## callback.h (module 'core'): ns3::CallbackBase [class]
module.add_class('CallbackBase', import_from_module='ns.core')
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeChecker> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeChecker'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::AttributeValue> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::AttributeValue'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::CallbackImplBase> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Hash::Implementation> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::NixVector> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::NixVector'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::Packet> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::Packet'])
## default-deleter.h (module 'core'): ns3::DefaultDeleter<ns3::TraceSourceAccessor> [struct]
module.add_class('DefaultDeleter', import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor'])
## hash.h (module 'core'): ns3::Hasher [class]
module.add_class('Hasher', import_from_module='ns.core')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
module.add_class('Ipv4Address', import_from_module='ns.network')
## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
module.add_class('Ipv4Mask', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
module.add_class('Ipv6Address', import_from_module='ns.network')
## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
module.add_class('Ipv6Prefix', import_from_module='ns.network')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
module.add_class('Mac48Address', import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )', u'ns3::Mac48Address::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )*', u'ns3::Mac48Address::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Mac48Address )&', u'ns3::Mac48Address::TracedCallback&')
## mac48-address.h (module 'network'): ns3::Mac48Address [class]
root_module['ns3::Mac48Address'].implicitly_converts_to(root_module['ns3::Address'])
## mac8-address.h (module 'network'): ns3::Mac8Address [class]
module.add_class('Mac8Address', import_from_module='ns.network')
## mac8-address.h (module 'network'): ns3::Mac8Address [class]
root_module['ns3::Mac8Address'].implicitly_converts_to(root_module['ns3::Address'])
## object-base.h (module 'core'): ns3::ObjectBase [class]
module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
## object.h (module 'core'): ns3::ObjectDeleter [struct]
module.add_class('ObjectDeleter', import_from_module='ns.core')
## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
module.add_class('PacketMetadata', import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item::ItemType [enumeration]
module.add_enum('ItemType', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
## packet.h (module 'network'): ns3::PacketTagIterator [class]
module.add_class('PacketTagIterator', import_from_module='ns.network')
## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
module.add_class('PacketTagList', import_from_module='ns.network')
## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## tag.h (module 'network'): ns3::Tag [class]
module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
module.add_class('TagBuffer', import_from_module='ns.network')
## type-id.h (module 'core'): ns3::TypeId [class]
module.add_class('TypeId', import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::SupportLevel [enumeration]
module.add_enum('SupportLevel', ['SUPPORTED', 'DEPRECATED', 'OBSOLETE'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
typehandlers.add_type_alias(u'uint32_t', u'ns3::TypeId::hash_t')
typehandlers.add_type_alias(u'uint32_t*', u'ns3::TypeId::hash_t*')
typehandlers.add_type_alias(u'uint32_t&', u'ns3::TypeId::hash_t&')
## empty.h (module 'core'): ns3::empty [class]
module.add_class('empty', import_from_module='ns.core')
## chunk.h (module 'network'): ns3::Chunk [class]
module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
## header.h (module 'network'): ns3::Header [class]
module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## object.h (module 'core'): ns3::Object [class]
module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
## object.h (module 'core'): ns3::Object::AggregateIterator [class]
module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
## trailer.h (module 'network'): ns3::Trailer [class]
module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
## attribute.h (module 'core'): ns3::AttributeAccessor [class]
module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
## attribute.h (module 'core'): ns3::AttributeChecker [class]
module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
## attribute.h (module 'core'): ns3::AttributeValue [class]
module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
## callback.h (module 'core'): ns3::CallbackChecker [class]
module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## callback.h (module 'core'): ns3::CallbackImplBase [class]
module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
## callback.h (module 'core'): ns3::CallbackValue [class]
module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## attribute.h (module 'core'): ns3::EmptyAttributeAccessor [class]
module.add_class('EmptyAttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::AttributeAccessor'])
## attribute.h (module 'core'): ns3::EmptyAttributeChecker [class]
module.add_class('EmptyAttributeChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## mac48-address.h (module 'network'): ns3::Mac48AddressChecker [class]
module.add_class('Mac48AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## mac48-address.h (module 'network'): ns3::Mac48AddressValue [class]
module.add_class('Mac48AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## net-device.h (module 'network'): ns3::NetDevice [class]
module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
typehandlers.add_type_alias(u'void ( * ) ( )', u'ns3::NetDevice::LinkChangeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( )*', u'ns3::NetDevice::LinkChangeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( )&', u'ns3::NetDevice::LinkChangeTracedCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::ReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::ReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::ReceiveCallback&')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::NetDevice::PromiscReceiveCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::NetDevice::PromiscReceiveCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::NetDevice::PromiscReceiveCallback&')
## nix-vector.h (module 'network'): ns3::NixVector [class]
module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
## node.h (module 'network'): ns3::Node [class]
module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::ProtocolHandler')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::ProtocolHandler*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::ProtocolHandler&')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::Node::DeviceAdditionListener')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::Node::DeviceAdditionListener*')
typehandlers.add_type_alias(u'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::Node::DeviceAdditionListener&')
## packet.h (module 'network'): ns3::Packet [class]
module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )', u'ns3::Packet::TracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )*', u'ns3::Packet::TracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > )&', u'ns3::Packet::TracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )', u'ns3::Packet::AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )*', u'ns3::Packet::AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Address const & )&', u'ns3::Packet::AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )', u'ns3::Packet::TwoAddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )*', u'ns3::Packet::TwoAddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const > const, ns3::Address const &, ns3::Address const & )&', u'ns3::Packet::TwoAddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )', u'ns3::Packet::Mac48AddressTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )*', u'ns3::Packet::Mac48AddressTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, ns3::Mac48Address )&', u'ns3::Packet::Mac48AddressTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )', u'ns3::Packet::SizeTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )*', u'ns3::Packet::SizeTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( uint32_t, uint32_t )&', u'ns3::Packet::SizeTracedCallback&')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )', u'ns3::Packet::SinrTracedCallback')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )*', u'ns3::Packet::SinrTracedCallback*')
typehandlers.add_type_alias(u'void ( * ) ( ns3::Ptr< ns3::Packet const >, double )&', u'ns3::Packet::SinrTracedCallback&')
## type-id.h (module 'core'): ns3::TypeIdChecker [class]
module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
## type-id.h (module 'core'): ns3::TypeIdValue [class]
module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
## virtual-net-device.h (module 'virtual-net-device'): ns3::VirtualNetDevice [class]
module.add_class('VirtualNetDevice', parent=root_module['ns3::NetDevice'])
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::Packet >, ns3::Address const &, ns3::Address const &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', u'ns3::VirtualNetDevice::SendCallback')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::Packet >, ns3::Address const &, ns3::Address const &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >*', u'ns3::VirtualNetDevice::SendCallback*')
typehandlers.add_type_alias(u'ns3::Callback< bool, ns3::Ptr< ns3::Packet >, ns3::Address const &, ns3::Address const &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >&', u'ns3::VirtualNetDevice::SendCallback&')
## address.h (module 'network'): ns3::AddressChecker [class]
module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
## address.h (module 'network'): ns3::AddressValue [class]
module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
## callback.h (module 'core'): ns3::CallbackImpl<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<bool, ns3::Ptr<ns3::Packet>, const ns3::Address &, const ns3::Address &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['bool', 'ns3::Ptr<ns3::Packet>', 'const ns3::Address &', 'const ns3::Address &', 'unsigned short', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['ns3::ObjectBase *', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<const ns3::Packet>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::Ptr<const ns3::Packet>', 'unsigned short', 'const ns3::Address &', 'const ns3::Address &', 'ns3::NetDevice::PacketType', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> [class]
module.add_class('CallbackImpl', import_from_module='ns.core', template_parameters=['void', 'ns3::Ptr<ns3::NetDevice>', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty', 'ns3::empty'], parent=root_module['ns3::CallbackImplBase'])
## Register a nested module for the namespace FatalImpl
nested_module = module.add_cpp_namespace('FatalImpl')
register_types_ns3_FatalImpl(nested_module)
## Register a nested module for the namespace Hash
nested_module = module.add_cpp_namespace('Hash')
register_types_ns3_Hash(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register types for the ns3::FatalImpl nested namespace.

    Auto-generated by PyBindGen; this namespace currently registers no
    classes of its own, so only the root-module lookup is emitted.
    """
    root_module = module.get_root()
def register_types_ns3_Hash(module):
    """Register types for the ns3::Hash nested namespace.

    Auto-generated by PyBindGen.  Registers the Hash::Implementation base
    class (it must exist before the ns3::Hash::Function classes that derive
    from it are registered), the hash-function pointer type aliases, and
    then recurses into the nested ns3::Hash::Function namespace.
    """
    root_module = module.get_root()
    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, std::size_t const )', u'ns3::Hash::Hash32Function_ptr')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, std::size_t const )*', u'ns3::Hash::Hash32Function_ptr*')
    typehandlers.add_type_alias(u'uint32_t ( * ) ( char const *, std::size_t const )&', u'ns3::Hash::Hash32Function_ptr&')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, std::size_t const )', u'ns3::Hash::Hash64Function_ptr')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, std::size_t const )*', u'ns3::Hash::Hash64Function_ptr*')
    typehandlers.add_type_alias(u'uint64_t ( * ) ( char const *, std::size_t const )&', u'ns3::Hash::Hash64Function_ptr&')
    ## Register a nested module for the namespace Function
    nested_module = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(nested_module)
def register_types_ns3_Hash_Function(module):
    """Register types for the ns3::Hash::Function nested namespace.

    Auto-generated by PyBindGen.  Each concrete hash implementation derives
    from ns3::Hash::Implementation, which must already be present in the
    root module's type registry (registered by register_types_ns3_Hash).
    """
    root_module = module.get_root()
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
    module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
    module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
    module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
    module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_methods(root_module):
    """Register the methods of every wrapped class on the root module.

    Auto-generated by PyBindGen.  Dispatches to one register_* helper per
    class; the call order mirrors the order in which the classes were
    registered and should not be changed.
    """
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
    register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
    register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
    register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
    register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
    register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
    register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3DefaultDeleter__Ns3AttributeAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeAccessor >'])
    register_Ns3DefaultDeleter__Ns3AttributeChecker_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeChecker >'])
    register_Ns3DefaultDeleter__Ns3AttributeValue_methods(root_module, root_module['ns3::DefaultDeleter< ns3::AttributeValue >'])
    register_Ns3DefaultDeleter__Ns3CallbackImplBase_methods(root_module, root_module['ns3::DefaultDeleter< ns3::CallbackImplBase >'])
    register_Ns3DefaultDeleter__Ns3HashImplementation_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Hash::Implementation >'])
    register_Ns3DefaultDeleter__Ns3NixVector_methods(root_module, root_module['ns3::DefaultDeleter< ns3::NixVector >'])
    register_Ns3DefaultDeleter__Ns3Packet_methods(root_module, root_module['ns3::DefaultDeleter< ns3::Packet >'])
    register_Ns3DefaultDeleter__Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::DefaultDeleter< ns3::TraceSourceAccessor >'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3Mac48Address_methods(root_module, root_module['ns3::Mac48Address'])
    register_Ns3Mac8Address_methods(root_module, root_module['ns3::Mac8Address'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
    register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
    register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
    register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
    register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
    register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
    register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
    register_Ns3Header_methods(root_module, root_module['ns3::Header'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3EmptyAttributeAccessor_methods(root_module, root_module['ns3::EmptyAttributeAccessor'])
    register_Ns3EmptyAttributeChecker_methods(root_module, root_module['ns3::EmptyAttributeChecker'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3Mac48AddressChecker_methods(root_module, root_module['ns3::Mac48AddressChecker'])
    register_Ns3Mac48AddressValue_methods(root_module, root_module['ns3::Mac48AddressValue'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
    register_Ns3Node_methods(root_module, root_module['ns3::Node'])
    register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3VirtualNetDevice_methods(root_module, root_module['ns3::VirtualNetDevice'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Const_ns3Address___amp___Ns3NetDevicePacketType_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< bool, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3Packet__gt___Const_ns3Address___amp___Const_ns3Address___amp___Unsigned_short_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< bool, ns3::Ptr<ns3::Packet>, const ns3::Address &, const ns3::Address &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Ns3ObjectBase___star___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<const ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Const_ns3Address___amp___Ns3NetDevicePacketType_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, root_module['ns3::CallbackImpl< void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    return
def register_Ns3Address_methods(root_module, cls):
    """Register operators, constructors and member functions of ns3::Address.

    Auto-generated by PyBindGen; each ## comment cites the C++ declaration
    being wrapped, so the call below it should match that signature exactly.
    """
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    cls.add_output_stream_operator()
    ## address.h (module 'network'): ns3::Address::Address() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
    cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [constructor]
    cls.add_constructor([param('ns3::Address const &', 'address')])
    ## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
    cls.add_method('CheckCompatible',
                   'bool',
                   [param('uint8_t', 'type'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyAllFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
    cls.add_method('CopyAllTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint8_t', 'len')],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyFrom',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
    cls.add_method('CopyTo',
                   'uint32_t',
                   [param('uint8_t *', 'buffer')],
                   is_const=True)
    ## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')])
    ## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
    cls.add_method('GetLength',
                   'uint8_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
    cls.add_method('IsInvalid',
                   'bool',
                   [],
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('uint8_t', 'type')],
                   is_const=True)
    ## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
    cls.add_method('Register',
                   'uint8_t',
                   [],
                   is_static=True)
    ## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'buffer')],
                   is_const=True)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Register ns3::AttributeConstructionList bindings
    (attribute-construction-list.h, module 'core')."""
    # Copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    cls.add_constructor([])
    # void Add(std::string name, Ptr<const AttributeChecker> checker,
    #          Ptr<AttributeValue> value)
    cls.add_method(
        'Add', 'void',
        [param('std::string', 'name'),
         param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'),
         param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    # Const iteration over the stored (name, checker, value) items.
    cls.add_method('Begin', 'ns3::AttributeConstructionList::CIterator', [],
                   is_const=True)
    cls.add_method('End', 'ns3::AttributeConstructionList::CIterator', [],
                   is_const=True)
    # Ptr<AttributeValue> Find(Ptr<const AttributeChecker> checker) const
    cls.add_method(
        'Find', 'ns3::Ptr< ns3::AttributeValue >',
        [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
        is_const=True)
    return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Register ns3::AttributeConstructionList::Item bindings
    (attribute-construction-list.h, module 'core')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    # Public data members of the Item struct, in the original order.
    for attr, cpp_type in (
            ('checker', 'ns3::Ptr< ns3::AttributeChecker const >'),
            ('name', 'std::string'),
            ('value', 'ns3::Ptr< ns3::AttributeValue >')):
        cls.add_instance_attribute(attr, cpp_type, is_const=False)
    return
def register_Ns3Buffer_methods(root_module, cls):
    """Register ns3::Buffer bindings (buffer.h, module 'network')."""
    # Constructors: copy, default, sized, and sized-with-initialize-flag.
    cls.add_constructor([param('ns3::Buffer const &', 'o')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'dataSize')])
    cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
    # (method name, return type, parameters, extra keyword arguments);
    # registration order is preserved from the original generated code.
    methods = [
        ('AddAtEnd', 'void', [param('uint32_t', 'end')], {}),
        ('AddAtEnd', 'void', [param('ns3::Buffer const &', 'o')], {}),
        ('AddAtStart', 'void', [param('uint32_t', 'start')], {}),
        ('Begin', 'ns3::Buffer::Iterator', [], {'is_const': True}),
        ('CopyData', 'void',
         [param('std::ostream *', 'os'), param('uint32_t', 'size')],
         {'is_const': True}),
        ('CopyData', 'uint32_t',
         [param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
         {'is_const': True}),
        ('CreateFragment', 'ns3::Buffer',
         [param('uint32_t', 'start'), param('uint32_t', 'length')],
         {'is_const': True}),
        ('Deserialize', 'uint32_t',
         [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')], {}),
        ('End', 'ns3::Buffer::Iterator', [], {'is_const': True}),
        ('GetSerializedSize', 'uint32_t', [], {'is_const': True}),
        ('GetSize', 'uint32_t', [], {'is_const': True}),
        ('PeekData', 'uint8_t const *', [], {'is_const': True}),
        ('RemoveAtEnd', 'void', [param('uint32_t', 'end')], {}),
        ('RemoveAtStart', 'void', [param('uint32_t', 'start')], {}),
        ('Serialize', 'uint32_t',
         [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
         {'is_const': True}),
    ]
    for name, retval, args, kwargs in methods:
        cls.add_method(name, retval, args, **kwargs)
    return
def register_Ns3BufferIterator_methods(root_module, cls):
    """Register ns3::Buffer::Iterator bindings (buffer.h, module 'network')."""
    # Copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
    cls.add_constructor([])
    # (method name, return type, parameters, extra keyword arguments);
    # registration order is preserved from the original generated code.
    methods = [
        ('CalculateIpChecksum', 'uint16_t', [param('uint16_t', 'size')], {}),
        ('CalculateIpChecksum', 'uint16_t',
         [param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')], {}),
        ('GetDistanceFrom', 'uint32_t',
         [param('ns3::Buffer::Iterator const &', 'o')], {'is_const': True}),
        ('GetRemainingSize', 'uint32_t', [], {'is_const': True}),
        ('GetSize', 'uint32_t', [], {'is_const': True}),
        ('IsEnd', 'bool', [], {'is_const': True}),
        ('IsStart', 'bool', [], {'is_const': True}),
        ('Next', 'void', [], {}),
        ('Next', 'void', [param('uint32_t', 'delta')], {}),
        ('PeekU8', 'uint8_t', [], {}),
        ('Prev', 'void', [], {}),
        ('Prev', 'void', [param('uint32_t', 'delta')], {}),
        ('Read', 'void',
         [param('uint8_t *', 'buffer'), param('uint32_t', 'size')], {}),
        ('Read', 'void',
         [param('ns3::Buffer::Iterator', 'start'), param('uint32_t', 'size')], {}),
        ('ReadLsbtohU16', 'uint16_t', [], {}),
        ('ReadLsbtohU32', 'uint32_t', [], {}),
        ('ReadLsbtohU64', 'uint64_t', [], {}),
        ('ReadNtohU16', 'uint16_t', [], {}),
        ('ReadNtohU32', 'uint32_t', [], {}),
        ('ReadNtohU64', 'uint64_t', [], {}),
        ('ReadU16', 'uint16_t', [], {}),
        ('ReadU32', 'uint32_t', [], {}),
        ('ReadU64', 'uint64_t', [], {}),
        ('ReadU8', 'uint8_t', [], {}),
        ('Write', 'void',
         [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')], {}),
        ('Write', 'void',
         [param('ns3::Buffer::Iterator', 'start'),
          param('ns3::Buffer::Iterator', 'end')], {}),
        ('WriteHtolsbU16', 'void', [param('uint16_t', 'data')], {}),
        ('WriteHtolsbU32', 'void', [param('uint32_t', 'data')], {}),
        ('WriteHtolsbU64', 'void', [param('uint64_t', 'data')], {}),
        ('WriteHtonU16', 'void', [param('uint16_t', 'data')], {}),
        ('WriteHtonU32', 'void', [param('uint32_t', 'data')], {}),
        ('WriteHtonU64', 'void', [param('uint64_t', 'data')], {}),
        ('WriteU16', 'void', [param('uint16_t', 'data')], {}),
        ('WriteU32', 'void', [param('uint32_t', 'data')], {}),
        ('WriteU64', 'void', [param('uint64_t', 'data')], {}),
        ('WriteU8', 'void', [param('uint8_t', 'data')], {}),
        ('WriteU8', 'void',
         [param('uint8_t', 'data'), param('uint32_t', 'len')], {}),
    ]
    for name, retval, args, kwargs in methods:
        cls.add_method(name, retval, args, **kwargs)
    return
def register_Ns3ByteTagIterator_methods(root_module, cls):
    """Register ns3::ByteTagIterator bindings (packet.h, module 'network')."""
    cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
    # bool HasNext() const — whether another tag item is available.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # Item Next() — advance and return the next tag item.
    cls.add_method('Next', 'ns3::ByteTagIterator::Item', [])
    return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
    """Register ns3::ByteTagIterator::Item bindings (packet.h, module 'network')."""
    cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
    # Byte offsets delimiting the range the tag covers.
    cls.add_method('GetEnd', 'uint32_t', [], is_const=True)
    cls.add_method('GetStart', 'uint32_t', [], is_const=True)
    # void GetTag(ns3::Tag & tag) const — copy the tag into the caller's object.
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3ByteTagList_methods(root_module, cls):
    """Register ns3::ByteTagList bindings (byte-tag-list.h, module 'network')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
    # TagBuffer Add(TypeId tid, uint32_t bufferSize, int32_t start, int32_t end)
    cls.add_method(
        'Add', 'ns3::TagBuffer',
        [param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'),
         param('int32_t', 'start'), param('int32_t', 'end')])
    # void Add(ByteTagList const & o) — merge another tag list.
    cls.add_method('Add', 'void', [param('ns3::ByteTagList const &', 'o')])
    cls.add_method('AddAtEnd', 'void', [param('int32_t', 'appendOffset')])
    cls.add_method('AddAtStart', 'void', [param('int32_t', 'prependOffset')])
    cls.add_method('Adjust', 'void', [param('int32_t', 'adjustment')])
    # Iterator Begin(int32_t offsetStart, int32_t offsetEnd) const
    cls.add_method(
        'Begin', 'ns3::ByteTagList::Iterator',
        [param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
        is_const=True)
    cls.add_method('RemoveAll', 'void', [])
    return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
    """Register ns3::ByteTagList::Iterator bindings
    (byte-tag-list.h, module 'network')."""
    cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
    cls.add_method('GetOffsetStart', 'uint32_t', [], is_const=True)
    # bool HasNext() const — whether another item is available.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # Item Next() — advance and return the next item.
    cls.add_method('Next', 'ns3::ByteTagList::Iterator::Item', [])
    return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
    """Register ns3::ByteTagList::Iterator::Item bindings
    (byte-tag-list.h, module 'network')."""
    # Copy constructor, then the TagBuffer-taking constructor.
    cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
    cls.add_constructor([param('ns3::TagBuffer', 'buf')])
    # Public data members of the Item struct, in the original order.
    for attr, cpp_type in (
            ('buf', 'ns3::TagBuffer'),
            ('end', 'int32_t'),
            ('size', 'uint32_t'),
            ('start', 'int32_t'),
            ('tid', 'ns3::TypeId')):
        cls.add_instance_attribute(attr, cpp_type, is_const=False)
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Register ns3::CallbackBase bindings (callback.h, module 'core')."""
    # Copy constructor, then the default constructor.
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    cls.add_constructor([])
    # Ptr<CallbackImplBase> GetImpl() const — access the wrapped implementation.
    cls.add_method('GetImpl', 'ns3::Ptr< ns3::CallbackImplBase >', [],
                   is_const=True)
    # Protected constructor taking the implementation pointer.
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    return
def register_Ns3DefaultDeleter__Ns3AttributeAccessor_methods(root_module, cls):
    """Register ns3::DefaultDeleter<ns3::AttributeAccessor> bindings
    (default-deleter.h, module 'core')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::AttributeAccessor > const &', 'arg0')])
    # static void Delete(ns3::AttributeAccessor * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::AttributeAccessor *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3AttributeChecker_methods(root_module, cls):
    """Register ns3::DefaultDeleter<ns3::AttributeChecker> bindings
    (default-deleter.h, module 'core')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::AttributeChecker > const &', 'arg0')])
    # static void Delete(ns3::AttributeChecker * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::AttributeChecker *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3AttributeValue_methods(root_module, cls):
    """Register ns3::DefaultDeleter<ns3::AttributeValue> bindings
    (default-deleter.h, module 'core')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::AttributeValue > const &', 'arg0')])
    # static void Delete(ns3::AttributeValue * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::AttributeValue *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3CallbackImplBase_methods(root_module, cls):
    """Register ns3::DefaultDeleter<ns3::CallbackImplBase> bindings
    (default-deleter.h, module 'core')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::CallbackImplBase > const &', 'arg0')])
    # static void Delete(ns3::CallbackImplBase * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::CallbackImplBase *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3HashImplementation_methods(root_module, cls):
    """Register ns3::DefaultDeleter<ns3::Hash::Implementation> bindings
    (default-deleter.h, module 'core')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::Hash::Implementation > const &', 'arg0')])
    # static void Delete(ns3::Hash::Implementation * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::Hash::Implementation *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3NixVector_methods(root_module, cls):
    """Register ns3::DefaultDeleter<ns3::NixVector> bindings
    (default-deleter.h, module 'core')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::NixVector > const &', 'arg0')])
    # static void Delete(ns3::NixVector * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::NixVector *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3Packet_methods(root_module, cls):
    """Register ns3::DefaultDeleter<ns3::Packet> bindings
    (default-deleter.h, module 'core')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::Packet > const &', 'arg0')])
    # static void Delete(ns3::Packet * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::Packet *', 'object')],
                   is_static=True)
    return
def register_Ns3DefaultDeleter__Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register ns3::DefaultDeleter<ns3::TraceSourceAccessor> bindings
    (default-deleter.h, module 'core')."""
    # Default constructor, then copy constructor.
    cls.add_constructor([])
    cls.add_constructor(
        [param('ns3::DefaultDeleter< ns3::TraceSourceAccessor > const &', 'arg0')])
    # static void Delete(ns3::TraceSourceAccessor * object)
    cls.add_method('Delete', 'void',
                   [param('ns3::TraceSourceAccessor *', 'object')],
                   is_static=True)
    return
def register_Ns3Hasher_methods(root_module, cls):
    """Register ns3::Hasher bindings (hash.h, module 'core')."""
    # Constructors: copy, default, and from an implementation pointer.
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    # 32- and 64-bit hash overloads for a raw buffer and for a std::string.
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'),
                    param('std::size_t const', 'size')])
    cls.add_method('GetHash32', 'uint32_t', [param('std::string const', 's')])
    cls.add_method('GetHash64', 'uint64_t',
                   [param('char const *', 'buffer'),
                    param('std::size_t const', 'size')])
    cls.add_method('GetHash64', 'uint64_t', [param('std::string const', 's')])
    # Hasher & clear() — returns a Hasher reference for call chaining.
    cls.add_method('clear', 'ns3::Hasher &', [])
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Register ns3::Ipv4Address bindings (ipv4-address.h, module 'network')."""
    # Stream-output and comparison operators.
    cls.add_output_stream_operator()
    for op in ('==', '!=', '<'):
        cls.add_binary_comparison_operator(op)
    # Constructors: copy, default, from a 32-bit value, from a C string.
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'address')])
    cls.add_constructor([param('char const *', 'address')])
    # (method name, return type, parameters, extra keyword arguments);
    # registration order is preserved from the original generated code.
    methods = [
        ('CombineMask', 'ns3::Ipv4Address',
         [param('ns3::Ipv4Mask const &', 'mask')], {'is_const': True}),
        ('ConvertFrom', 'ns3::Ipv4Address',
         [param('ns3::Address const &', 'address')], {'is_static': True}),
        ('Deserialize', 'ns3::Ipv4Address',
         [param('uint8_t const *', 'buf')], {'is_static': True}),
        ('Get', 'uint32_t', [], {'is_const': True}),
        ('GetAny', 'ns3::Ipv4Address', [], {'is_static': True}),
        ('GetBroadcast', 'ns3::Ipv4Address', [], {'is_static': True}),
        ('GetLoopback', 'ns3::Ipv4Address', [], {'is_static': True}),
        ('GetSubnetDirectedBroadcast', 'ns3::Ipv4Address',
         [param('ns3::Ipv4Mask const &', 'mask')], {'is_const': True}),
        ('GetZero', 'ns3::Ipv4Address', [], {'is_static': True}),
        ('IsAny', 'bool', [], {'is_const': True}),
        ('IsBroadcast', 'bool', [], {'is_const': True}),
        ('IsEqual', 'bool',
         [param('ns3::Ipv4Address const &', 'other')], {'is_const': True}),
        ('IsLocalMulticast', 'bool', [], {'is_const': True}),
        ('IsLocalhost', 'bool', [], {'is_const': True}),
        ('IsMatchingType', 'bool',
         [param('ns3::Address const &', 'address')], {'is_static': True}),
        ('IsMulticast', 'bool', [], {'is_const': True}),
        ('IsSubnetDirectedBroadcast', 'bool',
         [param('ns3::Ipv4Mask const &', 'mask')], {'is_const': True}),
        ('Print', 'void', [param('std::ostream &', 'os')], {'is_const': True}),
        ('Serialize', 'void', [param('uint8_t *', 'buf')], {'is_const': True}),
        ('Set', 'void', [param('uint32_t', 'address')], {}),
        ('Set', 'void', [param('char const *', 'address')], {}),
    ]
    for name, retval, args, kwargs in methods:
        cls.add_method(name, retval, args, **kwargs)
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Bind the ns3::Ipv4Mask API (ipv4-address.h, module 'network') onto *cls*."""
    # Python-visible operators.
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    # Constructors: copy, default, raw 32-bit mask, dotted-quad string.
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'mask')])
    cls.add_constructor([param('char const *', 'mask')])
    # Accessors, well-known masks, predicates and the mutator.
    cls.add_method('Get', 'uint32_t', [], is_const=True)
    cls.add_method('GetInverse', 'uint32_t', [], is_const=True)
    cls.add_method('GetLoopback', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('GetPrefixLength', 'uint16_t', [], is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv4Mask', [], is_static=True)
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv4Mask', 'other')], is_const=True)
    cls.add_method('IsMatch', 'bool', [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')], is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('Set', 'void', [param('uint32_t', 'mask')])
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    """Bind the ns3::Ipv6Address API (ipv6-address.h, module 'network') onto *cls*."""
    # Python-visible operators.
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    # Constructors: default, string form, raw byte buffer, copy by reference and by pointer.
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'address')])
    cls.add_constructor([param('uint8_t *', 'address')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
    cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
    # Prefix combination and Address conversion helpers.
    cls.add_method('CombinePrefix', 'ns3::Ipv6Address', [param('ns3::Ipv6Prefix const &', 'prefix')])
    cls.add_method('ConvertFrom', 'ns3::Ipv6Address', [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('Deserialize', 'ns3::Ipv6Address', [param('uint8_t const *', 'buf')], is_static=True)
    # Well-known multicast / special addresses.
    cls.add_method('GetAllHostsMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAllNodesMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAllRoutersMulticast', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetAny', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    cls.add_method('GetIpv4MappedAddress', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetLoopback', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv6Address', [], is_static=True)
    cls.add_method('GetZero', 'ns3::Ipv6Address', [], is_static=True)
    # Predicates.  IsAllHostsMulticast is marked deprecated upstream.
    cls.add_method('IsAllHostsMulticast', 'bool', [], deprecated=True, is_const=True)
    cls.add_method('IsAllNodesMulticast', 'bool', [], is_const=True)
    cls.add_method('IsAllRoutersMulticast', 'bool', [], is_const=True)
    cls.add_method('IsAny', 'bool', [], is_const=True)
    cls.add_method('IsDocumentation', 'bool', [], is_const=True)
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv6Address const &', 'other')], is_const=True)
    cls.add_method('IsIpv4MappedAddress', 'bool', [], is_const=True)
    cls.add_method('IsLinkLocal', 'bool', [], is_const=True)
    cls.add_method('IsLinkLocalMulticast', 'bool', [], is_const=True)
    cls.add_method('IsLocalhost', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('IsMulticast', 'bool', [], is_const=True)
    cls.add_method('IsSolicitedMulticast', 'bool', [], is_const=True)
    # Autoconfiguration factories, overloaded per MAC address width (16/48/64/8 bits).
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True)
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True)
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True)
    cls.add_method('MakeAutoconfiguredAddress', 'ns3::Ipv6Address', [param('ns3::Mac8Address', 'addr'), param('ns3::Ipv6Address', 'prefix')], is_static=True)
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac16Address', 'mac')], is_static=True)
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac48Address', 'mac')], is_static=True)
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac64Address', 'mac')], is_static=True)
    cls.add_method('MakeAutoconfiguredLinkLocalAddress', 'ns3::Ipv6Address', [param('ns3::Mac8Address', 'mac')], is_static=True)
    cls.add_method('MakeIpv4MappedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv4Address', 'addr')], is_static=True)
    cls.add_method('MakeSolicitedAddress', 'ns3::Ipv6Address', [param('ns3::Ipv6Address', 'addr')], is_static=True)
    # Output and mutators.
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    cls.add_method('Serialize', 'void', [param('uint8_t *', 'buf')], is_const=True)
    cls.add_method('Set', 'void', [param('char const *', 'address')])
    cls.add_method('Set', 'void', [param('uint8_t *', 'address')])
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
    """Bind the ns3::Ipv6Prefix API (ipv6-address.h, module 'network') onto *cls*."""
    # Python-visible operators.
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    # Constructors: default, byte buffer, string, prefix length, copy by ref and by pointer.
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t *', 'prefix')])
    cls.add_constructor([param('char const *', 'prefix')])
    cls.add_constructor([param('uint8_t', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
    cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
    # Accessors, well-known prefixes, predicates and printing.
    cls.add_method('GetBytes', 'void', [param('uint8_t *', 'buf')], is_const=True)
    cls.add_method('GetLoopback', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('GetOnes', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('GetPrefixLength', 'uint8_t', [], is_const=True)
    cls.add_method('GetZero', 'ns3::Ipv6Prefix', [], is_static=True)
    cls.add_method('IsEqual', 'bool', [param('ns3::Ipv6Prefix const &', 'other')], is_const=True)
    cls.add_method('IsMatch', 'bool', [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')], is_const=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True)
    return
def register_Ns3Mac48Address_methods(root_module, cls):
    """Bind the ns3::Mac48Address API (mac48-address.h, module 'network') onto *cls*."""
    # Python-visible operators.
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('<')
    cls.add_output_stream_operator()
    # Constructors: copy, default, "xx:xx:xx:xx:xx:xx" string.
    cls.add_constructor([param('ns3::Mac48Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('char const *', 'str')])
    # Allocation and Address conversion.
    cls.add_method('Allocate', 'ns3::Mac48Address', [], is_static=True)
    cls.add_method('ConvertFrom', 'ns3::Mac48Address', [param('ns3::Address const &', 'address')], is_static=True)
    # Raw buffer transfer.
    cls.add_method('CopyFrom', 'void', [param('uint8_t const *', 'buffer')])
    cls.add_method('CopyTo', 'void', [param('uint8_t *', 'buffer')], is_const=True)
    # Well-known addresses and multicast mapping (overloaded for IPv4/IPv6).
    cls.add_method('GetBroadcast', 'ns3::Mac48Address', [], is_static=True)
    cls.add_method('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv4Address', 'address')], is_static=True)
    cls.add_method('GetMulticast', 'ns3::Mac48Address', [param('ns3::Ipv6Address', 'address')], is_static=True)
    cls.add_method('GetMulticast6Prefix', 'ns3::Mac48Address', [], is_static=True)
    cls.add_method('GetMulticastPrefix', 'ns3::Mac48Address', [], is_static=True)
    # Predicates.
    cls.add_method('IsBroadcast', 'bool', [], is_const=True)
    cls.add_method('IsGroup', 'bool', [], is_const=True)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    return
def register_Ns3Mac8Address_methods(root_module, cls):
    """Bind the ns3::Mac8Address API (mac8-address.h, module 'network') onto *cls*."""
    # Python-visible operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    # Constructors: copy, default, single-byte address.
    cls.add_constructor([param('ns3::Mac8Address const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint8_t', 'addr')])
    # Allocation, conversion, buffer transfer, well-known addresses, type check.
    cls.add_method('Allocate', 'ns3::Mac8Address', [], is_static=True)
    cls.add_method('ConvertFrom', 'ns3::Mac8Address', [param('ns3::Address const &', 'address')], is_static=True)
    cls.add_method('CopyFrom', 'void', [param('uint8_t const *', 'pBuffer')])
    cls.add_method('CopyTo', 'void', [param('uint8_t *', 'pBuffer')], is_const=True)
    cls.add_method('GetBroadcast', 'ns3::Mac8Address', [], is_static=True)
    cls.add_method('IsMatchingType', 'bool', [param('ns3::Address const &', 'address')], is_static=True)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    """Bind the ns3::ObjectBase API (object-base.h, module 'core') onto *cls*."""
    # Constructors: default and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    # Attribute access (plain and fail-safe variants).
    cls.add_method('GetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    cls.add_method('GetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')], is_const=True)
    # Run-time type identification.
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('SetAttribute', 'void', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    cls.add_method('SetAttributeFailSafe', 'bool', [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # Trace source (dis)connection, with and without a context string.
    cls.add_method('TraceConnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceConnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnect', 'bool', [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    cls.add_method('TraceDisconnectWithoutContext', 'bool', [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    # Protected construction hooks.
    cls.add_method('ConstructSelf', 'void', [param('ns3::AttributeConstructionList const &', 'attributes')], visibility='protected')
    cls.add_method('NotifyConstructionCompleted', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectDeleter_methods(root_module, cls):
    """Bind the ns3::ObjectDeleter API (object.h, module 'core') onto *cls*."""
    # Constructors: default and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    # Static disposal entry point used by SimpleRefCount.
    cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True)
    return
def register_Ns3PacketMetadata_methods(root_module, cls):
    """Bind the ns3::PacketMetadata API (packet-metadata.h, module 'network') onto *cls*."""
    # Constructors: (uid, size) and copy.
    cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
    cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
    # Growing the metadata record.
    cls.add_method('AddAtEnd', 'void', [param('ns3::PacketMetadata const &', 'o')])
    cls.add_method('AddHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('AddPaddingAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    # Inspection and fragmentation.
    cls.add_method('BeginItem', 'ns3::PacketMetadata::ItemIterator', [param('ns3::Buffer', 'buffer')], is_const=True)
    cls.add_method('CreateFragment', 'ns3::PacketMetadata', [param('uint32_t', 'start'), param('uint32_t', 'end')], is_const=True)
    cls.add_method('Deserialize', 'uint32_t', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # Global enable switches for metadata tracking.
    cls.add_method('Enable', 'void', [], is_static=True)
    cls.add_method('EnableChecking', 'void', [], is_static=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True)
    cls.add_method('GetUid', 'uint64_t', [], is_const=True)
    # Shrinking the metadata record.
    cls.add_method('RemoveAtEnd', 'void', [param('uint32_t', 'end')])
    cls.add_method('RemoveAtStart', 'void', [param('uint32_t', 'start')])
    cls.add_method('RemoveHeader', 'void', [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    cls.add_method('RemoveTrailer', 'void', [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    cls.add_method('Serialize', 'uint32_t', [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], is_const=True)
    return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
    """Bind the ns3::PacketMetadata::Item struct (packet-metadata.h, module 'network') onto *cls*."""
    # Constructors: default and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
    # Public data members exposed as writable instance attributes.
    cls.add_instance_attribute('current', 'ns3::Buffer::Iterator', is_const=False)
    cls.add_instance_attribute('currentSize', 'uint32_t', is_const=False)
    cls.add_instance_attribute('currentTrimedFromEnd', 'uint32_t', is_const=False)
    cls.add_instance_attribute('currentTrimedFromStart', 'uint32_t', is_const=False)
    cls.add_instance_attribute('isFragment', 'bool', is_const=False)
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    cls.add_instance_attribute('type', 'ns3::PacketMetadata::Item::ItemType', is_const=False)
    return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
    """Bind the ns3::PacketMetadata::ItemIterator API (packet-metadata.h, module 'network') onto *cls*."""
    # Constructors: copy, and (metadata, buffer) pair.
    cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
    cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'), param('ns3::Buffer', 'buffer')])
    # Forward-iteration protocol: HasNext / Next.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketMetadata::Item', [])
    return
def register_Ns3PacketTagIterator_methods(root_module, cls):
    """Bind the ns3::PacketTagIterator API (packet.h, module 'network') onto *cls*."""
    # Copy constructor only; instances are obtained from Packet.
    cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
    # Forward-iteration protocol: HasNext / Next.
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::PacketTagIterator::Item', [])
    return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
    """Bind the ns3::PacketTagIterator::Item API (packet.h, module 'network') onto *cls*."""
    # Copy constructor only; items are produced by PacketTagIterator::Next.
    cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
    # Tag extraction and run-time type query.
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3PacketTagList_methods(root_module, cls):
    """Bind the ns3::PacketTagList API (packet-tag-list.h, module 'network') onto *cls*."""
    # Constructors: default and copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
    # Tag insertion and list-head access.
    cls.add_method('Add', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    cls.add_method('Head', 'ns3::PacketTagList::TagData const *', [], is_const=True)
    # Tag lookup and removal.
    cls.add_method('Peek', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    cls.add_method('Remove', 'bool', [param('ns3::Tag &', 'tag')])
    cls.add_method('RemoveAll', 'void', [])
    cls.add_method('Replace', 'bool', [param('ns3::Tag &', 'tag')])
    return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
    """Register Python bindings for ns3::PacketTagList::TagData (packet-tag-list.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
    # Public data members exposed as mutable instance attributes.
    cls.add_instance_attribute('count', 'uint32_t', is_const=False)
    cls.add_instance_attribute('data', 'uint8_t [ 1 ]', is_const=False)
    cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False)
    cls.add_instance_attribute('size', 'uint32_t', is_const=False)
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Register bindings for SimpleRefCount<Object, ObjectBase, ObjectDeleter> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])  # copy constructor
    return
def register_Ns3Tag_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::Tag base class (tag.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Tag const &', 'arg0')])
    # Pure-virtual (de)serialization interface implemented by concrete tag subclasses.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    # static TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TagBuffer_methods(root_module, cls):
    """Register Python bindings for ns3::TagBuffer (tag-buffer.h, module 'network')."""
    # Constructors: copy, and (start, end) byte-range.
    cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
    cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    # Bulk copy from another TagBuffer.
    cls.add_method('CopyFrom', 'void', [param('ns3::TagBuffer', 'o')])
    # Raw read into a caller-supplied buffer.
    cls.add_method('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    # Typed readers.
    cls.add_method('ReadDouble', 'double', [])
    cls.add_method('ReadU16', 'uint16_t', [])
    cls.add_method('ReadU32', 'uint32_t', [])
    cls.add_method('ReadU64', 'uint64_t', [])
    cls.add_method('ReadU8', 'uint8_t', [])
    # void TrimAtEnd(uint32_t trim)
    cls.add_method('TrimAtEnd', 'void', [param('uint32_t', 'trim')])
    # Raw write from a caller-supplied buffer.
    cls.add_method('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    # Typed writers.
    cls.add_method('WriteDouble', 'void', [param('double', 'v')])
    cls.add_method('WriteU16', 'void', [param('uint16_t', 'v')])
    cls.add_method('WriteU32', 'void', [param('uint32_t', 'v')])
    cls.add_method('WriteU64', 'void', [param('uint64_t', 'v')])
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'v')])
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Register Python bindings for ns3::TypeId (type-id.h, module 'core').

    Registration order is preserved from the generated original; pybindgen
    resolves overloads (AddAttribute, AddTraceSource, Deserialize, ...) in
    registration order.
    """
    # Comparison and stream operators.
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<')
    # Constructors: by name, default, and copy.
    cls.add_constructor([param('char const *', 'name')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    # Attribute registration (plain overload, then the uint32_t-flags overload).
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    cls.add_method('AddAttribute', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    # Trace-source registration (deprecated 3-argument form, then full form).
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')], deprecated=True)
    cls.add_method('AddTraceSource', 'ns3::TypeId', [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor'), param('std::string', 'callback'), param('ns3::TypeId::SupportLevel', 'supportLevel', default_value='::ns3::TypeId::SupportLevel::SUPPORTED'), param('std::string const &', 'supportMsg', default_value='""')])
    # Attribute introspection.
    cls.add_method('GetAttribute', 'ns3::TypeId::AttributeInformation', [param('std::size_t', 'i')], is_const=True)
    cls.add_method('GetAttributeFullName', 'std::string', [param('std::size_t', 'i')], is_const=True)
    cls.add_method('GetAttributeN', 'std::size_t', [], is_const=True)
    # Metadata accessors.
    cls.add_method('GetConstructor', 'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', [], is_const=True)
    cls.add_method('GetGroupName', 'std::string', [], is_const=True)
    cls.add_method('GetHash', 'ns3::TypeId::hash_t', [], is_const=True)
    cls.add_method('GetName', 'std::string', [], is_const=True)
    cls.add_method('GetParent', 'ns3::TypeId', [], is_const=True)
    cls.add_method('GetRegistered', 'ns3::TypeId', [param('uint16_t', 'i')], is_static=True)
    cls.add_method('GetRegisteredN', 'uint16_t', [], is_static=True)
    cls.add_method('GetSize', 'std::size_t', [], is_const=True)
    # Trace-source introspection.
    cls.add_method('GetTraceSource', 'ns3::TypeId::TraceSourceInformation', [param('std::size_t', 'i')], is_const=True)
    cls.add_method('GetTraceSourceN', 'std::size_t', [], is_const=True)
    cls.add_method('GetUid', 'uint16_t', [], is_const=True)
    # Predicates and documentation control.
    cls.add_method('HasConstructor', 'bool', [], is_const=True)
    cls.add_method('HasParent', 'bool', [], is_const=True)
    cls.add_method('HideFromDocumentation', 'ns3::TypeId', [])
    cls.add_method('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], is_const=True)
    # Lookups by name and hash.
    cls.add_method('LookupAttributeByName', 'bool', [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)], is_const=True)
    cls.add_method('LookupByHash', 'ns3::TypeId', [param('uint32_t', 'hash')], is_static=True)
    cls.add_method('LookupByHashFailSafe', 'bool', [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')], is_static=True)
    cls.add_method('LookupByName', 'ns3::TypeId', [param('std::string', 'name')], is_static=True)
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name')], is_const=True)
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name'), param('ns3::TypeId::TraceSourceInformation *', 'info')], is_const=True)
    cls.add_method('MustHideFromDocumentation', 'bool', [], is_const=True)
    # Mutators.
    cls.add_method('SetAttributeInitialValue', 'bool', [param('std::size_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    cls.add_method('SetGroupName', 'ns3::TypeId', [param('std::string', 'groupName')])
    cls.add_method('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')])
    cls.add_method('SetSize', 'ns3::TypeId', [param('std::size_t', 'size')])
    cls.add_method('SetUid', 'void', [param('uint16_t', 'uid')])
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register Python bindings for ns3::TypeId::AttributeInformation (type-id.h, module 'core')."""
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation() [constructor]
    cls.add_constructor([])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::AttributeInformation(ns3::TypeId::AttributeInformation const & arg0) [constructor]
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::accessor [variable]
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::flags [variable]
    cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::help [variable]
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::initialValue [variable]
    cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::originalInitialValue [variable]
    cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportLevel [variable]
    cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation::supportMsg [variable]
    cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register Python bindings for ns3::TypeId::TraceSourceInformation (type-id.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    # Public data members exposed as mutable instance attributes.
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
    cls.add_instance_attribute('callback', 'std::string', is_const=False)
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    cls.add_instance_attribute('supportLevel', 'ns3::TypeId::SupportLevel', is_const=False)
    cls.add_instance_attribute('supportMsg', 'std::string', is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register Python bindings for ns3::empty (empty.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::empty const &', 'arg0')])  # copy constructor
    return
def register_Ns3Chunk_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::Chunk base class (chunk.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
    # Deserialize overloads: start-only (pure virtual) and (start, end) range.
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')], is_virtual=True)
    # static TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Header_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::Header base class (header.h, module 'network')."""
    cls.add_output_stream_operator()
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Header const &', 'arg0')])
    # Pure-virtual serialization interface implemented by concrete headers.
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    # static TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Object_methods(root_module, cls):
    """Register Python bindings for ns3::Object (object.h, module 'core')."""
    # Public default constructor.
    cls.add_constructor([])
    # Aggregation and lifecycle API.
    cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')])
    cls.add_method('Dispose', 'void', [])
    cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Initialize', 'void', [])
    cls.add_method('IsInitialized', 'bool', [], is_const=True)
    # Protected copy constructor and lifecycle hooks overridable by subclasses.
    cls.add_constructor([param('ns3::Object const &', 'o')], visibility='protected')
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register Python bindings for ns3::Object::AggregateIterator (object.h, module 'core')."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    cls.add_constructor([])
    # Iteration interface: HasNext() / Next().
    cls.add_method('HasNext', 'bool', [], is_const=True)
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
    return
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register bindings for SimpleRefCount<AttributeAccessor, empty, DefaultDeleter<AttributeAccessor>> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register bindings for SimpleRefCount<AttributeChecker, empty, DefaultDeleter<AttributeChecker>> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register bindings for SimpleRefCount<AttributeValue, empty, DefaultDeleter<AttributeValue>> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register bindings for SimpleRefCount<CallbackImplBase, empty, DefaultDeleter<CallbackImplBase>> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register bindings for SimpleRefCount<Hash::Implementation, empty, DefaultDeleter<Hash::Implementation>> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    """Register bindings for SimpleRefCount<NixVector, empty, DefaultDeleter<NixVector>> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
    """Register bindings for SimpleRefCount<Packet, empty, DefaultDeleter<Packet>> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])  # copy constructor
    return
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register bindings for SimpleRefCount<TraceSourceAccessor, empty, DefaultDeleter<TraceSourceAccessor>> (simple-ref-count.h, module 'core')."""
    cls.add_constructor([])  # default constructor
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])  # copy constructor
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register Python bindings for the abstract ns3::TraceSourceAccessor (trace-source-accessor.h, module 'core')."""
    # Copy and default constructors.
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    cls.add_constructor([])
    # Pure-virtual connect/disconnect interface, with and without a context string.
    cls.add_method('Connect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('ConnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Disconnect', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('DisconnectWithoutContext', 'bool', [param('ns3::ObjectBase *', 'obj', transfer_ownership=False), param('ns3::CallbackBase const &', 'cb')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Trailer_methods(root_module, cls):
    """Register ns3::Trailer bindings: stream operator, constructors, and the
    (de)serialization interface (Deserialize overloads, GetSerializedSize,
    Print, Serialize) plus the static GetTypeId.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    cls.add_output_stream_operator()
    ## trailer.h (module 'network'): ns3::Trailer::Trailer() [constructor]
    cls.add_constructor([])
    ## trailer.h (module 'network'): ns3::Trailer::Trailer(ns3::Trailer const & arg0) [constructor]
    cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
    ## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator end) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'end')],
                   is_pure_virtual=True, is_virtual=True)
    ## trailer.h (module 'network'): uint32_t ns3::Trailer::Deserialize(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')],
                   is_virtual=True)
    ## trailer.h (module 'network'): uint32_t ns3::Trailer::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trailer.h (module 'network'): static ns3::TypeId ns3::Trailer::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## trailer.h (module 'network'): void ns3::Trailer::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## trailer.h (module 'network'): void ns3::Trailer::Serialize(ns3::Buffer::Iterator start) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register ns3::AttributeAccessor bindings: constructors and the
    pure-virtual Get/Set/HasGetter/HasSetter accessor interface.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor(ns3::AttributeAccessor const & arg0) [constructor]
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeAccessor::AttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set',
                   'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False), param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register ns3::AttributeChecker bindings: constructors and the checker
    interface (Check/Copy/Create/CreateValidValue plus type-info queries).

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker(ns3::AttributeChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeChecker::AttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check',
                   'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy',
                   'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::Create() const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeChecker::CreateValidValue(ns3::AttributeValue const & value) const [member function]
    cls.add_method('CreateValidValue',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register ns3::AttributeValue bindings: constructors and the
    pure-virtual Copy / DeserializeFromString / SerializeToString interface.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue(ns3::AttributeValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::AttributeValue::AttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::AttributeValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::AttributeValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register ns3::CallbackChecker bindings (callback.h, module 'core'):
    only the default and copy constructors are exposed.
    """
    # Default constructor first, then the copy constructor — order preserved.
    for ctor_args in ([], [param('ns3::CallbackChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register ns3::CallbackImplBase bindings: constructors, GetTypeid /
    IsEqual, and the protected static Demangle plus one GetCppTypeid
    registration per template instantiation used by this module.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImplBase::CallbackImplBase(ns3::CallbackImplBase const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    ## callback.h (module 'core'): std::string ns3::CallbackImplBase::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackImplBase::IsEqual(ns3::Ptr<const ns3::CallbackImplBase> other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::Demangle(std::string const & mangled) [member function]
    cls.add_method('Demangle',
                   'std::string',
                   [param('std::string const &', 'mangled')],
                   is_static=True, visibility='protected')
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'ns3::ObjectBase*'])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'void'])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'ns3::Ptr<ns3::NetDevice> '])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'ns3::Ptr<ns3::Packet const> '])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'unsigned short'])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'ns3::Address const&'])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'ns3::NetDevice::PacketType'])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'bool'])
    ## callback.h (module 'core'): static std::string ns3::CallbackImplBase::GetCppTypeid() [member function]
    cls.add_method('GetCppTypeid',
                   'std::string',
                   [],
                   is_static=True, visibility='protected', template_parameters=[u'ns3::Ptr<ns3::Packet> '])
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register ns3::CallbackValue bindings: constructors (including from a
    CallbackBase) and the Copy/DeserializeFromString/SerializeToString/Set
    AttributeValue overrides.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackValue::CallbackValue(ns3::CallbackBase const & base) [constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    ## callback.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::CallbackValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## callback.h (module 'core'): std::string ns3::CallbackValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackValue::Set(ns3::CallbackBase base) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3EmptyAttributeAccessor_methods(root_module, cls):
    """Register ns3::EmptyAttributeAccessor bindings: constructors and the
    concrete (non-pure) Get/Set/HasGetter/HasSetter overrides.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor(ns3::EmptyAttributeAccessor const & arg0) [constructor]
    cls.add_constructor([param('ns3::EmptyAttributeAccessor const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeAccessor::EmptyAttributeAccessor() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Get(ns3::ObjectBase const * object, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('Get',
                   'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasGetter() const [member function]
    cls.add_method('HasGetter',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::HasSetter() const [member function]
    cls.add_method('HasSetter',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeAccessor::Set(ns3::ObjectBase * object, ns3::AttributeValue const & value) const [member function]
    cls.add_method('Set',
                   'bool',
                   [param('ns3::ObjectBase *', 'object'), param('ns3::AttributeValue const &', 'value')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3EmptyAttributeChecker_methods(root_module, cls):
    """Register ns3::EmptyAttributeChecker bindings: constructors and the
    concrete AttributeChecker overrides (Check/Copy/Create + type info).

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker(ns3::EmptyAttributeChecker const & arg0) [constructor]
    cls.add_constructor([param('ns3::EmptyAttributeChecker const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeChecker::EmptyAttributeChecker() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Check(ns3::AttributeValue const & value) const [member function]
    cls.add_method('Check',
                   'bool',
                   [param('ns3::AttributeValue const &', 'value')],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::Copy(ns3::AttributeValue const & source, ns3::AttributeValue & destination) const [member function]
    cls.add_method('Copy',
                   'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeChecker::Create() const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetUnderlyingTypeInformation() const [member function]
    cls.add_method('GetUnderlyingTypeInformation',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeChecker::GetValueTypeName() const [member function]
    cls.add_method('GetValueTypeName',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeChecker::HasUnderlyingTypeInformation() const [member function]
    cls.add_method('HasUnderlyingTypeInformation',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Register ns3::EmptyAttributeValue bindings: constructors plus the
    private-visibility Copy/DeserializeFromString/SerializeToString overrides.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue(ns3::EmptyAttributeValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue::EmptyAttributeValue() [constructor]
    cls.add_constructor([])
    ## attribute.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::EmptyAttributeValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): bool ns3::EmptyAttributeValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   visibility='private', is_virtual=True)
    ## attribute.h (module 'core'): std::string ns3::EmptyAttributeValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Register ns3::Ipv4AddressChecker bindings (ipv4-address.h, module
    'network'): only the default and copy constructors are exposed.
    """
    # Default constructor first, then the copy constructor — order preserved.
    for ctor_args in ([], [param('ns3::Ipv4AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Register ns3::Ipv4AddressValue bindings: constructors (default, from
    Ipv4Address, copy) and the AttributeValue overrides plus Get/Set.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv4Address const &', 'value')])
    return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Register ns3::Ipv4MaskChecker bindings (ipv4-address.h, module
    'network'): only the default and copy constructors are exposed.
    """
    # Default constructor first, then the copy constructor — order preserved.
    for ctor_args in ([], [param('ns3::Ipv4MaskChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Register ns3::Ipv4MaskValue bindings: constructors (default, from
    Ipv4Mask, copy) and the AttributeValue overrides plus Get/Set.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4Mask const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue::Ipv4MaskValue(ns3::Ipv4MaskValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4MaskValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4MaskValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask ns3::Ipv4MaskValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv4Mask',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): std::string ns3::Ipv4MaskValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4MaskValue::Set(ns3::Ipv4Mask const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv4Mask const &', 'value')])
    return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Register ns3::Ipv6AddressChecker bindings (ipv6-address.h, module
    'network'): only the default and copy constructors are exposed.
    """
    # Default constructor first, then the copy constructor — order preserved.
    for ctor_args in ([], [param('ns3::Ipv6AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Register ns3::Ipv6AddressValue bindings: constructors (default, from
    Ipv6Address, copy) and the AttributeValue overrides plus Get/Set.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue::Ipv6AddressValue(ns3::Ipv6AddressValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv6Address',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): std::string ns3::Ipv6AddressValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6AddressValue::Set(ns3::Ipv6Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv6Address const &', 'value')])
    return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Register ns3::Ipv6PrefixChecker bindings (ipv6-address.h, module
    'network'): only the default and copy constructors are exposed.
    """
    # Default constructor first, then the copy constructor — order preserved.
    for ctor_args in ([], [param('ns3::Ipv6PrefixChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Register ns3::Ipv6PrefixValue bindings: constructors (default, from
    Ipv6Prefix, copy) and the AttributeValue overrides plus Get/Set.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6Prefix const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue::Ipv6PrefixValue(ns3::Ipv6PrefixValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    ## ipv6-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv6PrefixValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6PrefixValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix ns3::Ipv6PrefixValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv6Prefix',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): std::string ns3::Ipv6PrefixValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6PrefixValue::Set(ns3::Ipv6Prefix const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv6Prefix const &', 'value')])
    return
def register_Ns3Mac48AddressChecker_methods(root_module, cls):
    """Register ns3::Mac48AddressChecker bindings (mac48-address.h, module
    'network'): only the default and copy constructors are exposed.
    """
    # Default constructor first, then the copy constructor — order preserved.
    for ctor_args in ([], [param('ns3::Mac48AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    return
def register_Ns3Mac48AddressValue_methods(root_module, cls):
    """Register ns3::Mac48AddressValue bindings: constructors (default, from
    Mac48Address, copy) and the AttributeValue overrides plus Get/Set.

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue() [constructor]
    cls.add_constructor([])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48Address const & value) [constructor]
    cls.add_constructor([param('ns3::Mac48Address const &', 'value')])
    ## mac48-address.h (module 'network'): ns3::Mac48AddressValue::Mac48AddressValue(ns3::Mac48AddressValue const & arg0) [constructor]
    cls.add_constructor([param('ns3::Mac48AddressValue const &', 'arg0')])
    ## mac48-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Mac48AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## mac48-address.h (module 'network'): bool ns3::Mac48AddressValue::DeserializeFromString(std::string value, ns3::Ptr<const ns3::AttributeChecker> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## mac48-address.h (module 'network'): ns3::Mac48Address ns3::Mac48AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Mac48Address',
                   [],
                   is_const=True)
    ## mac48-address.h (module 'network'): std::string ns3::Mac48AddressValue::SerializeToString(ns3::Ptr<const ns3::AttributeChecker> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## mac48-address.h (module 'network'): void ns3::Mac48AddressValue::Set(ns3::Mac48Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Mac48Address const &', 'value')])
    return
def register_Ns3NetDevice_methods(root_module, cls):
    """Register ns3::NetDevice bindings: constructors and the full abstract
    device interface (address/channel/MTU accessors, capability queries,
    Send/SendFrom, setters, and the receive-callback hooks).

    NOTE: auto-generated PyBindGen registration code — do not edit by hand.
    """
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice() [constructor]
    cls.add_constructor([])
    ## net-device.h (module 'network'): ns3::NetDevice::NetDevice(ns3::NetDevice const & arg0) [constructor]
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    ## net-device.h (module 'network'): void ns3::NetDevice::AddLinkChangeCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Address',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Channel> ns3::NetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::Channel >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint32_t ns3::NetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): uint16_t ns3::NetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Address ns3::NetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NetDevice::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): static ns3::TypeId ns3::NetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex',
                   'void',
                   [param('uint32_t const', 'index')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu',
                   'bool',
                   [param('uint16_t const', 'mtu')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetPromiscReceiveCallback(ns3::NetDevice::PromiscReceiveCallback cb) [member function]
    cls.add_method('SetPromiscReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): void ns3::NetDevice::SetReceiveCallback(ns3::NetDevice::ReceiveCallback cb) [member function]
    cls.add_method('SetReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## net-device.h (module 'network'): bool ns3::NetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3NixVector_methods(root_module, cls):
    """Register Python bindings for ns3::NixVector (nix-vector.h, module 'network').

    Auto-generated pybindgen registration: each call below mirrors the C++
    declaration quoted in the ## comment immediately above it. Do not edit by
    hand beyond comments — this file is regenerated by the bindings scanner.
    """
    cls.add_output_stream_operator()
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector() [constructor]
    cls.add_constructor([])
    ## nix-vector.h (module 'network'): ns3::NixVector::NixVector(ns3::NixVector const & o) [constructor]
    cls.add_constructor([param('ns3::NixVector const &', 'o')])
    ## nix-vector.h (module 'network'): void ns3::NixVector::AddNeighborIndex(uint32_t newBits, uint32_t numberOfBits) [member function]
    cls.add_method('AddNeighborIndex',
                   'void',
                   [param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::BitCount(uint32_t numberOfNeighbors) const [member function]
    cls.add_method('BitCount',
                   'uint32_t',
                   [param('uint32_t', 'numberOfNeighbors')],
                   is_const=True)
    ## nix-vector.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::NixVector::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Deserialize(uint32_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('uint32_t const *', 'buffer'), param('uint32_t', 'size')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::ExtractNeighborIndex(uint32_t numberOfBits) [member function]
    cls.add_method('ExtractNeighborIndex',
                   'uint32_t',
                   [param('uint32_t', 'numberOfBits')])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetRemainingBits() [member function]
    cls.add_method('GetRemainingBits',
                   'uint32_t',
                   [])
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## nix-vector.h (module 'network'): uint32_t ns3::NixVector::Serialize(uint32_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3Node_methods(root_module, cls):
    """Register Python bindings for ns3::Node (node.h, module 'network').

    Auto-generated pybindgen registration: each call below mirrors the C++
    declaration quoted in the ## comment immediately above it. Do not edit by
    hand beyond comments — this file is regenerated by the bindings scanner.
    """
    ## node.h (module 'network'): ns3::Node::Node(ns3::Node const & arg0) [constructor]
    cls.add_constructor([param('ns3::Node const &', 'arg0')])
    ## node.h (module 'network'): ns3::Node::Node() [constructor]
    cls.add_constructor([])
    ## node.h (module 'network'): ns3::Node::Node(uint32_t systemId) [constructor]
    cls.add_constructor([param('uint32_t', 'systemId')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddApplication(ns3::Ptr<ns3::Application> application) [member function]
    cls.add_method('AddApplication',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::Application >', 'application')])
    ## node.h (module 'network'): uint32_t ns3::Node::AddDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddDevice',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    ## node.h (module 'network'): static bool ns3::Node::ChecksumEnabled() [member function]
    cls.add_method('ChecksumEnabled',
                   'bool',
                   [],
                   is_static=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::Application> ns3::Node::GetApplication(uint32_t index) const [member function]
    cls.add_method('GetApplication',
                   'ns3::Ptr< ns3::Application >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Node::GetDevice(uint32_t index) const [member function]
    cls.add_method('GetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetId() const [member function]
    cls.add_method('GetId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): ns3::Time ns3::Node::GetLocalTime() const [member function]
    cls.add_method('GetLocalTime',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNApplications() const [member function]
    cls.add_method('GetNApplications',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetNDevices() const [member function]
    cls.add_method('GetNDevices',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): uint32_t ns3::Node::GetSystemId() const [member function]
    cls.add_method('GetSystemId',
                   'uint32_t',
                   [],
                   is_const=True)
    ## node.h (module 'network'): static ns3::TypeId ns3::Node::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## node.h (module 'network'): void ns3::Node::RegisterDeviceAdditionListener(ns3::Node::DeviceAdditionListener listener) [member function]
    cls.add_method('RegisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::RegisterProtocolHandler(ns3::Node::ProtocolHandler handler, uint16_t protocolType, ns3::Ptr<ns3::NetDevice> device, bool promiscuous=false) [member function]
    cls.add_method('RegisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler'), param('uint16_t', 'protocolType'), param('ns3::Ptr< ns3::NetDevice >', 'device'), param('bool', 'promiscuous', default_value='false')])
    ## node.h (module 'network'): void ns3::Node::UnregisterDeviceAdditionListener(ns3::Node::DeviceAdditionListener listener) [member function]
    cls.add_method('UnregisterDeviceAdditionListener',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'listener')])
    ## node.h (module 'network'): void ns3::Node::UnregisterProtocolHandler(ns3::Node::ProtocolHandler handler) [member function]
    cls.add_method('UnregisterProtocolHandler',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'handler')])
    ## node.h (module 'network'): void ns3::Node::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## node.h (module 'network'): void ns3::Node::DoInitialize() [member function]
    cls.add_method('DoInitialize',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3Packet_methods(root_module, cls):
    """Register Python bindings for ns3::Packet (packet.h, module 'network').

    Auto-generated pybindgen registration: each call below mirrors the C++
    declaration quoted in the ## comment immediately above it. Do not edit by
    hand beyond comments — this file is regenerated by the bindings scanner.
    """
    cls.add_output_stream_operator()
    ## packet.h (module 'network'): ns3::Packet::Packet() [constructor]
    cls.add_constructor([])
    ## packet.h (module 'network'): ns3::Packet::Packet(ns3::Packet const & o) [constructor]
    cls.add_constructor([param('ns3::Packet const &', 'o')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint32_t size) [constructor]
    cls.add_constructor([param('uint32_t', 'size')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size, bool magic) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size'), param('bool', 'magic')])
    ## packet.h (module 'network'): ns3::Packet::Packet(uint8_t const * buffer, uint32_t size) [constructor]
    cls.add_constructor([param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddAtEnd(ns3::Ptr<const ns3::Packet> packet) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'packet')])
    ## packet.h (module 'network'): void ns3::Packet::AddByteTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddByteTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddHeader(ns3::Header const & header) [member function]
    cls.add_method('AddHeader',
                   'void',
                   [param('ns3::Header const &', 'header')])
    ## packet.h (module 'network'): void ns3::Packet::AddPacketTag(ns3::Tag const & tag) const [member function]
    cls.add_method('AddPacketTag',
                   'void',
                   [param('ns3::Tag const &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::AddPaddingAtEnd(uint32_t size) [member function]
    cls.add_method('AddPaddingAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::AddTrailer(ns3::Trailer const & trailer) [member function]
    cls.add_method('AddTrailer',
                   'void',
                   [param('ns3::Trailer const &', 'trailer')])
    ## packet.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::Packet::BeginItem() const [member function]
    cls.add_method('BeginItem',
                   'ns3::PacketMetadata::ItemIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::Packet >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData',
                   'void',
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Packet::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment',
                   'ns3::Ptr< ns3::Packet >',
                   [param('uint32_t', 'start'), param('uint32_t', 'length')],
                   is_const=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnableChecking() [member function]
    cls.add_method('EnableChecking',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): static void ns3::Packet::EnablePrinting() [member function]
    cls.add_method('EnablePrinting',
                   'void',
                   [],
                   is_static=True)
    ## packet.h (module 'network'): bool ns3::Packet::FindFirstMatchingByteTag(ns3::Tag & tag) const [member function]
    cls.add_method('FindFirstMatchingByteTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::ByteTagIterator ns3::Packet::GetByteTagIterator() const [member function]
    cls.add_method('GetByteTagIterator',
                   'ns3::ByteTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::Ptr<ns3::NixVector> ns3::Packet::GetNixVector() const [member function]
    cls.add_method('GetNixVector',
                   'ns3::Ptr< ns3::NixVector >',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::PacketTagIterator ns3::Packet::GetPacketTagIterator() const [member function]
    cls.add_method('GetPacketTagIterator',
                   'ns3::PacketTagIterator',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::GetSize() const [member function]
    cls.add_method('GetSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint64_t ns3::Packet::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint64_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header) const [member function]
    cls.add_method('PeekHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekHeader(ns3::Header & header, uint32_t size) const [member function]
    cls.add_method('PeekHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header'), param('uint32_t', 'size')],
                   is_const=True)
    ## packet.h (module 'network'): bool ns3::Packet::PeekPacketTag(ns3::Tag & tag) const [member function]
    cls.add_method('PeekPacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::Packet::PeekTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('PeekTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): void ns3::Packet::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintByteTags(std::ostream & os) const [member function]
    cls.add_method('PrintByteTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::PrintPacketTags(std::ostream & os) const [member function]
    cls.add_method('PrintPacketTags',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllByteTags() [member function]
    cls.add_method('RemoveAllByteTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAllPacketTags() [member function]
    cls.add_method('RemoveAllPacketTags',
                   'void',
                   [])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtEnd(uint32_t size) [member function]
    cls.add_method('RemoveAtEnd',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): void ns3::Packet::RemoveAtStart(uint32_t size) [member function]
    cls.add_method('RemoveAtStart',
                   'void',
                   [param('uint32_t', 'size')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header) [member function]
    cls.add_method('RemoveHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveHeader(ns3::Header & header, uint32_t size) [member function]
    cls.add_method('RemoveHeader',
                   'uint32_t',
                   [param('ns3::Header &', 'header'), param('uint32_t', 'size')])
    ## packet.h (module 'network'): bool ns3::Packet::RemovePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('RemovePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::RemoveTrailer(ns3::Trailer & trailer) [member function]
    cls.add_method('RemoveTrailer',
                   'uint32_t',
                   [param('ns3::Trailer &', 'trailer')])
    ## packet.h (module 'network'): bool ns3::Packet::ReplacePacketTag(ns3::Tag & tag) [member function]
    cls.add_method('ReplacePacketTag',
                   'bool',
                   [param('ns3::Tag &', 'tag')])
    ## packet.h (module 'network'): uint32_t ns3::Packet::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::Packet::SetNixVector(ns3::Ptr<ns3::NixVector> nixVector) [member function]
    cls.add_method('SetNixVector',
                   'void',
                   [param('ns3::Ptr< ns3::NixVector >', 'nixVector')])
    ## packet.h (module 'network'): std::string ns3::Packet::ToString() const [member function]
    cls.add_method('ToString',
                   'std::string',
                   [],
                   is_const=True)
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register Python bindings for ns3::TypeIdChecker (type-id.h, module 'core')."""
    # Default constructor first, then the copy constructor.
    constructor_signatures = (
        [],
        [param('ns3::TypeIdChecker const &', 'arg0')],
    )
    for signature in constructor_signatures:
        cls.add_constructor(signature)
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register Python bindings for ns3::TypeIdValue (type-id.h, module 'core')."""
    # Constructors: default, from a TypeId value, and copy.
    for ctor_args in ([],
                      [param('ns3::TypeId const &', 'value')],
                      [param('ns3::TypeIdValue const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    # ns3::Ptr<ns3::AttributeValue> Copy() const
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string value, Ptr<const AttributeChecker> checker)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # ns3::TypeId Get() const
    cls.add_method('Get', 'ns3::TypeId', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker> checker) const
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::TypeId const & value)
    cls.add_method('Set', 'void', [param('ns3::TypeId const &', 'value')])
    return
def register_Ns3VirtualNetDevice_methods(root_module, cls):
    """Register Python bindings for ns3::VirtualNetDevice (virtual-net-device.h).

    Auto-generated pybindgen registration: each call below mirrors the C++
    declaration quoted in the ## comment immediately above it. Do not edit by
    hand beyond comments — this file is regenerated by the bindings scanner.
    """
    ## virtual-net-device.h (module 'virtual-net-device'): ns3::VirtualNetDevice::VirtualNetDevice(ns3::VirtualNetDevice const & arg0) [constructor]
    cls.add_constructor([param('ns3::VirtualNetDevice const &', 'arg0')])
    ## virtual-net-device.h (module 'virtual-net-device'): ns3::VirtualNetDevice::VirtualNetDevice() [constructor]
    cls.add_constructor([])
    ## virtual-net-device.h (module 'virtual-net-device'): void ns3::VirtualNetDevice::AddLinkChangeCallback(ns3::Callback<void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> callback) [member function]
    cls.add_method('AddLinkChangeCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): ns3::Address ns3::VirtualNetDevice::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): ns3::Address ns3::VirtualNetDevice::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Address',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): ns3::Ptr<ns3::Channel> ns3::VirtualNetDevice::GetChannel() const [member function]
    cls.add_method('GetChannel',
                   'ns3::Ptr< ns3::Channel >',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): uint32_t ns3::VirtualNetDevice::GetIfIndex() const [member function]
    cls.add_method('GetIfIndex',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): uint16_t ns3::VirtualNetDevice::GetMtu() const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): ns3::Address ns3::VirtualNetDevice::GetMulticast(ns3::Ipv4Address multicastGroup) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): ns3::Address ns3::VirtualNetDevice::GetMulticast(ns3::Ipv6Address addr) const [member function]
    cls.add_method('GetMulticast',
                   'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): ns3::Ptr<ns3::Node> ns3::VirtualNetDevice::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): static ns3::TypeId ns3::VirtualNetDevice::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## virtual-net-device.h (module 'virtual-net-device'): bool ns3::VirtualNetDevice::IsBridge() const [member function]
    cls.add_method('IsBridge',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): bool ns3::VirtualNetDevice::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): bool ns3::VirtualNetDevice::IsLinkUp() const [member function]
    cls.add_method('IsLinkUp',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): bool ns3::VirtualNetDevice::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): bool ns3::VirtualNetDevice::IsPointToPoint() const [member function]
    cls.add_method('IsPointToPoint',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): bool ns3::VirtualNetDevice::NeedsArp() const [member function]
    cls.add_method('NeedsArp',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): bool ns3::VirtualNetDevice::Receive(ns3::Ptr<ns3::Packet> packet, uint16_t protocol, ns3::Address const & source, ns3::Address const & destination, ns3::NetDevice::PacketType packetType) [member function]
    cls.add_method('Receive',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'destination'), param('ns3::NetDevice::PacketType', 'packetType')])
    ## virtual-net-device.h (module 'virtual-net-device'): bool ns3::VirtualNetDevice::Send(ns3::Ptr<ns3::Packet> packet, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('Send',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): bool ns3::VirtualNetDevice::SendFrom(ns3::Ptr<ns3::Packet> packet, ns3::Address const & source, ns3::Address const & dest, uint16_t protocolNumber) [member function]
    cls.add_method('SendFrom',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Address const &', 'source'), param('ns3::Address const &', 'dest'), param('uint16_t', 'protocolNumber')],
                   is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): void ns3::VirtualNetDevice::SetAddress(ns3::Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'address')],
                   is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): void ns3::VirtualNetDevice::SetIfIndex(uint32_t const index) [member function]
    cls.add_method('SetIfIndex',
                   'void',
                   [param('uint32_t const', 'index')],
                   is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): void ns3::VirtualNetDevice::SetIsPointToPoint(bool isPointToPoint) [member function]
    cls.add_method('SetIsPointToPoint',
                   'void',
                   [param('bool', 'isPointToPoint')])
    ## virtual-net-device.h (module 'virtual-net-device'): bool ns3::VirtualNetDevice::SetMtu(uint16_t const mtu) [member function]
    cls.add_method('SetMtu',
                   'bool',
                   [param('uint16_t const', 'mtu')],
                   is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): void ns3::VirtualNetDevice::SetNeedsArp(bool needsArp) [member function]
    cls.add_method('SetNeedsArp',
                   'void',
                   [param('bool', 'needsArp')])
    ## virtual-net-device.h (module 'virtual-net-device'): void ns3::VirtualNetDevice::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): void ns3::VirtualNetDevice::SetPromiscReceiveCallback(ns3::NetDevice::PromiscReceiveCallback cb) [member function]
    cls.add_method('SetPromiscReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): void ns3::VirtualNetDevice::SetReceiveCallback(ns3::NetDevice::ReceiveCallback cb) [member function]
    cls.add_method('SetReceiveCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): void ns3::VirtualNetDevice::SetSendCallback(ns3::VirtualNetDevice::SendCallback transmitCb) [member function]
    cls.add_method('SetSendCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::Packet >, ns3::Address const &, ns3::Address const &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'transmitCb')])
    ## virtual-net-device.h (module 'virtual-net-device'): void ns3::VirtualNetDevice::SetSupportsSendFrom(bool supportsSendFrom) [member function]
    cls.add_method('SetSupportsSendFrom',
                   'void',
                   [param('bool', 'supportsSendFrom')])
    ## virtual-net-device.h (module 'virtual-net-device'): bool ns3::VirtualNetDevice::SupportsSendFrom() const [member function]
    cls.add_method('SupportsSendFrom',
                   'bool',
                   [],
                   is_const=True, is_virtual=True)
    ## virtual-net-device.h (module 'virtual-net-device'): void ns3::VirtualNetDevice::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3AddressChecker_methods(root_module, cls):
    """Register Python bindings for ns3::AddressChecker (address.h, module 'network')."""
    # Default constructor first, then the copy constructor.
    constructor_signatures = (
        [],
        [param('ns3::AddressChecker const &', 'arg0')],
    )
    for signature in constructor_signatures:
        cls.add_constructor(signature)
    return
def register_Ns3AddressValue_methods(root_module, cls):
    """Register Python bindings for ns3::AddressValue (address.h, module 'network')."""
    # Constructors: default, from an Address value, and copy.
    for ctor_args in ([],
                      [param('ns3::Address const &', 'value')],
                      [param('ns3::AddressValue const &', 'arg0')]):
        cls.add_constructor(ctor_args)
    # ns3::Ptr<ns3::AttributeValue> Copy() const
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, is_virtual=True)
    # bool DeserializeFromString(std::string value, Ptr<const AttributeChecker> checker)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'),
                    param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    # ns3::Address Get() const
    cls.add_method('Get', 'ns3::Address', [], is_const=True)
    # std::string SerializeToString(Ptr<const AttributeChecker> checker) const
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # void Set(ns3::Address const & value)
    cls.add_method('Set', 'void', [param('ns3::Address const &', 'value')])
    return
def register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Const_ns3Address___amp___Ns3NetDevicePacketType_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for the CallbackImpl specialization backing
    NetDevice::PromiscReceiveCallback (callback.h, module 'core')."""
    # Hoist the long instantiated template name used by the copy constructor.
    impl_type = 'ns3::CallbackImpl< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(impl_type + ' const &', 'arg0')])
    # static std::string DoGetTypeid()
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    # std::string GetTypeid() const
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # operator() — invokes the callback; exposed to Python as __call__.
    cls.add_method('operator()', 'bool',
                   [param('ns3::Ptr< ns3::NetDevice >', 'arg0'),
                    param('ns3::Ptr< ns3::Packet const >', 'arg1'),
                    param('short unsigned int', 'arg2'),
                    param('ns3::Address const &', 'arg3'),
                    param('ns3::Address const &', 'arg4'),
                    param('ns3::NetDevice::PacketType', 'arg5')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for the CallbackImpl specialization backing
    NetDevice::ReceiveCallback (callback.h, module 'core')."""
    # Hoist the long instantiated template name used by the copy constructor.
    impl_type = 'ns3::CallbackImpl< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param(impl_type + ' const &', 'arg0')])
    # static std::string DoGetTypeid()
    cls.add_method('DoGetTypeid', 'std::string', [], is_static=True)
    # std::string GetTypeid() const
    cls.add_method('GetTypeid', 'std::string', [], is_const=True, is_virtual=True)
    # operator() — invokes the callback; exposed to Python as __call__.
    cls.add_method('operator()', 'bool',
                   [param('ns3::Ptr< ns3::NetDevice >', 'arg0'),
                    param('ns3::Ptr< ns3::Packet const >', 'arg1'),
                    param('short unsigned int', 'arg2'),
                    param('ns3::Address const &', 'arg3')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Bool_Ns3Ptr__lt__ns3Packet__gt___Const_ns3Address___amp___Const_ns3Address___amp___Unsigned_short_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<bool, Ptr<Packet>, const Address&, const Address&, unsigned short> (pybindgen auto-generated; do not hand-edit)."""
    ## callback.h (module 'core'): ns3::CallbackImpl<bool, ns3::Ptr<ns3::Packet>, const ns3::Address &, const ns3::Address &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<bool, ns3::Ptr<ns3::Packet>, const ns3::Address &, const ns3::Address &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<bool, ns3::Ptr<ns3::Packet>, const ns3::Address &, const ns3::Address &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< bool, ns3::Ptr< ns3::Packet >, ns3::Address const &, ns3::Address const &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<bool, ns3::Ptr<ns3::Packet>, const ns3::Address &, const ns3::Address &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<bool, ns3::Ptr<ns3::Packet>, const ns3::Address &, const ns3::Address &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): bool ns3::CallbackImpl<bool, ns3::Ptr<ns3::Packet>, const ns3::Address &, const ns3::Address &, unsigned short, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::Packet> arg0, ns3::Address const & arg1, ns3::Address const & arg2, short unsigned int arg3) [member operator]
    cls.add_method('operator()',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'arg0'), param('ns3::Address const &', 'arg1'), param('ns3::Address const &', 'arg2'), param('short unsigned int', 'arg3')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Ns3ObjectBase___star___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<ObjectBase*> (pybindgen auto-generated; do not hand-edit)."""
    ## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): ns3::ObjectBase * ns3::CallbackImpl<ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()() [member operator]
    cls.add_method('operator()',
                   'ns3::ObjectBase *',
                   [],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__const_ns3Packet__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, Ptr<const Packet>> (pybindgen auto-generated; do not hand-edit)."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::Packet const >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<const ns3::Packet>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<const ns3::Packet> arg0) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'arg0')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Ptr__lt__const_ns3Packet__gt___Unsigned_short_Const_ns3Address___amp___Const_ns3Address___amp___Ns3NetDevicePacketType_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for the promiscuous-receive callback CallbackImpl<void, Ptr<NetDevice>, Ptr<const Packet>, unsigned short, const Address&, const Address&, NetDevice::PacketType> (pybindgen auto-generated; do not hand-edit)."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::Ptr<const ns3::Packet>, unsigned short, const ns3::Address &, const ns3::Address &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::NetDevice> arg0, ns3::Ptr<const ns3::Packet> arg1, short unsigned int arg2, ns3::Address const & arg3, ns3::Address const & arg4, ns3::NetDevice::PacketType arg5) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'arg0'), param('ns3::Ptr< ns3::Packet const >', 'arg1'), param('short unsigned int', 'arg2'), param('ns3::Address const &', 'arg3'), param('ns3::Address const &', 'arg4'), param('ns3::NetDevice::PacketType', 'arg5')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3CallbackImpl__Void_Ns3Ptr__lt__ns3NetDevice__gt___Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_Ns3Empty_methods(root_module, cls):
    """Register bindings for ns3::CallbackImpl<void, Ptr<NetDevice>> (pybindgen auto-generated; do not hand-edit)."""
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::CallbackImpl(ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> const & arg0) [constructor]
    cls.add_constructor([param('ns3::CallbackImpl< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty > const &', 'arg0')])
    ## callback.h (module 'core'): static std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::DoGetTypeid() [member function]
    cls.add_method('DoGetTypeid',
                   'std::string',
                   [],
                   is_static=True)
    ## callback.h (module 'core'): std::string ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::GetTypeid() const [member function]
    cls.add_method('GetTypeid',
                   'std::string',
                   [],
                   is_const=True, is_virtual=True)
    ## callback.h (module 'core'): void ns3::CallbackImpl<void, ns3::Ptr<ns3::NetDevice>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>::operator()(ns3::Ptr<ns3::NetDevice> arg0) [member operator]
    cls.add_method('operator()',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'arg0')],
                   is_pure_virtual=True, is_virtual=True, custom_name=u'__call__')
    return
def register_Ns3HashImplementation_methods(root_module, cls):
    """Register bindings for the abstract base ns3::Hash::Implementation (pybindgen auto-generated; do not hand-edit)."""
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation(ns3::Hash::Implementation const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Implementation::Implementation() [constructor]
    cls.add_constructor([])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Implementation::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_pure_virtual=True, is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Implementation::GetHash64(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Implementation::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Fnv1a (pybindgen auto-generated; do not hand-edit)."""
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a(ns3::Hash::Function::Fnv1a const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a::Fnv1a() [constructor]
    cls.add_constructor([])
    ## hash-fnv.h (module 'core'): uint32_t ns3::Hash::Function::Fnv1a::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): uint64_t ns3::Hash::Function::Fnv1a::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    ## hash-fnv.h (module 'core'): void ns3::Hash::Function::Fnv1a::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Hash32 (pybindgen auto-generated; do not hand-edit)."""
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Function::Hash32 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32::Hash32(ns3::Hash::Hash32Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash32::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash32::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Hash64 (pybindgen auto-generated; do not hand-edit)."""
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Function::Hash64 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64::Hash64(ns3::Hash::Hash64Function_ptr hp) [constructor]
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
    ## hash-function.h (module 'core'): uint32_t ns3::Hash::Function::Hash64::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): uint64_t ns3::Hash::Function::Hash64::GetHash64(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-function.h (module 'core'): void ns3::Hash::Function::Hash64::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Murmur3 (pybindgen auto-generated; do not hand-edit)."""
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3(ns3::Hash::Function::Murmur3 const & arg0) [constructor]
    cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3::Murmur3() [constructor]
    cls.add_constructor([])
    ## hash-murmur3.h (module 'core'): uint32_t ns3::Hash::Function::Murmur3::GetHash32(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): uint64_t ns3::Hash::Function::Murmur3::GetHash64(char const * buffer, std::size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('std::size_t const', 'size')],
                   is_virtual=True)
    ## hash-murmur3.h (module 'core'): void ns3::Hash::Function::Murmur3::clear() [member function]
    cls.add_method('clear',
                   'void',
                   [],
                   is_virtual=True)
    return
def register_functions(root_module):
    """Register free functions, recursing into the nested C++ namespaces."""
    module = root_module
    register_functions_ns3_FatalImpl(module.add_cpp_namespace('FatalImpl'), root_module)
    register_functions_ns3_Hash(module.add_cpp_namespace('Hash'), root_module)
    return
def register_functions_ns3_FatalImpl(module, root_module):
    """No free functions to register in ns3::FatalImpl for this module."""
    return
def register_functions_ns3_Hash(module, root_module):
    """Recurse into the nested ns3::Hash::Function namespace."""
    register_functions_ns3_Hash_Function(module.add_cpp_namespace('Function'), root_module)
    return
def register_functions_ns3_Hash_Function(module, root_module):
    """No free functions to register in ns3::Hash::Function for this module."""
    return
def main():
    """Build the module description and emit the generated C++ binding code to stdout."""
    out = FileCodeSink(sys.stdout)  # FileCodeSink/sys are imported earlier in this file
    root_module = module_init()
    register_types(root_module)
    register_methods(root_module)
    register_functions(root_module)
    root_module.generate(out)

if __name__ == '__main__':
    main()
|
tomhenderson/ns-3-dev-git
|
src/virtual-net-device/bindings/modulegen__gcc_LP64.py
|
Python
|
gpl-2.0
| 265,434
|
from network import *
class QNetwork(Network):
    # Deep Q-Network head built on the shared `Network` base: adds the final
    # linear layer, the (optionally Huber-clipped) TD loss, gradient ops, and
    # ops to synchronize parameters with shared memory.
    # NOTE(review): written for pre-1.0 TensorFlow (tf.mul/tf.sub) and
    # Python 2 (xrange) — confirm before porting.
    def __init__(self, conf):
        """ Set up remaining layers, loss function, gradient compute and apply
        ops, network parameter synchronization ops, and summary ops. """
        super(QNetwork, self).__init__(conf)

        with tf.name_scope(self.name):
            # TD target values, one scalar per sample in the batch.
            self.target_ph = tf.placeholder(
                "float32", [None], name = 'target')

            if self.arch == "NIPS":
                #fc4
                # o3 is the last hidden activation produced by the base class
                # for the NIPS architecture — TODO confirm against network.py.
                self.w4, self.b4, self.output_layer = self._fc('fc4', self.o3, self.num_actions, activation = "linear")
                self.params = [self.w1, self.b1, self.w2, self.b2, self.w3, self.b3,
                               self.w4, self.b4]
            else: #NATURE
                #fc5
                self.w5, self.b5, self.output_layer = self._fc('fc5', self.o4, self.num_actions, activation = "linear")
                self.params = [self.w1, self.b1, self.w2, self.b2, self.w3, self.b3,
                               self.w4, self.b4, self.w5, self.b5]

            # Loss
            # Multiply the output of the network by a one hot vector 1 for the
            # executed action. This will make the loss due to non-selected
            # actions to be zero.
            # Target networks (name contains "target") only serve Q-value
            # lookups, so they get no loss/gradient ops.
            if "target" not in self.name:
                output_selected_action = tf.reduce_sum(tf.mul(self.output_layer,
                                                              self.selected_action_ph), reduction_indices = 1)
                diff = tf.sub(self.target_ph, output_selected_action)

                # HUBER LOSS
                # If we simply take the squared clipped diff as our loss,
                # then the gradient will be zero whenever the diff exceeds
                # the clip bounds. To avoid this, we extend the loss
                # linearly past the clip point to keep the gradient constant
                # in that regime.
                #
                # This is equivalent to declaring d loss/d q_vals to be
                # equal to the clipped diff, then backpropagating from
                # there, which is what the DeepMind implementation does.
                if self.clip_loss_delta > 0:
                    quadratic_part = tf.minimum(tf.abs(diff),
                                                tf.constant(self.clip_loss_delta))
                    linear_part = tf.sub(tf.abs(diff), quadratic_part)
                    #self.loss = tf.reduce_mean(0.5 * tf.square(quadratic_part) +
                    #                           self.clip_loss_delta * linear_part)
                    self.loss = tf.add(tf.nn.l2_loss(quadratic_part),
                                       tf.mul(tf.constant(self.clip_loss_delta), linear_part))
                else:
                    #self.loss = tf.reduce_mean(0.5 * tf.square(diff))
                    self.loss = tf.nn.l2_loss(diff)

                # Operations to compute gradients
                with tf.control_dependencies(None):
                    grads = tf.gradients(self.loss, self.params)

                    # This is not really an operation, but a list of gradient Tensors
                    # When calling run() on it, the value of those Tensors
                    # (i.e., of the gradients) will be calculated.
                    self.clipped_grad_hist_op = None
                    if self.clip_norm_type == 'ignore':
                        # Unclipped gradients
                        self.get_gradients = grads
                        #self.get_gradients = [g for g, _ in self.grads_and_vars]
                    elif self.clip_norm_type == 'global':
                        # Clip network grads by network norm
                        self.get_gradients = tf.clip_by_global_norm(
                            grads, self.clip_norm)[0]
                    elif self.clip_norm_type == 'local':
                        # Clip layer grads by layer norm
                        self.get_gradients = [tf.clip_by_norm(
                            g, self.clip_norm) for g in grads]

            # Placeholders for shared memory vars
            # (created for every network, including targets, so all of them
            # can be refreshed from the shared parameter store)
            self.params_ph = []
            for p in self.params:
                self.params_ph.append(tf.placeholder(tf.float32,
                                                     shape=p.get_shape(),
                                                     name="shared_memory_for_{}".format(
                                                         (p.name.split("/", 1)[1]).replace(":", "_"))))

            # Ops to sync net with shared memory vars
            self.sync_with_shared_memory = []
            for i in xrange(len(self.params)):
                self.sync_with_shared_memory.append(
                    self.params[i].assign(self.params_ph[i]))
|
traai/async-deep-rl
|
algorithms/q_network.py
|
Python
|
apache-2.0
| 4,926
|
from app import db
from app.models import OrderProduct
from datetime import datetime
class Product(db.Model):
    """A product offered by a company, with its public-API dict view."""

    __tablename__ = 'products'

    id = db.Column(db.Integer, primary_key=True)
    timestamp = db.Column(db.DateTime, default=datetime.utcnow)
    company_id = db.Column(db.Integer, db.ForeignKey('companies.id'))
    name = db.Column(db.String)
    description = db.Column(db.String)
    price = db.Column(db.Float)
    order_product = db.relationship('OrderProduct', foreign_keys=OrderProduct.product_id, backref='product',
                                    cascade="save-update, merge, delete", lazy='dynamic')

    def dictionary(self):
        """Return the public fields of this product as a plain dict."""
        public_fields = ("id", "company_id", "name", "description", "price")
        return {field: getattr(self, field) for field in public_fields}

    def __str__(self):
        """Render the product as the string form of its dict view."""
        return str(self.dictionary())
|
luisfcofv/Superhero
|
app/models/product.py
|
Python
|
mit
| 927
|
# Copyright (C) 2018 Red Hat, Inc.,
# This file is part of the sos project: https://github.com/sosreport/sos
#
# This copyrighted material is made available to anyone wishing to use,
# modify, copy, or redistribute it subject to the terms and conditions of
# version 2 of the GNU General Public License.
#
# See the LICENSE file in the source distribution for further information.
from sos.plugins import Plugin, RedHatPlugin
class OvirtNode(Plugin, RedHatPlugin):
    """oVirt Node specific information"""

    packages = (
        'imgbased',
        'ovirt-node-ng-nodectl',
    )

    plugin_name = 'ovirt_node'
    profiles = ('virt',)

    def setup(self):
        # Log files; /tmp/imgbased.log is only written by node versions < 4.2.
        log_files = [
            '/var/log/imgbased.log',
            '/tmp/imgbased.log',
        ]
        self.add_copy_spec(log_files)

        # Runtime state: image layout and node health/info.
        runtime_cmds = [
            'imgbase layout',
            'nodectl --machine-readable check',
            'nodectl info',
        ]
        self.add_cmd_output(runtime_cmds)
# vim: expandtab tabstop=4 shiftwidth=4
|
nijinashok/sos
|
sos/plugins/ovirt_node.py
|
Python
|
gpl-2.0
| 1,069
|
# Copyright 2017 NeuStar, Inc.All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
from .blacklist_id import BlacklistId
from .sponsor import Sponsor
from .account import Account
class Blacklists:
    """Client wrapper for the ``/blacklists`` collection of the API."""

    def __init__(self, connection, base_uri):
        """Bind to *connection*, rooted at ``<base_uri>/blacklists``."""
        self.connection = connection
        self.base_uri = base_uri+"/blacklists"

    def get(self):
        """Get a list of blacklists."""
        return self.connection.get(self.base_uri)

    def post(self, account_id, name, **kwargs):
        """Create a new Blacklist.

        Arguments:
        account_id -- The account ID associated with the blacklist.
        name -- The name of the blacklist.

        Keyword Arguments:
        sponsorId -- The sponsor ID associated with the blacklist (only required for NeustarAdmins)
        description -- A description of the blacklist.
        """
        properties = {"accountId": account_id, "name": name}
        # ``**kwargs`` is always a dict (never None), so the previous
        # ``if kwargs is not None`` check was dead code; merge only when
        # extra properties were actually supplied.
        if kwargs:
            properties.update(kwargs)
        return self.connection.post(self.base_uri, json.dumps(properties))

    def blacklist_id(self, blacklist_id):
        """Create a Blacklist Id object."""
        return BlacklistId(self.connection, self.base_uri, blacklist_id)

    def sponsor(self):
        """Create a Sponsor object."""
        return Sponsor(self.connection, self.base_uri)

    def account(self):
        """Create an Account object."""
        return Account(self.connection, self.base_uri)
|
sbarbett/ssp-sdk-python
|
src/blacklists.py
|
Python
|
apache-2.0
| 1,822
|
#!/usr/bin/python
# Copyright (c) 2014 Wladmir J. van der Laan
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
'''
Script to generate list of seed nodes for chainparams.cpp.
This script expects two text files in the directory that is passed as an
argument:
nodes_main.txt
nodes_test.txt
These files must consist of lines in the format
<ip>
<ip>:<port>
[<ipv6>]
[<ipv6>]:<port>
<onion>.onion
0xDDBBCCAA (IPv4 little-endian old pnSeeds format)
The output will be two data structures with the peers in binary format:
static SeedSpec6 pnSeed6_main[]={
...
}
static SeedSpec6 pnSeed6_test[]={
...
}
These should be pasted into `src/chainparamsseeds.h`.
'''
from __future__ import print_function, division
from base64 import b32decode
from binascii import a2b_hex
import sys, os
import re
# ipv4 in ipv6 prefix
pchIPv4 = bytearray([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0xff, 0xff])
# tor-specific ipv6 prefix
pchOnionCat = bytearray([0xFD,0x87,0xD8,0x7E,0xEB,0x43])

def name_to_ipv6(addr):
    """Convert an address string to its 16-byte "IPv6" wire form.

    Accepts: <base32>.onion (OnionCat mapping), dotted IPv4 (mapped to
    ::ffff:a.b.c.d), plain IPv6, and the legacy 0xDDBBCCAA little-endian
    IPv4 form.  Raises ValueError for anything unparsable.
    """
    if len(addr)>6 and addr.endswith('.onion'):
        vchAddr = b32decode(addr[0:-6], True)
        if len(vchAddr) != 16-len(pchOnionCat):
            # Bug fix: this previously interpolated the undefined name `s`,
            # so a bad onion address raised NameError instead of ValueError.
            raise ValueError('Invalid onion %s' % addr)
        return pchOnionCat + vchAddr
    elif '.' in addr: # IPv4
        return pchIPv4 + bytearray((int(x) for x in addr.split('.')))
    elif ':' in addr: # IPv6
        sub = [[], []] # prefix, suffix
        x = 0 # 0 while filling the prefix, 1 after a '::' gap
        addr = addr.split(':')
        for i,comp in enumerate(addr):
            if comp == '':
                if i == 0 or i == (len(addr)-1): # skip empty component at beginning or end
                    continue
                x += 1 # :: skips to suffix
                assert(x < 2)
            else: # two bytes per component
                val = int(comp, 16)
                sub[x].append(val >> 8)
                sub[x].append(val & 0xff)
        nullbytes = 16 - len(sub[0]) - len(sub[1])
        assert((x == 0 and nullbytes == 0) or (x == 1 and nullbytes > 0))
        return bytearray(sub[0] + ([0] * nullbytes) + sub[1])
    elif addr.startswith('0x'): # IPv4-in-little-endian
        return pchIPv4 + bytearray(reversed(a2b_hex(addr[2:])))
    else:
        raise ValueError('Could not parse address %s' % addr)
def parse_spec(s, defaultport):
    """Split a "host[:port]" spec *s* into (ipv6_bytes, port).

    *defaultport* is used when no port is given.  The host part is
    converted with name_to_ipv6().
    """
    # Raw string: '\[' in a plain literal is an invalid escape sequence
    # (DeprecationWarning on Python >= 3.6, SyntaxWarning later).
    match = re.match(r'\[([0-9a-fA-F:]+)\](?::([0-9]+))?$', s)
    if match: # ipv6
        host = match.group(1)
        port = match.group(2)
    elif s.count(':') > 1: # ipv6, no port
        host = s
        port = ''
    else:
        (host,_,port) = s.partition(':')

    if not port:
        port = defaultport
    else:
        port = int(port)

    host = name_to_ipv6(host)

    return (host,port)
def process_nodes(g, f, structname, defaultport):
    """Emit the C array *structname* of SeedSpec6 entries to *g*, one per
    non-empty, non-comment line of *f*."""
    entries = []
    for raw in f:
        # Drop a trailing '#' comment, then surrounding whitespace.
        hash_pos = raw.find('#')
        if hash_pos != -1:
            raw = raw[0:hash_pos]
        raw = raw.strip()
        if not raw:
            continue
        (host, port) = parse_spec(raw, defaultport)
        octets = ','.join(('0x%02x' % b) for b in host)
        entries.append('    {{%s}, %i}' % (octets, port))
    g.write('static SeedSpec6 %s[] = {\n' % structname)
    g.write(',\n'.join(entries))
    g.write('\n};\n')
def main():
    """Read nodes_main.txt and nodes_test.txt from argv[1] and print the
    generated chainparamsseeds.h header on stdout."""
    if len(sys.argv)<2:
        print(('Usage: %s <path_to_nodes_txt>' % sys.argv[0]), file=sys.stderr)
        exit(1)
    g = sys.stdout
    indir = sys.argv[1]
    # Header guard and provenance banner for the generated file.
    g.write('#ifndef BITCOIN_CHAINPARAMSSEEDS_H\n')
    g.write('#define BITCOIN_CHAINPARAMSSEEDS_H\n')
    g.write('/**\n')
    g.write(' * List of fixed seed nodes for the bitcoin network\n')
    g.write(' * AUTOGENERATED by contrib/seeds/generate-seeds.py\n')
    g.write(' *\n')
    g.write(' * Each line contains a 16-byte IPv6 address and a port.\n')
    g.write(' * IPv4 as well as onion addresses are wrapped inside a IPv6 address accordingly.\n')
    g.write(' */\n')
    # Mainnet and testnet use different default ports.
    with open(os.path.join(indir,'nodes_main.txt'),'r') as f:
        process_nodes(g, f, 'pnSeed6_main', 15714)
    g.write('\n')
    with open(os.path.join(indir,'nodes_test.txt'),'r') as f:
        process_nodes(g, f, 'pnSeed6_test', 25714)
    g.write('#endif // BITCOIN_CHAINPARAMSSEEDS_H\n')

if __name__ == '__main__':
    main()
|
vericoin/vericoin-core
|
contrib/seeds/generate-seeds.py
|
Python
|
mit
| 4,378
|
import json
import argparse
import csv
from datetime import datetime, timedelta
from itertools import izip_longest, izip  # Python 2 itertools

# Convert a RAW-style usage .json file into a CSV of pickup/session rows.
parser = argparse.ArgumentParser(description="Run a script to clean up json files for RAW")#Setting up our Argument Parser
parser.add_argument('-i', '--inputFile', help="input .json file", required="true", type=str)#Adding an input argument
parser.add_argument('-o', '--outputFile', help="output json file / path name", required="true", type=str )

args = vars(parser.parse_args())#Get some variables from parse_args dictionary (directory, output)

jsonInput = args['inputFile']
csvOutput = args['outputFile']

with open(jsonInput, 'rb') as jsonFile, open(csvOutput, 'w') as outFile:
    csvWriter = csv.writer(outFile)
    jsonData = json.load(jsonFile)
    headerRow = ['Pickup No.','Pickup Date and Time', 'Pickup Length In Seconds', 'Session No.','Session Date and Time', 'Session Length In Seconds']
    csvWriter.writerow(headerRow)
    days = jsonData['days']
    pickupIterator = 0
    sessionIterator = 0
    for x in days:
        # Pair pickups and sessions positionally; the shorter list is padded
        # with None so every row is still emitted.
        for i, z in izip_longest(x['pickups'], x['sessions'], fillvalue=None):
            if i is None:
                pickupDate = ' '
                pickupLength = ' '
            else:
                pickupDate = i['date']
                pickupLength = i['lengthInSeconds']
                pickupIterator += 1
            if z is None:
                sessionDate = ' '
                sessionLength = ' '
                # Bug fix: lengthSec was previously left unassigned on this
                # branch, raising NameError for the first padded row (and
                # reusing a stale value afterwards).  Emit a blank cell.
                lengthSec = ' '
            else:
                sessionDate = z['date']
                sessionLength = z['lengthInMinutes']
                lengthInt = int(sessionLength)
                lengthSec = lengthInt * 60
                sessionIterator += 1
            formatting = pickupIterator, pickupDate, pickupLength, sessionIterator, sessionDate, lengthSec
            csvWriter.writerow(formatting)
|
rochester-rcl/rcl-utils
|
pythonScripts/jsonParse.py
|
Python
|
gpl-2.0
| 1,863
|
# vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2015-2018 Alexander Cogneau (acogneau) <alexander.cogneau@gmail.com>:
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
from PyQt5.QtNetwork import QNetworkCookie
from PyQt5.QtCore import QUrl
import pytest
from qutebrowser.browser.webkit import cookies
from qutebrowser.utils import usertypes
from qutebrowser.misc import lineparser, objects
# All tests in this module need the data_tmpdir fixture.
pytestmark = pytest.mark.usefixtures('data_tmpdir')

# Raw Set-Cookie values used as fixtures: two far-future persistent cookies,
# one session cookie (no expiry), and one already-expired cookie.
COOKIE1 = b'foo1=bar; expires=Tue, 01-Jan-2036 08:00:01 GMT'
COOKIE2 = b'foo2=bar; expires=Tue, 01-Jan-2036 08:00:01 GMT'
SESSION_COOKIE = b'foo3=bar'
EXPIRED_COOKIE = b'foo4=bar; expires=Sat, 01-Jan-2000 08:00:01 GMT'
class LineparserSaveStub(lineparser.BaseLineParser):

    """A stub for LineParser's save().

    Attributes:
        data: The data before the write
        saved: The .data before save()
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.saved = []
        self.data = []

    def save(self):
        # Record what would have been written instead of touching the disk.
        self.saved = self.data

    def clear(self):
        # The stub holds no persistent state to clear.
        pass

    def __iter__(self):
        return iter(self.data)

    def __getitem__(self, key):
        return self.data[key]
def test_set_cookies_accept(config_stub, qtbot, monkeypatch):
    """Test setCookiesFromUrl with cookies enabled."""
    monkeypatch.setattr(objects, 'backend', usertypes.Backend.QtWebKit)
    config_stub.val.content.cookies.accept = 'all'
    ram_jar = cookies.RAMCookieJar()
    cookie = QNetworkCookie(b'foo', b'bar')
    url = QUrl('http://example.com/')
    with qtbot.waitSignal(ram_jar.changed):
        assert ram_jar.setCookiesFromUrl([cookie], url)

    # assert the cookies are added correctly
    all_cookies = ram_jar.cookiesForUrl(url)
    assert len(all_cookies) == 1
    saved_cookie = all_cookies[0]
    expected = cookie.name(), cookie.value()
    # Bug fix: the previous `assert saved_cookie.name(), saved_cookie.value() == expected`
    # asserted a non-empty 2-tuple, which is always true; compare the
    # (name, value) pair explicitly instead.
    assert (saved_cookie.name(), saved_cookie.value()) == expected
def test_set_cookies_never_accept(qtbot, config_stub, monkeypatch):
    """setCookiesFromUrl must be a no-op when cookies are rejected."""
    monkeypatch.setattr(objects, 'backend', usertypes.Backend.QtWebKit)
    config_stub.val.content.cookies.accept = 'never'
    jar = cookies.RAMCookieJar()
    target_url = QUrl('http://example.com/')
    with qtbot.assertNotEmitted(jar.changed):
        accepted = jar.setCookiesFromUrl('test', target_url)
    assert not accepted
    assert not jar.cookiesForUrl(target_url)
def test_cookie_jar_init(config_stub, fake_save_manager):
    """Test the CookieJar constructor."""
    jar = cookies.CookieJar(line_parser=[COOKIE1, COOKIE2])
    assert fake_save_manager.add_saveable.called
    # Both cookies from the line parser must end up in the jar.
    stored = jar.allCookies()
    assert len(stored) == 2
    assert [c.toRawForm().data() for c in stored] == [COOKIE1, COOKIE2]
def test_purge_old_cookies(config_stub, fake_save_manager):
    """Expired cookies must be dropped by purge_old_cookies()."""
    jar = cookies.CookieJar(
        line_parser=[COOKIE1, COOKIE2, SESSION_COOKIE, EXPIRED_COOKIE])
    assert len(jar.allCookies()) == 4
    jar.purge_old_cookies()
    # Only the expired cookie disappears; session cookies survive a purge.
    remaining = [c.toRawForm().data() for c in jar.allCookies()]
    assert remaining == [COOKIE1, COOKIE2, SESSION_COOKIE]
def test_save(config_stub, fake_save_manager, monkeypatch, qapp):
    """Saving must skip expired and session cookies."""
    monkeypatch.setattr(lineparser, 'LineParser', LineparserSaveStub)
    jar = cookies.CookieJar()
    jar._lineparser.data = [COOKIE1, COOKIE2, SESSION_COOKIE, EXPIRED_COOKIE]
    jar.parse_cookies()  # sync the jar with the stubbed line parser
    jar.save()
    saved = [cookie.data() for cookie in jar._lineparser.saved]
    assert saved == [COOKIE1, COOKIE2]
def test_cookies_changed_emit(config_stub, fake_save_manager,
                              monkeypatch, qtbot):
    """Disabling cookie storage must emit the jar's changed signal."""
    monkeypatch.setattr(lineparser, 'LineParser', LineparserSaveStub)
    cookie_jar = cookies.CookieJar()
    with qtbot.waitSignal(cookie_jar.changed):
        config_stub.val.content.cookies.store = False
@pytest.mark.parametrize('store_cookies,empty', [(True, False), (False, True)])
def test_cookies_changed(config_stub, fake_save_manager, monkeypatch, qtbot,
                         store_cookies, empty):
    """Toggling content.cookies.store keeps or clears stored cookies."""
    monkeypatch.setattr(lineparser, 'LineParser', LineparserSaveStub)
    cookie_jar = cookies.CookieJar()
    cookie_jar._lineparser.data = [COOKIE1, COOKIE2]
    cookie_jar.parse_cookies()
    config_stub.val.content.cookies.store = store_cookies
    if not empty:
        assert cookie_jar._lineparser.data
    else:
        # Turning storage off wipes both the parser data and the saved copy.
        assert not cookie_jar._lineparser.data
        assert not cookie_jar._lineparser.saved
|
toofar/qutebrowser
|
tests/unit/browser/webkit/test_cookies.py
|
Python
|
gpl-3.0
| 5,483
|
# stdlib imports
import os
import pickle
from operator import itemgetter
import types
import shutil
from cStringIO import StringIO
# numpy imports
import numpy as np
# scikit-learn imports
import sklearn
from sklearn.cross_validation import StratifiedKFold
from sklearn.metrics import roc_auc_score
from sklearn.ensemble import AdaBoostClassifier
from sklearn.tree import DecisionTreeClassifier, export_graphviz
# rootpy imports
from rootpy.extern.tabulartext import PrettyTable
# root_numpy imports
from root_numpy import rec2array, fill_hist
# local imports
from . import log; log = log[__name__]
from . import MMC_MASS, MMC_PT
from .plotting import plot_grid_scores
from . import variables, CACHE_DIR, BDT_DIR
from .systematics import systematic_name
from .grid_search import BoostGridSearchCV
def print_feature_ranking(clf, fields):
    """Log a feature-importance ranking for a trained classifier.

    Emits both a LaTeX tabular and a plain-text PrettyTable, sorted by
    decreasing scikit-learn feature importance.  `fields` maps feature
    indices to field names (also used as keys into variables.VARIABLES
    for the LaTeX titles).
    """
    importances = clf.feature_importances_
    # indices of the features, most important first
    indices = np.argsort(importances)[::-1]
    log.info("Feature ranking:")
    out = StringIO()
    print >> out
    print >> out
    print >> out, r"\begin{tabular}{c|c|c}"
    table = PrettyTable(["Rank", "Variable", "Importance"])
    print >> out, r"\hline\hline"
    print >> out, r"Rank & Variable & Importance\\"
    for f, idx in enumerate(indices):
        table.add_row([f + 1,
            fields[idx],
            '%.3f' % importances[idx]])
        print >> out, r"%d & %s & %.3f\\" % (f + 1,
            variables.VARIABLES[fields[idx]]['title'],
            importances[idx])
    print >> out, r"\end{tabular}"
    print >> out
    print >> out, table.get_string(hrules=1)
    log.info(out.getvalue())
def histogram_scores(hist_template, scores,
                     min_score=None, max_score=None,
                     inplace=False):
    """Fill a histogram with classifier scores.

    `scores` may be:
      * an np.ndarray of scores (no weights),
      * a (scores, weight) tuple for data (all weights must equal 1), or
      * a dict mapping systematic term -> (scores, weight); the 'NOMINAL'
        entry fills the returned histogram and every other term fills a
        clone attached as hist.systematics[sys_term].

    Scores outside (min_score, max_score) are discarded.  Unless
    `inplace` is True, `hist_template` is cloned and left untouched.
    """
    if not inplace:
        hist = hist_template.Clone(name=hist_template.name + "_scores")
        hist.Reset()
    else:
        hist = hist_template
    if min_score is not None:
        log.info("cutting out scores below %f" % min_score)
    if max_score is not None:
        log.info("cutting out scores above %f" % max_score)
    if isinstance(scores, np.ndarray):
        if min_score is not None:
            scores = scores[scores > min_score]
        if max_score is not None:
            scores = scores[scores < max_score]
        fill_hist(hist, scores)
    elif isinstance(scores, tuple):
        # data
        scores, weight = scores
        if min_score is not None:
            scores_idx = scores > min_score
            scores = scores[scores_idx]
            weight = weight[scores_idx]
        if max_score is not None:
            scores_idx = scores < max_score
            scores = scores[scores_idx]
            weight = weight[scores_idx]
        # data events are expected to be unweighted
        assert (weight == 1).all()
        fill_hist(hist, scores)
    elif isinstance(scores, dict):
        # non-data with possible systematics
        # nominal case:
        nom_scores, nom_weight = scores['NOMINAL']
        if min_score is not None:
            scores_idx = nom_scores > min_score
            nom_scores = nom_scores[scores_idx]
            nom_weight = nom_weight[scores_idx]
        if max_score is not None:
            scores_idx = nom_scores < max_score
            nom_scores = nom_scores[scores_idx]
            nom_weight = nom_weight[scores_idx]
        fill_hist(hist, nom_scores, nom_weight)
        # systematics: one clone of the (filled) histogram per term
        sys_hists = {}
        for sys_term, (sys_scores, sys_weight) in scores.items():
            if sys_term == 'NOMINAL':
                continue
            if min_score is not None:
                scores_idx = sys_scores > min_score
                sys_scores = sys_scores[scores_idx]
                sys_weight = sys_weight[scores_idx]
            if max_score is not None:
                scores_idx = sys_scores < max_score
                sys_scores = sys_scores[scores_idx]
                sys_weight = sys_weight[scores_idx]
            sys_hist = hist.Clone(
                name=hist.name + "_" + systematic_name(sys_term))
            sys_hist.Reset()
            fill_hist(sys_hist, sys_scores, sys_weight)
            sys_hists[sys_term] = sys_hist
        hist.systematics = sys_hists
    else:
        raise TypeError("scores not an np.array, tuple or dict")
    return hist
def write_score_hists(f, mass, scores_list, hist_template, no_neg_bins=True):
    """Write per-sample score histograms for every systematic into file `f`.

    `scores_list` is a sequence of (sample, scores_dict) pairs where
    scores_dict maps systematic term -> (scores, weights).  With
    `no_neg_bins`, any bin whose summed content over a systematic's
    histograms is negative is zeroed in all of them (negative bins break
    the limit setting downstream).
    """
    sys_hists = {}
    for samp, scores_dict in scores_list:
        for sys_term, (scores, weights) in scores_dict.items():
            if sys_term == 'NOMINAL':
                suffix = ''
            else:
                # sys_term is a tuple of name components
                suffix = '_' + '_'.join(sys_term)
            hist = hist_template.Clone(
                name=samp.name + ('_{0}'.format(mass)) + suffix)
            fill_hist(hist, scores, weights)
            if sys_term not in sys_hists:
                sys_hists[sys_term] = []
            sys_hists[sys_term].append(hist)
    f.cd()
    for sys_term, hists in sys_hists.items():
        bad_bins = []
        if no_neg_bins:
            # check for negative bins over all systematics and zero them out
            # negative bins cause lots of problem in the limit setting
            # negative bin contents effectively means
            # the same as "no events here..."
            total_hist = sum(hists)
            for bin, content in enumerate(total_hist):
                if content < 0:
                    log.warning("Found negative bin %d (%f) for "
                        "systematic %s" % (
                            bin, content, sys_term))
                    bad_bins.append(bin)
        for hist in hists:
            for bin in bad_bins:
                # zero out bad bins
                hist[bin] = 0.
            hist.Write()
def make_dataset(signals, backgrounds,
                 category, region, fields,
                 cuts=None):
    """Merge signal and background samples into flat training arrays.

    Returns (signal_array, signal_weights, background_array,
    background_weights) with one row per event and one column per field.
    """
    def _merge(samples):
        # collect each sample's record array and weight column, then stack
        arrays, weights = [], []
        for sample in samples:
            rec = sample.merged_records(
                category=category,
                region=region,
                fields=fields,
                cuts=cuts)
            weights.append(rec['weight'])
            arrays.append(rec2array(rec, fields))
        return np.concatenate(arrays), np.concatenate(weights)

    signal_array, signal_weight_array = _merge(signals)
    background_array, background_weight_array = _merge(backgrounds)
    return (signal_array, signal_weight_array,
            background_array, background_weight_array)
def make_partitioned_dataset(signals, backgrounds,
                             category, region, fields,
                             partition_key,
                             cuts=None):
    """Build per-sample (left, right) partitioned arrays and weights.

    Returns (signal_arrs, signal_weight_arrs, background_arrs,
    background_weight_arrs); each list holds one (left, right) tuple per
    sample, partitioned on `partition_key`.
    """
    def _partition(samples):
        arrays, weights = [], []
        for sample in samples:
            left, right = sample.partitioned_records(
                category=category,
                region=region,
                fields=fields,
                cuts=cuts,
                key=partition_key)
            weights.append((left['weight'], right['weight']))
            arrays.append((rec2array(left, fields),
                           rec2array(right, fields)))
        return arrays, weights

    signal_arrs, signal_weight_arrs = _partition(signals)
    background_arrs, background_weight_arrs = _partition(backgrounds)
    return (signal_arrs, signal_weight_arrs,
            background_arrs, background_weight_arrs)
def get_partition(s, sw, b, bw, partition_idx):
    """Select one partition from each per-sample (left, right) pair and
    merge across samples.

    Arguments are the lists returned by make_partitioned_dataset;
    `partition_idx` is 0 (left) or 1 (right).  Returns the merged
    (signal, signal_weights, background, background_weights) arrays.
    """
    pick = itemgetter(partition_idx)
    # Materialize the selections in lists: under Python 3, map() returns an
    # iterator which np.concatenate does not accept.
    s = np.concatenate([pick(pair) for pair in s])
    sw = np.concatenate([pick(pair) for pair in sw])
    b = np.concatenate([pick(pair) for pair in b])
    bw = np.concatenate([pick(pair) for pair in bw])
    return s, sw, b, bw
def prepare_dataset(signal_train, signal_weight_train,
                    background_train, background_weight_train,
                    max_sig=None,
                    max_bkg=None,
                    norm_sig_to_bkg=True,
                    same_size_sig_bkg=True,
                    remove_negative_weights=False):
    """Assemble a shuffled, labelled training set from signal and background.

    Optionally removes negative-weight events, truncates each class to a
    maximum size, equalizes the class sizes by random subsampling, and
    rescales the signal weights to sum to the background weight sum.

    Returns (sample_train, labels_train, sample_weight_train) where the
    labels are 1 for signal and 0 for background.
    """
    if remove_negative_weights:
        # remove samples from the training sample with a negative weight;
        # filter the event arrays first since the weight arrays are the
        # source of the masks
        signal_train = signal_train[signal_weight_train >= 0]
        background_train = background_train[background_weight_train >= 0]
        signal_weight_train = signal_weight_train[signal_weight_train >= 0]
        background_weight_train = background_weight_train[background_weight_train >= 0]
        log.info("removing events with negative weights")
    if max_sig is not None and max_sig < len(signal_train):
        # BUGFIX: previously sliced with the undefined name `max_sig_train`,
        # raising NameError whenever max_sig was given.
        subsample = np.random.permutation(len(signal_train))[:max_sig]
        signal_train = signal_train[subsample]
        signal_weight_train = signal_weight_train[subsample]
        log.info("signal stats reduced to user-specified maximum")
    if max_bkg is not None and max_bkg < len(background_train):
        # BUGFIX: same NameError with the undefined `max_bkg_train`.
        subsample = np.random.permutation(len(background_train))[:max_bkg]
        background_train = background_train[subsample]
        background_weight_train = background_weight_train[subsample]
        log.info("background stats reduced to user-specified maximum")
    if same_size_sig_bkg:
        if len(background_train) > len(signal_train):
            # random subsample of background so it's the same size as signal
            subsample = np.random.permutation(
                len(background_train))[:len(signal_train)]
            background_train = background_train[subsample]
            background_weight_train = background_weight_train[subsample]
            log.info("number of background events reduced "
                "to match number of signal events")
        elif len(background_train) < len(signal_train):
            # random subsample of signal so it's the same size as background
            subsample = np.random.permutation(
                len(signal_train))[:len(background_train)]
            signal_train = signal_train[subsample]
            signal_weight_train = signal_weight_train[subsample]
            log.info("number of signal events reduced "
                "to match number of background events")
    if norm_sig_to_bkg:
        # normalize signal weights so their sum matches the background sum
        signal_weight_train *= (
            background_weight_train.sum() / signal_weight_train.sum())
        log.info("normalizing signal to match background")
    log.info("training Samples:")
    log.info("signal: %d events, %s features" % signal_train.shape)
    log.info("sum(signal weights): %f" % signal_weight_train.sum())
    log.info("background: %d events, %s features" % background_train.shape)
    log.info("sum(background weights): %f" % background_weight_train.sum())
    log.info("total: %d events" % (
        signal_train.shape[0] +
        background_train.shape[0]))
    sample_train = np.concatenate((background_train, signal_train))
    sample_weight_train = np.concatenate(
        (background_weight_train, signal_weight_train))
    labels_train = np.concatenate(
        (np.zeros(len(background_train)), np.ones(len(signal_train))))
    # random permutation of the combined training sample
    perm = np.random.permutation(len(labels_train))
    sample_train = sample_train[perm]
    sample_weight_train = sample_weight_train[perm]
    labels_train = labels_train[perm]
    return sample_train, labels_train, sample_weight_train
class Classifier(object):
    """Two-fold cross-evaluated BDT for one (mass, category) hypothesis.

    Two AdaBoost classifiers are trained, one per event-number partition;
    each classifier is only ever applied to the partition it was NOT
    trained on (see load()/train()/classify()).
    """

    # minimal list of spectators
    SPECTATORS = [
        MMC_PT,
        MMC_MASS,
    ]

    def __init__(self,
                 mass,
                 fields,
                 category,
                 region,
                 cuts=None,
                 spectators=None,
                 output_suffix="",
                 clf_output_suffix="",
                 partition_key='EventNumber',
                 transform=True,
                 mmc=True):
        # copy so the caller's field list is not mutated below
        fields = fields[:]
        if not mmc:
            # drop the MMC mass variable when it is excluded from training
            try:
                fields.remove(MMC_MASS)
            except ValueError:
                pass
        self.mass = mass
        self.fields = fields
        self.category = category
        self.region = region
        self.spectators = spectators
        self.output_suffix = output_suffix
        self.clf_output_suffix = clf_output_suffix
        self.partition_key = partition_key
        self.transform = transform
        self.mmc = mmc
        self.background_label = 0
        self.signal_label = 1
        # NOTE(review): the `cuts` argument is accepted but never stored;
        # cuts are instead passed explicitly to train()/classify() — confirm
        # this parameter is intentionally unused here.
        if spectators is None:
            spectators = []
        # merge in minimal list of spectators
        for spec in Classifier.SPECTATORS:
            if spec not in spectators and spec not in fields:
                spectators.append(spec)
        self.all_fields = fields + spectators
        assert 'weight' not in fields
        # classifiers for the left and right partitions
        # each trained on the opposite partition
        self.clfs = None

    def binning(self, year, overflow=None):
        """Return the optimized score binning for this category/mass/year.

        If `overflow` is given, the outer bin edges are widened by that
        amount on each side.
        """
        # get the binning (see the optimize-binning script)
        with open(os.path.join(CACHE_DIR, 'binning/binning_{0}_{1}_{2}.pickle'.format(
                self.category.name, self.mass, year % 1000))) as f:
            binning = pickle.load(f)
        if overflow is not None:
            binning[0] -= overflow
            binning[-1] += overflow
        return binning

    def load(self, swap=False):
        """
        Load previously trained classifiers from BDT_DIR, returning True on
        success and False if any pickle is missing.

        If swap is True then use the internal classifiers on the "wrong"
        partitions. This is used when demonstrating stability in data. The
        shape of the data distribution should be the same for both classifiers.
        """
        use_cache = True
        # attempt to load existing classifiers
        clfs = [None, None]
        for partition_idx in range(2):
            category_name = self.category.get_parent().name
            clf_filename = os.path.join(BDT_DIR,
                'clf_{0}_{1}{2}_{3}.pickle'.format(
                    category_name, self.mass,
                    self.clf_output_suffix, partition_idx))
            print clf_filename
            log.info("attempting to open %s ..." % clf_filename)
            if os.path.isfile(clf_filename):
                # use a previously trained classifier
                log.info("found existing classifier in %s" % clf_filename)
                with open(clf_filename, 'r') as f:
                    clf = pickle.load(f)
                out = StringIO()
                print >> out
                print >> out
                print >> out, clf
                log.info(out.getvalue())
                print_feature_ranking(clf, self.fields)
                if swap:
                    # DANGER
                    log.warning("will apply classifiers on swapped partitions")
                    clfs[partition_idx] = clf
                else:
                    # apply each classifier on the opposite partition
                    clfs[(partition_idx + 1) % 2] = clf
            else:
                log.warning("could not open %s" % clf_filename)
                use_cache = False
                break
        if use_cache:
            self.clfs = clfs
            log.info("using previously trained classifiers")
            return True
        else:
            log.warning(
                "unable to load previously trained "
                "classifiers; train new ones")
            return False

    def train(self,
              signals,
              backgrounds,
              cuts=None,
              max_sig=None,
              max_bkg=None,
              norm_sig_to_bkg=True,
              same_size_sig_bkg=False,
              remove_negative_weights=False,
              max_trees=200,
              min_trees=1,
              learning_rate=0.1,
              max_fraction=0.3,
              min_fraction=0.001,
              min_fraction_steps=200,
              cv_nfold=10,
              n_jobs=-1,
              dry_run=False):
        """
        Determine best BDTs on left and right partitions. Each BDT will then be
        used on the other partition.

        The grid search over min_fraction_leaf and the number of trees is
        performed on partition 0 only; partition 1 reuses the best
        hyper-parameters via sklearn.clone.
        """
        signal_arrs, signal_weight_arrs, \
        background_arrs, background_weight_arrs = make_partitioned_dataset(
            signals, backgrounds,
            category=self.category,
            region=self.region,
            fields=self.fields,
            cuts=cuts,
            partition_key=self.partition_key)
        if not dry_run:
            self.clfs = [None, None]
        for partition_idx in range(2):
            clf_filename = os.path.join(BDT_DIR,
                'clf_{0}_{1}{2}_{3}'.format(
                    self.category.name, self.mass,
                    self.clf_output_suffix, partition_idx))
            signal_train, signal_weight_train, \
            background_train, background_weight_train = get_partition(
                signal_arrs, signal_weight_arrs,
                background_arrs, background_weight_arrs,
                partition_idx)
            sample_train, labels_train, sample_weight_train = prepare_dataset(
                signal_train, signal_weight_train,
                background_train, background_weight_train,
                max_sig=max_sig,
                max_bkg=max_bkg,
                norm_sig_to_bkg=norm_sig_to_bkg,
                same_size_sig_bkg=same_size_sig_bkg,
                remove_negative_weights=remove_negative_weights)
            if dry_run:
                return
            log.info("training a new classifier...")
            if partition_idx == 0:
                # grid search params
                # min_samples_leaf
                #min_leaf_high = int((sample_train.shape[0] / 8) *
                #    (cv_nfold - 1.) / cv_nfold)
                #min_leaf_low = max(10, int(min_leaf_high / 100.))
                #min_leaf_step = max((min_leaf_high - min_leaf_low) / 100, 1)
                #min_samples_leaf = range(
                #    min_leaf_low, min_leaf_high, min_leaf_step)
                # min_fraction_leaf
                min_fraction_leaf = np.linspace(
                    min_fraction, max_fraction, min_fraction_steps)
                grid_params = {
                    #'base_estimator__min_samples_leaf': min_samples_leaf,
                    'base_estimator__min_fraction_leaf': min_fraction_leaf,
                }
                # create a BDT
                clf = AdaBoostClassifier(
                    DecisionTreeClassifier(),
                    learning_rate=learning_rate,
                    algorithm='SAMME.R',
                    random_state=0)
                # more efficient grid-search for boosting
                grid_clf = BoostGridSearchCV(
                    clf, grid_params,
                    max_n_estimators=max_trees,
                    min_n_estimators=min_trees,
                    #score_func=accuracy_score,
                    score_func=roc_auc_score, # area under the ROC curve
                    cv=StratifiedKFold(labels_train, cv_nfold),
                    n_jobs=n_jobs)
                #grid_clf = GridSearchCV(
                #    clf, grid_params,
                #    score_func=accuracy_score,
                #    cv = StratifiedKFold(labels_train, cv_nfold),
                #    n_jobs=n_jobs)
                log.info("")
                log.info("using a %d-fold cross validation" % cv_nfold)
                log.info("performing a grid search over these parameter values:")
                for param, values in grid_params.items():
                    log.info('{0} {1}'.format(param.split('__')[-1], values))
                log.info("Minimum number of trees: %d" % min_trees)
                log.info("Maximum number of trees: %d" % max_trees)
                log.info("")
                log.info("training new classifiers ...")
                # perform the cross-validated grid-search
                grid_clf.fit(
                    sample_train, labels_train,
                    sample_weight=sample_weight_train)
                clf = grid_clf.best_estimator_
                grid_scores = grid_clf.grid_scores_
                log.info("Best score: %f" % grid_clf.best_score_)
                log.info("Best Parameters:")
                log.info(grid_clf.best_params_)
                # plot a grid of the scores
                plot_grid_scores(
                    grid_scores,
                    best_point={
                        'base_estimator__min_fraction_leaf':
                        clf.base_estimator.min_fraction_leaf,
                        'n_estimators':
                        clf.n_estimators},
                    params={
                        'base_estimator__min_fraction_leaf':
                        'leaf fraction',
                        'n_estimators':
                        'trees'},
                    name=(self.category.name +
                          ("_{0}".format(self.mass)) +
                          self.output_suffix +
                          ("_{0}".format(partition_idx))))
                # save grid scores
                with open('{0}_grid_scores.pickle'.format(clf_filename), 'w') as f:
                    pickle.dump(grid_scores, f)
                # scale up the min-leaf and retrain on the whole set
                #min_samples_leaf = clf.base_estimator.min_samples_leaf
                #clf = sklearn.clone(clf)
                #clf.base_estimator.min_samples_leaf = int(
                #    min_samples_leaf *
                #    cv_nfold / float(cv_nfold - 1))
                #clf.fit(sample_train, labels_train,
                #    sample_weight=sample_weight_train)
                #log.info("After scaling up min_leaf")
                #out = StringIO()
                #print >> out
                #print >> out
                #print >> out, clf
                #log.info(out.getvalue())
            else: # training on the other partition
                log.info("training a new classifier ...")
                # use same params as in first partition
                clf = sklearn.clone(clf)
            out = StringIO()
            print >> out
            print >> out
            print >> out, clf
            log.info(out.getvalue())
            clf.fit(sample_train, labels_train,
                sample_weight=sample_weight_train)
            # export to graphviz dot format
            if os.path.isdir(clf_filename):
                shutil.rmtree(clf_filename)
            os.mkdir(clf_filename)
            # NOTE(review): trees are trained on self.fields but labelled
            # with self.all_fields (fields + spectators) — confirm the
            # graphviz feature names line up with the training columns.
            for itree, tree in enumerate(clf):
                export_graphviz(
                    tree,
                    out_file=os.path.join(
                        clf_filename,
                        'tree_{0:04d}.dot'.format(itree)),
                    feature_names=self.all_fields)
            with open('{0}.pickle'.format(clf_filename), 'w') as f:
                pickle.dump(clf, f)
            print_feature_ranking(clf, self.fields)
            # store for application on the opposite partition
            self.clfs[(partition_idx + 1) % 2] = clf

    def classify(self, sample, category, region,
                 cuts=None, systematic='NOMINAL'):
        """Apply the trained BDTs to `sample` and return (scores, weight)
        arrays aligned with the sample's original event ordering."""
        if self.clfs == None:
            raise RuntimeError("you must train the classifiers first")
        partitions = sample.partitioned_records(
            category=category,
            region=region,
            fields=self.fields,
            cuts=cuts,
            systematic=systematic,
            num_partitions=2,
            return_idx=True,
            key=self.partition_key)
        score_idx = [[], []]
        for i, partition in enumerate(partitions):
            for rec, idx in partition:
                weight = rec['weight']
                arr = rec2array(rec, self.fields)
                # each classifier is never used on the partition that trained it
                scores = self.clfs[i].decision_function(arr)
                score_idx[i].append((idx, scores, weight))
        # must preserve order of scores wrt the other fields!
        # merge the scores and weights according to the idx
        merged_scores = []
        merged_weight = []
        for left, right in zip(*score_idx):
            left_idx, left_scores, left_weight = left
            right_idx, right_scores, right_weight = right
            insert_idx = np.searchsorted(left_idx, right_idx)
            scores = np.insert(left_scores, insert_idx, right_scores)
            weight = np.insert(left_weight, insert_idx, right_weight)
            merged_scores.append(scores)
            merged_weight.append(weight)
        scores = np.concatenate(merged_scores)
        weight = np.concatenate(merged_weight)
        if self.transform:
            log.info("classifier scores are transformed")
            if isinstance(self.transform, types.FunctionType):
                # user-defined transformation
                scores = self.transform(scores)
            else:
                # logistic tranformation used by TMVA (MethodBDT.cxx)
                scores = -1 + 2.0 / (1.0 +
                    np.exp(-self.clfs[0].n_estimators *
                        self.clfs[0].learning_rate * scores / 1.5))
        return scores, weight
|
yukisakurai/hhana
|
mva/classify.py
|
Python
|
gpl-3.0
| 25,774
|
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=missing-docstring
# pylint: disable=invalid-name
import unittest
from citest.base import (
ExecutionContext,
JsonSnapshotHelper)
import citest.json_contract as jc
import citest.json_predicate as jp
# Global call log appended to by FakeObservationVerifier.__call__ so tests
# can assert which verifiers actually ran (short-circuit behavior).
_called_verifiers = []

_TEST_FOUND_ERROR_COMMENT = 'Found error.'
class TestObsoleteObservationFailureVerifier(jc.ObservationFailureVerifier):
  """Failure-verifier stub that recognizes exactly one error message."""

  def __init__(self, title, expect):
    super(TestObsoleteObservationFailureVerifier, self).__init__(title)
    self.__expect = expect

  def _error_comment_or_none(self, error):
    # Only the configured message counts as "found".
    if error.args[0] != self.__expect:
      return None
    return _TEST_FOUND_ERROR_COMMENT
def _makeObservationVerifyResult(
    valid, observation=None,
    good_results=None, bad_results=None, failed_constraints=None):
  """Build an ObservationVerifyResult, defaulting each piece for tests."""
  placeholder = [jp.PredicateResult(valid=valid)]
  good_results = good_results or (placeholder if valid else [])
  bad_results = bad_results or ([] if valid else placeholder)
  failed_constraints = failed_constraints or []
  observation = observation or jc.Observation()

  def _attempts(results):
    # Wrap raw predicate results as map attempts against the observation.
    return [jp.ObjectResultMapAttempt(observation, result)
            for result in results]

  return jc.ObservationVerifyResult(
      valid=valid, observation=observation,
      good_results=_attempts(good_results),
      bad_results=_attempts(bad_results),
      failed_constraints=failed_constraints)
class FakeObservationVerifier(jc.ObservationVerifier):
  """Verifier double returning a canned result and recording each call."""

  def __init__(self, title, dnf_verifier, result):
    super(FakeObservationVerifier, self).__init__(
        title=title, dnf_verifiers=dnf_verifier)
    self.__result = result

  def __call__(self, context, observation):
    # Record invocation order so tests can assert short-circuiting.
    _called_verifiers.append(self)
    return self.__result
class ObservationVerifierTest(unittest.TestCase):
def assertEqual(self, expect, have, msg=''):
if not msg:
msg = 'EXPECTED\n{0!r}\nGOT\n{1!r}'.format(expect, have)
JsonSnapshotHelper.AssertExpectedValue(expect, have, msg)
def test_result_builder_add_good_result(self):
context = ExecutionContext()
observation = jc.Observation()
observation.add_object('A')
pred = jp.PathPredicate(None, jp.STR_EQ('A'))
builder = jc.ObservationVerifyResultBuilder(observation)
map_pred = jp.MapPredicate(pred)
map_result = map_pred(context, observation.objects)
builder.add_map_result(map_result)
verify_results = builder.build(True)
self.assertTrue(verify_results)
self.assertEqual(observation, verify_results.observation)
self.assertEqual([], verify_results.bad_results)
self.assertEqual([], verify_results.failed_constraints)
self.assertEqual(map_result.good_object_result_mappings,
verify_results.good_results)
def test_result_builder_add_bad_result(self):
context = ExecutionContext()
observation = jc.Observation()
observation.add_object('A')
pred = jp.PathPredicate(None, jp.STR_EQ('B'))
builder = jc.ObservationVerifyResultBuilder(observation)
map_pred = jp.MapPredicate(pred)
map_result = map_pred(context, observation.objects)
builder.add_map_result(map_result)
verify_results = builder.build(False)
self.assertFalse(verify_results)
self.assertEqual(observation, verify_results.observation)
self.assertEqual([], verify_results.good_results)
self.assertEqual([pred], verify_results.failed_constraints)
self.assertEqual(map_result.bad_object_result_mappings,
verify_results.bad_results)
def test_result_builder_add_mixed_results(self):
context = ExecutionContext()
observation = jc.Observation()
observation.add_object('GOOD')
observation.add_object('BAD')
pred = jp.PathPredicate(None, jp.STR_EQ('GOOD'))
builder = jc.ObservationVerifyResultBuilder(observation)
map_pred = jp.MapPredicate(pred)
map_result = map_pred(context, observation.objects)
builder.add_map_result(map_result)
verify_results = builder.build(False)
self.assertFalse(verify_results)
self.assertEqual(observation, verify_results.observation)
self.assertEqual(map_result.good_object_result_mappings,
verify_results.good_results)
self.assertEqual([], verify_results.failed_constraints)
self.assertEqual(map_result.bad_object_result_mappings,
verify_results.bad_results)
  def test_result_observation_verifier_conjunction_ok(self):
    """All AND'd verifiers pass: every verifier runs; result is valid."""
    context = ExecutionContext()
    builder = jc.ObservationVerifierBuilder(title='Test')
    verifiers = []
    pred_results = []
    for i in range(3):
      this_result = jp.PredicateResult(True, comment='Pred {0}'.format(i))
      pred_results.append(this_result)
      result = _makeObservationVerifyResult(
          valid=True, good_results=[this_result])
      fake_verifier = FakeObservationVerifier(
          title=i, dnf_verifier=[], result=result)
      verifiers.append(fake_verifier)
      builder.AND(fake_verifier)

    # verify build can work multiple times
    self.assertEqual(builder.build(), builder.build())
    verifier = builder.build()
    # AND collects all verifiers into a single conjunction clause
    self.assertEqual([verifiers], verifier.dnf_verifiers)

    expect = _makeObservationVerifyResult(True, good_results=pred_results)

    global _called_verifiers
    _called_verifiers = []
    got = verifier(context, jc.Observation())
    self.assertEqual(expect, got)
    # with no failure, every verifier was invoked in order
    self.assertEqual(verifiers, _called_verifiers)
  def test_result_observation_verifier_conjunction_failure_aborts_early(self):
    """The first failing AND'd verifier short-circuits the conjunction."""
    context = ExecutionContext()
    builder = jc.ObservationVerifierBuilder(title='Test')
    verifiers = []
    results = []
    pred_results = [jp.PredicateResult(False, comment='Result %d' % i)
                    for i in range(3)]
    for i in range(3):
      result = _makeObservationVerifyResult(
          valid=False, bad_results=[pred_results[i]])
      fake_verifier = FakeObservationVerifier(
          title=i, dnf_verifier=[], result=result)
      verifiers.append(fake_verifier)
      results.append(result)
      builder.AND(fake_verifier)

    # verify build can work multiple times
    self.assertEqual(builder.build(), builder.build())
    verifier = builder.build()
    self.assertEqual([verifiers], verifier.dnf_verifiers)

    # only the first failure is reported since evaluation stops there
    expect = _makeObservationVerifyResult(
        False, bad_results=[pred_results[0]])

    global _called_verifiers
    _called_verifiers = []
    got = verifier(context, jc.Observation())
    self.assertEqual(expect, got)
    # only the first verifier should have been invoked
    self.assertEqual(verifiers[:1], _called_verifiers)
  def test_result_observation_verifier_disjunction_success_aborts_early(self):
    """The first passing OR'd verifier short-circuits the disjunction."""
    context = ExecutionContext()
    builder = jc.ObservationVerifierBuilder(title='Test')
    verifiers = []
    results = []
    pred_results = [jp.PredicateResult(False, comment='Result %d' % i)
                    for i in range(2)]
    for i in range(2):
      result = _makeObservationVerifyResult(
          valid=True, good_results=[pred_results[i]])
      fake_verifier = FakeObservationVerifier(
          title=i, dnf_verifier=[], result=result)
      verifiers.append(fake_verifier)
      results.append(result)
      builder.OR(fake_verifier)

    verifier = builder.build()
    # OR puts each verifier in its own disjunction clause
    self.assertEqual([verifiers[0:1], verifiers[1:2]], verifier.dnf_verifiers)

    expect = _makeObservationVerifyResult(True, good_results=[pred_results[0]])

    global _called_verifiers
    _called_verifiers = []
    got = verifier(context, jc.Observation())
    self.assertEqual(expect, got)
    # only the first verifier should have run before success
    self.assertEqual(verifiers[:1], _called_verifiers)
  def test_result_observation_verifier_disjunction_failure(self):
    """When every OR'd verifier fails, all run and all failures accumulate."""
    context = ExecutionContext()
    observation = jc.Observation()
    builder = jc.ObservationVerifierBuilder(title='Test')
    verifiers = []
    results = []
    pred_results = [jp.PredicateResult(False, comment='Result %d' % i)
                    for i in range(2)]
    for i in range(2):
      result = _makeObservationVerifyResult(observation=observation,
          valid=False, bad_results=[pred_results[i]])
      fake_verifier = FakeObservationVerifier(
          title=i, dnf_verifier=[], result=result)
      verifiers.append(fake_verifier)
      results.append(result)
      builder.OR(fake_verifier)

    verifier = builder.build()
    self.assertEqual([verifiers[0:1], verifiers[1:2]], verifier.dnf_verifiers)

    # both failures appear in the aggregated result
    expect = _makeObservationVerifyResult(
        False, observation=observation, bad_results=pred_results)

    global _called_verifiers
    _called_verifiers = []
    got = verifier(context, observation)
    self.assertEqual(expect, got)
    # every verifier ran since no clause succeeded
    self.assertEqual(verifiers, _called_verifiers)
  def test_obsolete_observation_failure_ok(self):
    """An expected observation error is reported as a valid (good) result."""
    error_text = 'the error'
    context = ExecutionContext()
    observation = jc.Observation()
    error = ValueError(error_text)
    observation.add_error(error)

    failure_verifier = TestObsoleteObservationFailureVerifier(
        'Test', error_text)
    failure_pred_result = jc.ObservationFailedError([error], valid=True)

    expect_failure = jc.ObservationVerifyResult(
        valid=True, observation=observation,
        good_results=[jp.ObjectResultMapAttempt(observation,
                                                failure_pred_result)],
        bad_results=[], failed_constraints=[],
        comment=_TEST_FOUND_ERROR_COMMENT)
    got = failure_verifier(context, observation)
    self.assertEqual(expect_failure, got)

    # the same verifier wrapped in a builder behaves identically
    builder = jc.ObservationVerifierBuilder(title='Test')
    builder.EXPECT(failure_verifier)
    verifier = builder.build()

    expect = jc.ObservationVerifyResult(
        valid=True, observation=observation,
        good_results=expect_failure.good_results,
        bad_results=[], failed_constraints=[])
    got = verifier(context, observation)
    self.assertEqual(expect, got)
  def test_observation_failure_ok(self):
    """The non-obsolete path: match observation errors via a predicate."""
    error_text = 'the error'
    context = ExecutionContext()
    observation = jc.Observation()
    error = ValueError(error_text)
    observation.add_error(error)

    exception_pred = jp.ExceptionMatchesPredicate(
        ValueError, regex=error_text)
    builder = jc.ObservationVerifierBuilder(title='Test')
    builder.EXPECT(jc.ObservationErrorPredicate(jp.LIST_MATCHES([exception_pred])))
    failure_verifier = builder.build()

    observation_predicate_result = jc.ObservationPredicateResult(
        True, observation, jp.LIST_MATCHES([exception_pred]),
        jp.LIST_MATCHES([exception_pred])(context, [error]))

    expect_failure = jc.ObservationVerifyResult(
        True, observation,
        good_results=[observation_predicate_result],
        bad_results=[], failed_constraints=[])
    got = failure_verifier(context, observation)
    self.assertEqual(expect_failure, got)
  def test_obsolete_observation_failure_not_ok(self):
    """A failure verifier reports invalid when a *different* error occurred."""
    error_text = 'the error'
    context = ExecutionContext()
    observation = jc.Observation()
    error = ValueError('not the error')
    observation.add_error(error)
    failure_verifier = TestObsoleteObservationFailureVerifier(
        'Test', error_text)
    # Not finding the expected error yields an invalid result whose comment
    # comes from the verifier's own _error_not_found_comment helper.
    comment = failure_verifier._error_not_found_comment(observation)
    failure_pred_result = jp.PredicateResult(valid=False, comment=comment)
    expect_failure = jc.ObservationVerifyResult(
        valid=False, observation=observation,
        bad_results=[jp.ObjectResultMapAttempt(observation,
                                               failure_pred_result)],
        good_results=[], failed_constraints=[],
        comment=comment)
    self.assertEqual(expect_failure, failure_verifier(context, observation))
    # Wrapping in a builder-produced verifier propagates the bad results
    # and the overall invalidity (comment is dropped in the composite).
    builder = jc.ObservationVerifierBuilder(title='Test Verifier')
    builder.EXPECT(failure_verifier)
    verifier = builder.build()
    expect = jc.ObservationVerifyResult(
        valid=False, observation=observation,
        bad_results=expect_failure.bad_results,
        good_results=[], failed_constraints=[])
    got = verifier(context, observation)
    self.assertEqual(expect, got)
  def test_obsolete_observation_failure_or_found(self):
    """OR-ing a failing failure-verifier with a succeeding one yields valid."""
    context = ExecutionContext()
    observation = jc.Observation()
    observation.add_error(ValueError('not the error'))
    # This verifier will not find 'NotFound' in the observed error text.
    failure_verifier = TestObsoleteObservationFailureVerifier(
        'Verify', 'NotFound')
    comment = failure_verifier._error_not_found_comment(observation)
    # NOTE(review): failure_result is assigned but never used below.
    failure_result = jp.PredicateResult(valid=False, comment=comment)
    # We've already established this result is what we expect
    bad_observation_result = failure_verifier(context, observation)
    success_pred_result = jp.PredicateResult(valid=True)
    good_observation_result = _makeObservationVerifyResult(
        valid=True,
        good_results=[success_pred_result],
        observation=observation)
    # A fake verifier that unconditionally returns the good result above.
    success_verifier = FakeObservationVerifier(
        'Found', dnf_verifier=[], result=good_observation_result)
    builder = jc.ObservationVerifierBuilder(title='Observation Verifier')
    builder.EXPECT(failure_verifier).OR(success_verifier)
    verifier = builder.build()
    # The disjunction is valid overall, but the expected result retains the
    # failed branch's bad results alongside the succeeding branch's good ones.
    expect = jc.ObservationVerifyResult(
        valid=True, observation=observation,
        bad_results=bad_observation_result.bad_results,
        good_results=good_observation_result.good_results,
        failed_constraints=[])
    got = verifier(context, observation)
    self.assertEqual(expect, got)
if __name__ == '__main__':
  # Allow running this test module directly with the unittest runner.
  unittest.main()
|
google/citest
|
tests/json_contract/observation_verifier_test.py
|
Python
|
apache-2.0
| 14,132
|
#! /usr/bin/python
# Copyright (c) 2014 Kyle Delaney
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# Neither the name of the project's author nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# 'AS IS' AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
# TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from __future__ import print_function
import time
from functools import partial
import logging
import os.path
import argparse
import subprocess
#nonstandard
import serial
#local
from text_display import LCD
import mediaplayer_mpris2 as player
from weather_clock import WeatherClock
# NOTE(review): TIME_PERIOD is not referenced anywhere in the visible code --
# confirm it is unused before removing.
TIME_PERIOD = 5
# Seconds between weather/clock refreshes (assigned to WeatherClock.interval).
TIME_INTERVAL = 30
# Seconds to sleep between main-loop polls.
UPDATE_INTERVAL = 0.06
if __name__ == '__main__':
    # Command-line interface: city/state select the Weather Underground feed,
    # --port selects the Arduino's serial device.
    parser = argparse.ArgumentParser(description=
        'Displays Banshee status, time, date, and weather on the Arduino-controlled display.')
    parser.add_argument('city', help='City portion of Weather Underground URL')
    parser.add_argument('state', help='Two-letter state code part of Weather Underground URL')
    parser.add_argument('--port', default='/dev/ttyACM0', help='Serial port that Arduino is attached to')
    args = parser.parse_args()
    logging.basicConfig(filename=os.path.expanduser('~/display_daemon.py.log'), level=logging.DEBUG, format='[%(asctime)s] %(message)s')
    arduino = None
    lcd = None
    weather_clock = None
    media_player = None
    try:
        # Configure (but do not yet open) the serial port; the main loop
        # opens it lazily so recover() can also re-open it after failures.
        arduino = serial.Serial()
        arduino.port = args.port
        arduino.baudrate = 9600
        arduino.timeout = 1
        lcd = LCD(arduino)

        def info_changed(current_time, temperature, conditions):
            # Show "HH:MMam Day DD  NN°F Conditions" on the LCD's artist line.
            lcd.change_artist('{} {}°F {}'.format(time.strftime('%I:%M%p %a %d', time.localtime(current_time)), temperature, conditions))
        weather_clock = WeatherClock('http://rss.wunderground.com/auto/rss_full/{}/{}.xml?units=english'.format(args.state, args.city))
        weather_clock.interval = TIME_INTERVAL
        weather_clock.handler = info_changed

        def player_changed(state, title, artist, album):
            # Mirror the media player on the title line; blank when not playing.
            if media_player.playing:
                lcd.change_title('{} - {}'.format(title, artist))
            else:
                lcd.change_title('')
        media_player = player.Status()
        media_player.attach_listener(player_changed)

        def recover():
            # Drop the serial connection and let the device settle; the main
            # loop re-opens the port on its next pass.
            arduino.close()
            time.sleep(3)
    except Exception:
        # BUG FIX: this was a bare ``except:`` which also swallowed
        # SystemExit/KeyboardInterrupt raised during startup.
        logging.exception('Error while starting up, cannot continue.')
        # BUG FIX: ``exit()`` is the site-module helper and may be absent;
        # raise SystemExit explicitly instead.
        raise SystemExit(1)
    logging.info('Started up.')
    while True:
        logging.getLogger().handlers[0].flush()
        try:
            if not arduino.isOpen():
                arduino.open()
                time.sleep(1)  # Give Arduino some time to setup.
            media_player.poll()
            try:
                weather_clock.poll()
            except Exception:
                logging.exception("Error getting weather/time data, possibly malformed rss. ({})".format(weather_clock._weather._url))
        except serial.SerialException:
            logging.error('Encountered serial error, will wait and retry.')
            recover()
        except subprocess.CalledProcessError as e:
            logging.error('"{}" encountered error: "{}"; will wait and retry.'.format(e.cmd, e.output))
            recover()
        except OSError as e:
            logging.error('Encountered an OS error, likely a serial error: "{}"; will wait and retry.'.format(e.strerror))
            recover()
        except Exception:
            # BUG FIX: previously a bare ``except:`` -- it also caught
            # KeyboardInterrupt, making the daemon impossible to stop with
            # Ctrl-C. ``except Exception`` keeps the retry behaviour for
            # real errors while letting interrupts propagate.
            logging.exception('Unexpected error; will wait and retry.')
            recover()
        time.sleep(UPDATE_INTERVAL)
|
DrKylstein/Media-Gizmo
|
display_daemon.py
|
Python
|
bsd-3-clause
| 4,946
|
# -*- coding: utf-8 -*-
# Copyright 2015 Spanish National Research Council
# Copyright 2016 LIP - Lisbon
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import uuid
import mock
import six
import webob
from ooi.api import helpers
from ooi.tests import base
from ooi.tests import fakes
import webob.exc
class TestIDGetter(base.TestCase):
    """Tests for the helpers that split resource URLs into (base, id)."""

    def test_resolve_id_relative_url(self):
        resource = uuid.uuid4().hex
        base = "http://foobar.com/foo"
        parsed_base, parsed_id = helpers._resolve_id(base, resource)
        self.assertEqual(base, parsed_base)
        self.assertEqual(resource, parsed_id)

    def test_resolve_id_absolute(self):
        resource_id = uuid.uuid4().hex
        base = "http://foobar.com/foo"
        parsed_base, parsed_id = helpers._resolve_id(base, "/%s" % resource_id)
        self.assertEqual("http://foobar.com/", parsed_base)
        self.assertEqual(resource_id, parsed_id)

    def test_resolve_id_no_resource_url(self):
        base = "http://foobar.com/foo"
        parsed_base, parsed_id = helpers._resolve_id(base, "")
        self.assertEqual(base, parsed_base)
        self.assertEqual("", parsed_id)

    def test_get_id_no_kind_relative(self):
        request = webob.Request.blank('/foo')
        resource = uuid.uuid4().hex
        parsed_base, parsed_id = helpers.get_id_with_kind(request, resource)
        self.assertEqual('%s%s' % (request.application_url, '/foo'),
                         parsed_base)
        self.assertEqual(resource, parsed_id)

    def test_get_id_no_kind_absolute(self):
        request = webob.Request.blank('/foo')
        resource_id = uuid.uuid4().hex
        parsed_base, parsed_id = helpers.get_id_with_kind(
            request, "/bar/%s" % resource_id)
        self.assertEqual('%s/bar' % request.application_url, parsed_base)
        self.assertEqual(resource_id, parsed_id)

    def test_get_id_kind_matching(self):
        kind = mock.MagicMock()
        kind.location = "foo/"
        request = webob.Request.blank("/foo")
        resource = uuid.uuid4().hex
        parsed_base, parsed_id = helpers.get_id_with_kind(
            request, resource, kind)
        self.assertEqual("%s%s" % (request.application_url, "/foo"),
                         parsed_base)
        self.assertEqual(resource, parsed_id)

    def test_get_id_kind_not_matching(self):
        kind = mock.MagicMock()
        kind.location = "foo/"
        request = webob.Request.blank("/foo")
        from ooi import exception
        self.assertRaises(exception.Invalid,
                          helpers.get_id_with_kind,
                          request, "/bar/baz", kind)
class TestExceptionHelper(base.TestCase):
    """Checks the mapping of OpenStack fault responses to webob exceptions."""

    @staticmethod
    def get_fault(code):
        """Build a minimal computeFault document carrying the given code."""
        fault_body = {
            "code": code,
            "message": "Fault!",
            "details": "Error Details...",
        }
        return {"computeFault": fault_body}

    def test_exception(self):
        expected_pairs = [
            (400, webob.exc.HTTPBadRequest),
            (401, webob.exc.HTTPUnauthorized),
            (403, webob.exc.HTTPForbidden),
            (404, webob.exc.HTTPNotFound),
            (405, webob.exc.HTTPMethodNotAllowed),
            (406, webob.exc.HTTPNotAcceptable),
            (409, webob.exc.HTTPConflict),
            (413, webob.exc.HTTPRequestEntityTooLarge),
            (415, webob.exc.HTTPUnsupportedMediaType),
            (429, webob.exc.HTTPTooManyRequests),
            (501, webob.exc.HTTPNotImplemented),
            (503, webob.exc.HTTPServiceUnavailable),
            # Any other thing should be a 500
            (500, webob.exc.HTTPInternalServerError),
            (507, webob.exc.HTTPInternalServerError),
        ]
        for status, exc_class in expected_pairs:
            fault = self.get_fault(status)
            response = fakes.create_fake_json_resp(fault, status)
            converted = helpers.exception_from_response(response)
            self.assertIsInstance(converted, exc_class)
            self.assertEqual(fault["computeFault"]["message"],
                             converted.explanation)

    def test_error_handling_exception(self):
        # A body without a computeFault element maps to a 500.
        response = fakes.create_fake_json_resp({}, 404)
        converted = helpers.exception_from_response(response)
        self.assertIsInstance(converted, webob.exc.HTTPInternalServerError)
class TestBaseHelper(base.TestController):
    """Fixture and shared assertions for helpers.OpenStackHelper tests."""

    def setUp(self):
        super(TestBaseHelper, self).setUp()
        # API version prefix that _get_req is expected to place in script_name.
        self.version = "version foo bar baz"
        self.helper = helpers.OpenStackHelper(mock.MagicMock(), self.version)

    def assertExpectedReq(self, method, path, body, request):
        """Assert that a generated request has the given method/path/body."""
        self.assertEqual(method, request.method)
        self.assertEqual(path, request.path_info)
        # JSON bodies compare structurally; anything else compares as text.
        if body and request.content_type == "application/json":
            self.assertDictEqual(body, request.json_body)
        else:
            self.assertEqual(body, request.text)

    def test_new_request(self):
        """_get_req returns a copy with the version moved into script_name."""
        req = webob.Request.blank("foo")
        new_req = self.helper._get_req(req, method="GET")
        self.assertEqual(self.version, new_req.script_name)
        self.assertEqual("foo", new_req.path_info)
        self.assertIsNot(req, new_req)

    def test_new_request_with_path(self):
        """An explicit path argument overrides the original path_info."""
        req = webob.Request.blank("foo")
        new_req = self.helper._get_req(req, path="bar", method="GET")
        self.assertEqual("bar", new_req.path_info)
        self.assertExpectedReq("GET", "bar", "", new_req)

    def test_new_request_with_body(self):
        """A JSON body round-trips through the generated request."""
        req = webob.Request.blank("foo")
        body = {"bar": 1}
        new_req = self.helper._get_req(req, body=json.dumps(body),
                                       method="POST")
        self.assertExpectedReq("POST", "foo", body, new_req)

    def test_new_request_with_content_type(self):
        """An explicit content_type is carried onto the generated request."""
        req = webob.Request.blank("foo")
        new_req = self.helper._get_req(req, content_type="foo/bar",
                                       method="GET")
        self.assertEqual("foo/bar", new_req.content_type)

    def test_get_from_response(self):
        """get_from_response extracts the named element from a 200 body."""
        d = {"element": {"foo": "bar"}}
        body = json.dumps(d)
        response = webob.Response(status=200, body=body)
        result = self.helper.get_from_response(response,
                                               "element",
                                               {})
        self.assertEqual(d["element"], result)

    def test_get_from_response_with_default(self):
        """The provided default is returned when the element is absent."""
        d = {"element": {"foo": "bar"}}
        body = json.dumps({})
        response = webob.Response(status=200, body=body)
        result = self.helper.get_from_response(response,
                                               "element",
                                               d["element"])
        self.assertEqual(d["element"], result)

    def test_get_from_response_with_exception(self):
        """Non-2xx responses are converted into webob HTTP exceptions."""
        d = {"unauthorized": {"message": "unauthorized"}}
        body = json.dumps(d)
        response = webob.Response(status=403, body=body)
        self.assertRaises(webob.exc.HTTPForbidden,
                          self.helper.get_from_response,
                          response,
                          "foo",
                          {})
class TestOpenStackHelper(TestBaseHelper):
@mock.patch.object(helpers.OpenStackHelper, "_get_index_req")
def test_index(self, m):
resp = fakes.create_fake_json_resp({"servers": ["FOO"]}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
ret = self.helper.index(None)
self.assertEqual(["FOO"], ret)
m.assert_called_with(None)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_index_req")
def test_index_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.index,
None)
m.assert_called_with(None)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_flavors_req")
def test_flavors(self, m):
resp = fakes.create_fake_json_resp({"flavors": ["FOO"]}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
ret = self.helper.get_flavors(None)
self.assertEqual(["FOO"], ret)
m.assert_called_with(None)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_flavors_req")
def test_flavors_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.get_flavors,
None)
m.assert_called_with(None)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_images_req")
def test_images(self, m):
resp = fakes.create_fake_json_resp({"images": ["FOO"]}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
ret = self.helper.get_images(None)
self.assertEqual(["FOO"], ret)
m.assert_called_with(None)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_images_req")
def test_images_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.get_images,
None)
m.assert_called_with(None)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_volumes_req")
def test_volumes(self, m):
resp = fakes.create_fake_json_resp({"volumes": ["FOO"]}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
ret = self.helper.get_volumes(None)
self.assertEqual(["FOO"], ret)
m.assert_called_with(None)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_volumes_req")
def test_volumes_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.get_volumes,
None)
m.assert_called_with(None)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_floating_ips_req")
def test_floating_ips(self, m):
resp = fakes.create_fake_json_resp({"floating_ips": ["FOO"]}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
ret = self.helper.get_floating_ips(None)
self.assertEqual(["FOO"], ret)
m.assert_called_with(None)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_floating_ips_req")
def test_floating_ips_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.get_floating_ips,
None)
m.assert_called_with(None)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_floating_ip_pools_req")
def test_floating_ip_pools(self, m):
resp = fakes.create_fake_json_resp({"floating_ip_pools": ["FOO"]}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
ret = self.helper.get_floating_ip_pools(None)
self.assertEqual(["FOO"], ret)
m.assert_called_with(None)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_floating_ip_pools_req")
def test_floating_ip_pools_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.get_floating_ip_pools,
None)
m.assert_called_with(None)
m_exc.assert_called_with(resp)
    @mock.patch.object(helpers.OpenStackHelper, "_get_flavors_req")
    def test_get_flavors(self, m):
        """get_flavors unwraps the "flavors" list from a 200 response.

        NOTE(review): exact duplicate of test_flavors earlier in this class;
        one of the two could be removed.
        """
        resp = fakes.create_fake_json_resp({"flavors": ["FOO"]}, 200)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m.return_value = req_mock
        ret = self.helper.get_flavors(None)
        self.assertEqual(["FOO"], ret)
        m.assert_called_with(None)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_flavors_req")
def test_get_flavors_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.get_flavors,
None)
m.assert_called_with(None)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_delete_req")
def test_delete(self, m):
resp = fakes.create_fake_json_resp(None, 204)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
server_uuid = uuid.uuid4().hex
ret = self.helper.delete(None, server_uuid)
self.assertIsNone(ret)
m.assert_called_with(None, server_uuid)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_delete_req")
def test_delete_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
server_uuid = uuid.uuid4().hex
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.delete,
None,
server_uuid)
m.assert_called_with(None, server_uuid)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_volume_delete_req")
def test_volume_delete(self, m):
resp = fakes.create_fake_json_resp(None, 204)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
vol_uuid = uuid.uuid4().hex
ret = self.helper.volume_delete(None, vol_uuid)
self.assertIsNone(ret)
m.assert_called_with(None, vol_uuid)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_volume_delete_req")
def test_volume_delete_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
vol_uuid = uuid.uuid4().hex
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.volume_delete,
None,
vol_uuid)
m.assert_called_with(None, vol_uuid)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_run_action_req")
def test_run_action(self, m):
resp = fakes.create_fake_json_resp(None, 202)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
server_uuid = uuid.uuid4().hex
action = "start"
ret = self.helper.run_action(None, action, server_uuid)
self.assertIsNone(ret)
m.assert_called_with(None, action, server_uuid)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_run_action_req")
def test_run_action_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
server_uuid = uuid.uuid4().hex
action = "bad action"
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.run_action,
None,
action,
server_uuid)
m.assert_called_with(None, action, server_uuid)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_server_req")
def test_get_server(self, m):
resp = fakes.create_fake_json_resp({"server": "FOO"}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
server_uuid = uuid.uuid4().hex
ret = self.helper.get_server(None, server_uuid)
self.assertEqual("FOO", ret)
m.assert_called_with(None, server_uuid)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_server_req")
def test_get_server_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
server_uuid = uuid.uuid4().hex
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.get_server,
None,
server_uuid)
m.assert_called_with(None, server_uuid)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_image_req")
def test_get_image(self, m):
resp = fakes.create_fake_json_resp({"image": "FOO"}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
image_uuid = uuid.uuid4().hex
ret = self.helper.get_image(None, image_uuid)
self.assertEqual("FOO", ret)
m.assert_called_with(None, image_uuid)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_image_req")
def test_get_image_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
image_uuid = uuid.uuid4().hex
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.get_image,
None,
image_uuid)
m.assert_called_with(None, image_uuid)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_flavor_req")
def test_get_flavor(self, m):
resp = fakes.create_fake_json_resp({"flavor": "FOO"}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
flavor_uuid = uuid.uuid4().hex
ret = self.helper.get_flavor(None, flavor_uuid)
self.assertEqual("FOO", ret)
m.assert_called_with(None, flavor_uuid)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_flavor_req")
def test_get_flavor_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
flavor_uuid = uuid.uuid4().hex
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.get_flavor,
None,
flavor_uuid)
m.assert_called_with(None, flavor_uuid)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_volume_req")
def test_get_volume(self, m):
resp = fakes.create_fake_json_resp({"volume": "FOO"}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
volume_uuid = uuid.uuid4().hex
ret = self.helper.get_volume(None, volume_uuid)
self.assertEqual("FOO", ret)
m.assert_called_with(None, volume_uuid)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_volume_req")
def test_get_volume_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
volume_uuid = uuid.uuid4().hex
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.get_volume,
None,
volume_uuid)
m.assert_called_with(None, volume_uuid)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_server_volumes_link_req")
def test_get_server_volume_links(self, m):
resp = fakes.create_fake_json_resp({"volumeAttachments": ["FOO"]}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
server_uuid = uuid.uuid4().hex
ret = self.helper.get_server_volumes_link(None, server_uuid)
self.assertEqual(["FOO"], ret)
m.assert_called_with(None, server_uuid)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_server_volumes_link_req")
def test_get_server_volume_links_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
server_uuid = uuid.uuid4().hex
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.get_server_volumes_link,
None,
server_uuid)
m.assert_called_with(None, server_uuid)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_create_server_req")
def test_create_server(self, m):
resp = fakes.create_fake_json_resp({"server": "FOO"}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
name = uuid.uuid4().hex
image = uuid.uuid4().hex
flavor = uuid.uuid4().hex
user_data = "foo"
key_name = "wtfoo"
bdm = []
ret = self.helper.create_server(None, name, image, flavor,
user_data=user_data,
key_name=key_name,
block_device_mapping_v2=bdm,
networks=None)
self.assertEqual("FOO", ret)
m.assert_called_with(None, name, image, flavor, user_data=user_data,
key_name=key_name, block_device_mapping_v2=bdm,
networks=None)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_create_server_req")
def test_create_server_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
name = uuid.uuid4().hex
image = uuid.uuid4().hex
flavor = uuid.uuid4().hex
user_data = "foo"
key_name = "wtfoo"
bdm = []
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.create_server,
None,
name,
image,
flavor,
user_data=user_data,
key_name=key_name,
block_device_mapping_v2=bdm)
m.assert_called_with(None, name, image, flavor, user_data=user_data,
key_name=key_name, block_device_mapping_v2=bdm,
networks=None)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper, "_get_volume_create_req")
def test_volume_create(self, m):
resp = fakes.create_fake_json_resp({"volume": "FOO"}, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
name = uuid.uuid4().hex
size = "10"
ret = self.helper.volume_create(None, name, size)
self.assertEqual("FOO", ret)
m.assert_called_with(None, name, size)
@mock.patch("ooi.api.helpers.exception_from_response")
@mock.patch.object(helpers.OpenStackHelper, "_get_volume_create_req")
def test_volume_create_with_exception(self, m, m_exc):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
name = uuid.uuid4().hex
size = "10"
m_exc.return_value = webob.exc.HTTPInternalServerError()
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.volume_create,
None,
name,
size)
m.assert_called_with(None, name, size)
m_exc.assert_called_with(resp)
@mock.patch.object(helpers.OpenStackHelper,
"_get_server_volumes_link_create_req")
def test_create_servervolume_link(self, m):
fault = {"computeFault": {"message": "bad", "code": 500}}
resp = fakes.create_fake_json_resp(fault, 500)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
server_id = uuid.uuid4().hex
vol_id = uuid.uuid4().hex
self.assertRaises(webob.exc.HTTPInternalServerError,
self.helper.create_server_volumes_link,
None,
server_id,
vol_id)
m.assert_called_with(None, server_id, vol_id, dev=None)
@mock.patch.object(helpers.OpenStackHelper,
"_get_server_volumes_link_create_req")
def test_create_servervolume_with_exception(self, m):
server_id = uuid.uuid4().hex
vol_id = uuid.uuid4().hex
raw_resp = {"volumeAttachment": {
"device": "/dev/vdd",
"id": "a26887c6-c47b-4654-abb5-dfadf7d3f803",
"serverId": server_id,
"volumeId": vol_id,
}}
resp = fakes.create_fake_json_resp(raw_resp, 200)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
ret = self.helper.create_server_volumes_link(None, server_id, vol_id)
self.assertEqual(raw_resp["volumeAttachment"], ret)
m.assert_called_with(None, server_id, vol_id, dev=None)
@mock.patch.object(helpers.OpenStackHelper,
"_get_server_volumes_link_delete_req")
def test_delete_volume_link(self, m):
resp = fakes.create_fake_json_resp(None, 202)
req_mock = mock.MagicMock()
req_mock.get_response.return_value = resp
m.return_value = req_mock
server_uuid = uuid.uuid4().hex
vol_uuid = uuid.uuid4().hex
ret = self.helper.delete_server_volumes_link(None,
server_uuid,
vol_uuid)
self.assertIsNone(ret)
m.assert_called_with(None, server_uuid, vol_uuid)
    @mock.patch.object(helpers.OpenStackHelper,
                       "_get_server_volumes_link_delete_req")
    def test_delete_volume_link_w_exception(self, m):
        """Detach raises HTTPInternalServerError on a 500 compute fault."""
        fault = {"computeFault": {"message": "bad", "code": 500}}
        resp = fakes.create_fake_json_resp(fault, 500)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m.return_value = req_mock
        server_uuid = uuid.uuid4().hex
        vol_uuid = uuid.uuid4().hex
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.helper.delete_server_volumes_link,
                          None,
                          server_uuid,
                          vol_uuid)
        m.assert_called_with(None, server_uuid, vol_uuid)
    @mock.patch.object(helpers.OpenStackHelper,
                       "_get_floating_ip_allocate_req")
    def test_floating_ip_allocate(self, m_allocate):
        """Allocation unwraps the "floating_ip" key from the response body."""
        pool = "foo"
        resp = fakes.create_fake_json_resp({"floating_ip": "FOO"}, 200)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m_allocate.return_value = req_mock
        ret = self.helper.allocate_floating_ip(None, pool)
        self.assertEqual("FOO", ret)
        m_allocate.assert_called_with(None, pool)
    @mock.patch.object(helpers.OpenStackHelper, "_get_floating_ip_release_req")
    def test_floating_ip_release(self, m):
        """Releasing a floating IP returns None when nova answers 202."""
        resp = fakes.create_fake_json_resp(None, 202)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m.return_value = req_mock
        ip_uuid = uuid.uuid4().hex
        ret = self.helper.release_floating_ip(None, ip_uuid)
        self.assertIsNone(ret)
        m.assert_called_with(None, ip_uuid)
    @mock.patch.object(helpers.OpenStackHelper, "_get_floating_ip_release_req")
    def test_floating_ip_release_w_exception(self, m):
        """Release raises HTTPInternalServerError on a 500 compute fault."""
        fault = {"computeFault": {"message": "bad", "code": 500}}
        resp = fakes.create_fake_json_resp(fault, 500)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m.return_value = req_mock
        ip_uuid = uuid.uuid4().hex
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.helper.release_floating_ip,
                          None,
                          ip_uuid)
        m.assert_called_with(None, ip_uuid)
    @mock.patch.object(helpers.OpenStackHelper,
                       "_get_associate_floating_ip_req")
    def test_associate_floating_ip(self, m):
        """Association returns None on 202, even if the body has content."""
        resp = fakes.create_fake_json_resp({"floating_ip": "FOO"}, 202)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m.return_value = req_mock
        ip = "192.168.0.20"
        server = uuid.uuid4().hex
        ret = self.helper.associate_floating_ip(None, server, ip)
        self.assertIsNone(ret)
        m.assert_called_with(None, server, ip)
    @mock.patch.object(helpers.OpenStackHelper, "_get_remove_floating_ip_req")
    def test_remove_floating_ip(self, m):
        """Removing a floating IP returns None when nova answers 202."""
        resp = fakes.create_fake_json_resp(None, 202)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m.return_value = req_mock
        ip = "192.168.0.20"
        server = uuid.uuid4().hex
        ret = self.helper.remove_floating_ip(None, server, ip)
        self.assertIsNone(ret)
        m.assert_called_with(None, server, ip)
    @mock.patch.object(helpers.OpenStackHelper, "_get_remove_floating_ip_req")
    def test_remove_floating_ip_w_exception(self, m):
        """Removal raises HTTPInternalServerError on a 500 compute fault."""
        fault = {"computeFault": {"message": "bad", "code": 500}}
        resp = fakes.create_fake_json_resp(fault, 500)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m.return_value = req_mock
        ip = "192.168.0.20"
        server = uuid.uuid4().hex
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.helper.remove_floating_ip,
                          None,
                          server,
                          ip)
        m.assert_called_with(None, server, ip)
class TestOpenStackHelperReqs(TestBaseHelper):
    """Check the exact method/path/body of each OpenStack request the helper builds."""
    def _build_req(self, tenant_id, **kwargs):
        """Build a fake incoming WSGI request carrying *tenant_id* in its headers."""
        environ = {"HTTP_X_PROJECT_ID": tenant_id}
        return webob.Request.blank("/whatever", environ=environ, **kwargs)
    def test_os_index_req(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_index_req(req)
        path = "/%s/servers" % tenant["id"]
        self.assertExpectedReq("GET", path, "", os_req)
    def test_os_delete_req(self):
        tenant = fakes.tenants["foo"]
        server_uuid = uuid.uuid4().hex
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_delete_req(req, server_uuid)
        path = "/%s/servers/%s" % (tenant["id"], server_uuid)
        self.assertExpectedReq("DELETE", path, "", os_req)
    def test_os_volume_delete_req(self):
        tenant = fakes.tenants["foo"]
        server_uuid = uuid.uuid4().hex
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_volume_delete_req(req, server_uuid)
        path = "/%s/os-volumes/%s" % (tenant["id"], server_uuid)
        self.assertExpectedReq("DELETE", path, "", os_req)
    def test_os_action_req(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        server_uuid = uuid.uuid4().hex
        # Each OCCI action name maps to the nova "action" body it must produce.
        actions_map = {
            "stop": {"os-stop": None},
            "start": {"os-start": None},
            "suspend": {"suspend": None},
            "resume": {"resume": None},
            "unpause": {"unpause": None},
            "restart": {"reboot": {"type": "SOFT"}},
        }
        path = "/%s/servers/%s/action" % (tenant["id"], server_uuid)
        for act, body in six.iteritems(actions_map):
            os_req = self.helper._get_run_action_req(req, act, server_uuid)
            self.assertExpectedReq("POST", path, body, os_req)
    def test_get_os_server_req(self):
        tenant = fakes.tenants["foo"]
        server_uuid = uuid.uuid4().hex
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_server_req(req, server_uuid)
        path = "/%s/servers/%s" % (tenant["id"], server_uuid)
        self.assertExpectedReq("GET", path, "", os_req)
    def test_get_os_flavors_req(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_flavors_req(req)
        path = "/%s/flavors/detail" % tenant["id"]
        self.assertExpectedReq("GET", path, "", os_req)
    def test_get_os_flavor_req(self):
        tenant = fakes.tenants["foo"]
        flavor_uuid = uuid.uuid4().hex
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_flavor_req(req, flavor_uuid)
        path = "/%s/flavors/%s" % (tenant["id"], flavor_uuid)
        self.assertExpectedReq("GET", path, "", os_req)
    def test_get_os_images_req(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_images_req(req)
        path = "/%s/images/detail" % tenant["id"]
        self.assertExpectedReq("GET", path, "", os_req)
    def test_get_os_image_req(self):
        tenant = fakes.tenants["foo"]
        image_uuid = uuid.uuid4().hex
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_image_req(req, image_uuid)
        path = "/%s/images/%s" % (tenant["id"], image_uuid)
        self.assertExpectedReq("GET", path, "", os_req)
    def test_get_os_volume_links_req(self):
        tenant = fakes.tenants["foo"]
        server_uuid = uuid.uuid4().hex
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_server_volumes_link_req(req, server_uuid)
        path = "/%s/servers/%s/os-volume_attachments" % (tenant["id"],
                                                         server_uuid)
        self.assertExpectedReq("GET", path, "", os_req)
    def test_get_os_create_volume_links_req(self):
        tenant = fakes.tenants["foo"]
        server_uuid = uuid.uuid4().hex
        vol_uuid = uuid.uuid4().hex
        dev = "foo"
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_server_volumes_link_create_req(req,
                                                                 server_uuid,
                                                                 vol_uuid,
                                                                 dev=dev)
        path = "/%s/servers/%s/os-volume_attachments" % (tenant["id"],
                                                         server_uuid)
        body = {"volumeAttachment": {"volumeId": vol_uuid, "device": dev}}
        self.assertExpectedReq("POST", path, body, os_req)
    def test_get_os_delete_volume_links_req(self):
        tenant = fakes.tenants["foo"]
        server_uuid = uuid.uuid4().hex
        vol_uuid = uuid.uuid4().hex
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_server_volumes_link_delete_req(req,
                                                                 server_uuid,
                                                                 vol_uuid)
        path = "/%s/servers/%s/os-volume_attachments/%s" % (tenant["id"],
                                                            server_uuid,
                                                            vol_uuid)
        self.assertExpectedReq("DELETE", path, "", os_req)
    def test_get_os_volumes_req(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_volumes_req(req)
        path = "/%s/os-volumes" % tenant["id"]
        self.assertExpectedReq("GET", path, "", os_req)
    def test_get_os_volume_req(self):
        tenant = fakes.tenants["foo"]
        vol_uuid = uuid.uuid4().hex
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_volume_req(req, vol_uuid)
        path = "/%s/os-volumes/%s" % (tenant["id"], vol_uuid)
        self.assertExpectedReq("GET", path, "", os_req)
    def test_get_os_floating_ips(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_floating_ips_req(req)
        path = "/%s/os-floating-ips" % tenant["id"]
        self.assertExpectedReq("GET", path, "", os_req)
    def test_get_os_floating_ip_pools(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        os_req = self.helper._get_floating_ip_pools_req(req)
        path = "/%s/os-floating-ip-pools" % tenant["id"]
        self.assertExpectedReq("GET", path, "", os_req)
    def test_get_os_get_server_create(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        name = "foo server"
        image = "bar image"
        flavor = "baz flavor"
        body = {
            "server": {
                "name": name,
                "imageRef": image,
                "flavorRef": flavor,
            }
        }
        path = "/%s/servers" % tenant["id"]
        os_req = self.helper._get_create_server_req(req, name, image, flavor)
        self.assertExpectedReq("POST", path, body, os_req)
    def test_get_os_get_server_create_with_user_data(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        name = "foo server"
        image = "bar image"
        flavor = "baz flavor"
        user_data = "bazonk"
        body = {
            "server": {
                "name": name,
                "imageRef": image,
                "flavorRef": flavor,
                "user_data": user_data,
            },
        }
        path = "/%s/servers" % tenant["id"]
        os_req = self.helper._get_create_server_req(req, name, image, flavor,
                                                    user_data=user_data)
        self.assertExpectedReq("POST", path, body, os_req)
    def test_get_os_get_server_create_with_key_name(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        name = "foo server"
        image = "bar image"
        flavor = "baz flavor"
        key_name = "wtfoo"
        body = {
            "server": {
                "name": name,
                "imageRef": image,
                "flavorRef": flavor,
                "key_name": key_name,
            },
        }
        path = "/%s/servers" % tenant["id"]
        os_req = self.helper._get_create_server_req(req, name, image, flavor,
                                                    key_name=key_name)
        self.assertExpectedReq("POST", path, body, os_req)
    def test_get_os_get_volume_create(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        name = "foo server"
        size = "10"
        body = {
            "volume": {
                "display_name": name,
                "size": size
            }
        }
        path = "/%s/os-volumes" % tenant["id"]
        os_req = self.helper._get_volume_create_req(req, name, size)
        self.assertExpectedReq("POST", path, body, os_req)
    def test_get_os_floating_ip_allocate(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        pool = "foo"
        body = {"pool": pool}
        path = "/%s/os-floating-ips" % tenant["id"]
        os_req = self.helper._get_floating_ip_allocate_req(req, pool)
        self.assertExpectedReq("POST", path, body, os_req)
    def test_get_os_floating_ip_allocate_no_pool(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        pool = None
        body = {"pool": pool}
        path = "/%s/os-floating-ips" % tenant["id"]
        os_req = self.helper._get_floating_ip_allocate_req(req, pool)
        self.assertExpectedReq("POST", path, body, os_req)
    def test_get_os_floating_ip_release(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        ip = uuid.uuid4().hex
        path = "/%s/os-floating-ips/%s" % (tenant["id"], ip)
        os_req = self.helper._get_floating_ip_release_req(req, ip)
        self.assertExpectedReq("DELETE", path, "", os_req)
    def test_get_os_associate_floating_ip(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        server = uuid.uuid4().hex
        ip = "192.168.0.20"
        body = {"addFloatingIp": {"address": ip}}
        path = "/%s/servers/%s/action" % (tenant["id"], server)
        os_req = self.helper._get_associate_floating_ip_req(req, server, ip)
        self.assertExpectedReq("POST", path, body, os_req)
    def test_get_os_remove_floating_ip(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        server = uuid.uuid4().hex
        ip = "192.168.0.20"
        body = {"removeFloatingIp": {"address": ip}}
        path = "/%s/servers/%s/action" % (tenant["id"], server)
        os_req = self.helper._get_remove_floating_ip_req(req, server, ip)
        self.assertExpectedReq("POST", path, body, os_req)
    def test_get_os_get_keypair_create(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        name = "fookey"
        body = {
            "keypair": {
                "name": name,
            }
        }
        path = "/%s/os-keypairs" % tenant["id"]
        os_req = self.helper._get_keypair_create_req(req, name)
        self.assertExpectedReq("POST", path, body, os_req)
    def test_get_os_get_keypair_create_import(self):
        tenant = fakes.tenants["foo"]
        req = self._build_req(tenant["id"])
        name = "fookey"
        public_key = "fookeydata"
        body = {
            "keypair": {
                "name": name,
                "public_key": public_key
            }
        }
        path = "/%s/os-keypairs" % tenant["id"]
        os_req = self.helper._get_keypair_create_req(req, name,
                                                     public_key=public_key)
        self.assertExpectedReq("POST", path, body, os_req)
    @mock.patch.object(helpers.OpenStackHelper, "_get_keypair_create_req")
    def test_keypair_create(self, m):
        resp = fakes.create_fake_json_resp({"keypair": "FOO"}, 200)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m.return_value = req_mock
        name = uuid.uuid4().hex
        public_key = None
        ret = self.helper.keypair_create(None, name)
        self.assertEqual("FOO", ret)
        m.assert_called_with(None, name, public_key=public_key)
    @mock.patch.object(helpers.OpenStackHelper, "_get_keypair_create_req")
    def test_keypair_create_key_import(self, m):
        resp = fakes.create_fake_json_resp({"keypair": "FOO"}, 200)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m.return_value = req_mock
        name = uuid.uuid4().hex
        public_key = "fookeydata"
        ret = self.helper.keypair_create(None, name, public_key=public_key)
        self.assertEqual("FOO", ret)
        m.assert_called_with(None, name, public_key=public_key)
    @mock.patch("ooi.api.helpers.exception_from_response")
    @mock.patch.object(helpers.OpenStackHelper, "_get_keypair_create_req")
    def test_keypair_create_with_exception(self, m, m_exc):
        fault = {"computeFault": {"message": "bad", "code": 500}}
        resp = fakes.create_fake_json_resp(fault, 500)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m.return_value = req_mock
        name = uuid.uuid4().hex
        m_exc.return_value = webob.exc.HTTPInternalServerError()
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.helper.keypair_create,
                          None,
                          name,
                          None)
        m.assert_called_with(None, name, public_key=None)
        m_exc.assert_called_with(resp)
    @mock.patch("ooi.api.helpers.exception_from_response")
    @mock.patch.object(helpers.OpenStackHelper, "_get_keypair_create_req")
    def test_keypair_create_key_import_with_exception(self, m, m_exc):
        fault = {"computeFault": {"message": "bad", "code": 500}}
        resp = fakes.create_fake_json_resp(fault, 500)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m.return_value = req_mock
        name = uuid.uuid4().hex
        public_key = "fookeydata"
        m_exc.return_value = webob.exc.HTTPInternalServerError()
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.helper.keypair_create,
                          None,
                          name,
                          public_key)
        m.assert_called_with(None, name, public_key=public_key)
        m_exc.assert_called_with(resp)
    @mock.patch.object(helpers.OpenStackHelper, "_get_req")
    @mock.patch.object(helpers.OpenStackHelper, "tenant_from_req")
    def test_create_port(self, m_tenant, m_create):
        """create_port maps the interfaceAttachment body to the OCCI dict."""
        ip = '22.0.0.1'
        net_id = uuid.uuid4().hex
        port_id = uuid.uuid4().hex
        mac = '890234'
        device_id = uuid.uuid4().hex
        p = {"interfaceAttachment": {
            "net_id": net_id,
            "port_id": port_id,
            "fixed_ips": [{"ip_address": ip}],
            "mac_addr": mac, "port_state": "ACTIVE"
        }}
        response = fakes.create_fake_json_resp(p, 200)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = response
        m_create.return_value = req_mock
        ret = self.helper.create_port(None, net_id, device_id)
        self.assertEqual(device_id, ret['compute_id'])
        self.assertEqual(ip, ret['ip'])
        self.assertEqual(net_id, ret['network_id'])
        self.assertEqual(mac, ret['mac'])
        self.assertEqual(port_id, ret['ip_id'])
    @mock.patch.object(helpers.OpenStackHelper, "_get_ports")
    @mock.patch.object(helpers.OpenStackHelper, "_get_req")
    @mock.patch.object(helpers.OpenStackHelper, "tenant_from_req")
    def test_delete_port(self, m_tenant, m_delete, m_ports):
        ip = '22.0.0.1'
        net_id = uuid.uuid4().hex
        mac = '890234'
        device_id = uuid.uuid4().hex
        port_id = uuid.uuid4().hex
        p = [{"net_id": net_id,
              "fixed_ips": [{"ip_address": ip}],
              "mac_addr": mac, "port_id": port_id
              }]
        m_ports.return_value = p
        response = fakes.create_fake_json_resp({}, 202)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = response
        m_delete.return_value = req_mock
        ret = self.helper.delete_port(None, device_id, mac)
        self.assertEqual([], ret)
    @mock.patch.object(helpers.OpenStackHelper,
                       "_get_req")
    @mock.patch.object(helpers.OpenStackHelper, "tenant_from_req")
    def test_get_network_id(self, m_ten, m_req):
        m_ten.return_value = uuid.uuid4().hex
        mac = uuid.uuid4().hex
        device_id = uuid.uuid4().hex
        net_id = uuid.uuid4().hex
        ip = uuid.uuid4().hex
        p = {"interfaceAttachments": [
            {"net_id": net_id,
             "fixed_ips": [{"ip_address": ip}],
             "mac_addr": mac, "port_state": "ACTIVE"
             }]}
        resp = fakes.create_fake_json_resp(p, 200)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m_req.return_value = req_mock
        ret = self.helper.get_network_id(None, mac, device_id)
        self.assertEqual(net_id, ret)
    @mock.patch.object(helpers.OpenStackHelper,
                       "_get_req")
    @mock.patch.object(helpers.OpenStackHelper, "tenant_from_req")
    def test_get_network_id_empty(self, m_ten, m_req):
        """No attachments for the given MAC raises 404."""
        m_ten.return_value = uuid.uuid4().hex
        mac = uuid.uuid4().hex
        device_id = uuid.uuid4().hex
        p = {"interfaceAttachments": []}
        resp = fakes.create_fake_json_resp(p, 200)
        req_mock = mock.MagicMock()
        req_mock.get_response.return_value = resp
        m_req.return_value = req_mock
        self.assertRaises(webob.exc.HTTPNotFound,
                          self.helper.get_network_id,
                          None,
                          mac,
                          device_id)
    @mock.patch.object(helpers.OpenStackHelper,
                       "_get_req")
    @mock.patch.object(helpers.OpenStackHelper, "tenant_from_req")
    def test_associate_associate_err(self, m_ten, m_req):
        """Allocation succeeds but association fails -> 500 propagates.

        NOTE(review): the duplicated word in the name looks like a typo;
        a clearer name would be ``test_assign_floating_ip_associate_err``.
        """
        m_ten.return_value = uuid.uuid4().hex
        net_id = uuid.uuid4().hex
        device_id = uuid.uuid4().hex
        ip = uuid.uuid4().hex
        pool = uuid.uuid4().hex
        resp = fakes.create_fake_json_resp(
            {"floating_ip": {"ip": ip, "pool": pool}},
            202
        )
        fault = {"computeFault": {"message": "bad", "code": 500}}
        resp_ass = fakes.create_fake_json_resp(
            fault,
            500
        )
        req_all = mock.MagicMock()
        req_all.get_response.return_value = resp
        req_ass = mock.MagicMock()
        req_ass.get_response.return_value = resp_ass
        # First request allocates the IP, second one associates it.
        m_req.side_effect = [req_all,
                             req_ass]
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.helper.assign_floating_ip,
                          None,
                          net_id, device_id)
    @mock.patch.object(helpers.OpenStackHelper,
                       "_get_req")
    @mock.patch.object(helpers.OpenStackHelper, "tenant_from_req")
    def test_allocation_err(self, m_ten, m_req):
        """Allocation itself failing with a 500 propagates."""
        m_ten.return_value = uuid.uuid4().hex
        net_id = uuid.uuid4().hex
        device_id = uuid.uuid4().hex
        fault = {"computeFault": {"message": "bad", "code": 500}}
        resp = fakes.create_fake_json_resp(
            fault,
            500
        )
        req_all = mock.MagicMock()
        req_all.get_response.return_value = resp
        m_req.side_effect = [req_all]
        self.assertRaises(webob.exc.HTTPInternalServerError,
                          self.helper.assign_floating_ip,
                          None,
                          net_id, device_id)
    @mock.patch.object(helpers.OpenStackHelper,
                       "_get_req")
    @mock.patch.object(helpers.OpenStackHelper, "tenant_from_req")
    def test_associate_floating_ip(self, m_ten, m_req):
        """Successful allocate+associate returns the link description dict."""
        m_ten.return_value = uuid.uuid4().hex
        net_id = uuid.uuid4().hex
        device_id = uuid.uuid4().hex
        ip = uuid.uuid4().hex
        ip_id = uuid.uuid4().hex
        pool = uuid.uuid4().hex
        resp = fakes.create_fake_json_resp(
            {"floating_ip": {"ip": ip, "pool": pool, 'id': ip_id}},
            202
        )
        req_all = mock.MagicMock()
        req_all.get_response.return_value = resp
        resp_ass = fakes.create_fake_json_resp({}, 202)
        req_ass = mock.MagicMock()
        req_ass.get_response.return_value = resp_ass
        m_req.side_effect = [req_all,
                             req_ass]
        ret = self.helper.assign_floating_ip(None, net_id, device_id)
        self.assertIsNotNone(ret)
        self.assertEqual(net_id, ret['network_id'])
        self.assertEqual(device_id, ret['compute_id'])
        self.assertEqual(ip, ret['ip'])
        self.assertEqual(pool, ret['pool'])
|
LIP-Computing/occi-net
|
ooi/tests/controllers/test_helpers.py
|
Python
|
apache-2.0
| 57,221
|
#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
    # Point Django at the local settings module before anything imports it.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "jeito.settings_local")
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        # Standard manage.py idiom: surface the common "Django missing /
        # virtualenv not activated" failure with an actionable message
        # instead of a bare ImportError traceback.
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    execute_from_command_line(sys.argv)
|
eedf/jeito
|
manage.py
|
Python
|
mit
| 256
|
# coding: utf-8
from PyQt4.QtCore import QFileInfo
from qgis.core import QgsProject
from qgis.utils import iface
# Demonstrates the QgsProject API: reading/writing per-project settings,
# reacting to project loads, and saving/loading project files.
# Get the (singleton) project instance
project = QgsProject.instance()
# Get layer tree root node
print(project.layerTreeRoot())  # QgsLayerTreeGroup
# Print the current project file name (might be empty in case no projects have been loaded)
print(project.fileName())
# Store values in the project file, scoped under a plugin name
project.writeEntry("myplugin", "mytext", "hello world")
project.writeEntry("myplugin", "myint", 10)
project.writeEntry("myplugin", "mydouble", 0.01)
project.writeEntry("myplugin", "mybool", True)
# Read values back.  The read*Entry methods return a (value, found) pair,
# hence the trailing [0]; the second argument of the pair says whether the
# key existed or the default was used.
mytext = project.readEntry("myplugin", "mytext", "default text")[0]
myint = project.readNumEntry("myplugin", "myint", 123)[0]
# Read a list-valued entry
print(project.readListEntry('Scales', 'ScalesList'))
# Callback fired once a project has been read
def loaded():
    print('loaded')
# Combined with previous function, check when the project already read
iface.projectRead.connect(loaded)
# Load another project (replaces the current one)
project.read(QFileInfo('/home/user/projects/my_other_qgis_project.qgs'))
# Save the project to the same file
project.write()
# ... or to a new file
project.write(QFileInfo('/home/user/projects/my_new_qgis_project.qgs'))
# Start a new empty project
iface.newProject()
|
webgeodatavore/pyqgis-samples
|
core/qgis-sample-QgsProject.py
|
Python
|
gpl-2.0
| 1,262
|
from CoolProp.HumidAirProp import HAProps

# 0 degC expressed in kelvin.  BUGFIX: the original script converted the
# dry-bulb temperature with 273.13 (a typo) while converting the wet-bulb
# temperature with 273.15; both conversions now use the correct offset.
KELVIN_OFFSET = 273.15

print("Validation against H.F. Nelson and H.J. Sauer,\"Formulation for High-Temperature Properties for Moist Air\", HVAC&R Research v.8 #3, 2002")
print("Note: More accurate formulation employed than in Nelson. Just for sanity checking")
print("Yields a negative relative humidity for Tdb=5C,Twb=-3C, point omitted")

# Dry-bulb / wet-bulb temperature pairs [C] from Nelson & Sauer's tables.
tdb = [5, 5, 5, 25, 25, 25, 25, 50, 50, 50, 50, 50, 50, 50]
twb = [5, 2, -1, 25, 20, 15, 10, 50, 40, 30, 25, 22, 20, 19]


def print_adiabatic_saturation_table(title, pressure_line, p_kpa):
    """Print one adiabatic-saturation comparison table.

    title -- table caption, e.g. "Table 6: Adiabatic Saturation"
    pressure_line -- human-readable pressure/altitude line
    p_kpa -- absolute pressure in kPa passed to HAProps
    """
    print(" ")
    print(title)
    print(pressure_line)
    print("========================================================================")
    print("{Tdb:10s}{Twb:10s}{Tdp:10s}{R:10s}{W:10s}{h:10s}{v:10s}".format(W='W', Twb='Twb', Tdp='Tdp', Tdb='Tdb', v='v', h='h', s='s', R='RH'))
    print("{Tdb:10s}{Twb:10s}{Tdp:10s}{R:10s}{W:10s}{h:10s}{v:10s}".format(W='-', Twb='C', Tdp='C', Tdb='C', v='m^3/kg_da', h='kJ/kg_da', s='kJ/kg_da/K', R='%'))
    print("------------------------------------------------------------------------")
    for (tdb_, twb_) in zip(tdb, twb):
        T = tdb_ + KELVIN_OFFSET
        Twb = twb_ + KELVIN_OFFSET
        h = HAProps('H', 'T', T, 'Twb', Twb, 'P', p_kpa)
        tdp = HAProps('Tdp', 'T', T, 'Twb', Twb, 'P', p_kpa) - KELVIN_OFFSET
        W = HAProps('W', 'T', T, 'Twb', Twb, 'P', p_kpa)
        R = HAProps('R', 'T', T, 'Twb', Twb, 'P', p_kpa) * 100
        v = HAProps('V', 'T', T, 'Twb', Twb, 'P', p_kpa)
        # Entropy column is not printed ({s} is absent from the format
        # string); kept for parity with the header row's extra key.
        s = 0
        print("{Tdb:10.2f}{Twb:10.2f}{Tdp:10.2f}{R:10.1f}{W:10.5f}{h:10.2f}{v:10.3f}".format(W=W, Twb=twb_, Tdp=tdp, Tdb=tdb_, v=v, h=h, s=s, R=R))
    print("------------------------------------------------------------------------")


print_adiabatic_saturation_table("Table 6: Adiabatic Saturation",
                                 "P=101325 Pa, Altitude = 0 m", 101.325)
print_adiabatic_saturation_table("Table 7: Adiabatic Saturation",
                                 "P=84,556 Pa, Altitude = 1500 m", 84.556)
|
henningjp/CoolProp
|
Web/fluid_properties/Validation/NelsonValidation.py
|
Python
|
mit
| 2,931
|
import sublime_plugin
import os
def get_folder_for_view(view, folders):
    """Return the path of *view*'s file relative to the first matching folder.

    Falls back to an empty string when the view has no file on disk or the
    file lives outside every folder in *folders*.
    """
    file_path = view.file_name()
    if file_path:
        for root in folders:
            if file_path.startswith(root):
                return os.path.relpath(file_path, root)
    return ""
def get_view_info(view, folders):
    """Return ``[display name, folder-relative path]`` for *view*.

    The display name is the view's tab title, or the file's basename when
    the view has no explicit name.
    """
    title = view.name() or os.path.basename(view.file_name())
    return [title, get_folder_for_view(view, folders)]
def get_view_list(views, folders):
    """Return display info for every view that has a file or a tab name."""
    return [get_view_info(view, folders)
            for view in views
            if view.file_name() or view.name()]
class PowerShiftCommand(sublime_plugin.WindowCommand):
    """Opens up a list of all currently open files"""
    def run(self):
        """Displays the pop-up list and saves the current group index"""
        # Remember which group was active so the chosen view lands there.
        self.group = self.window.active_group()
        self.views = self.window.views()
        self.window.show_quick_panel(
            get_view_list(self.views, self.window.folders()),
            self.shift_view
        )
    def shift_view(self, index):
        """Shifts the selected view into focus"""
        # index is -1 when the quick panel is dismissed without a choice.
        if index < 0:
            return
        # Move the chosen view to position 0 of the previously active group,
        # then give it focus.
        self.window.set_view_index(self.views[index], self.group, 0)
        self.window.focus_view(self.views[index])
|
ice9js/power-shift-sublime
|
power_shift.py
|
Python
|
mit
| 1,372
|
from django.conf.urls import include
from django.urls import path
from django.contrib import admin
import django_js_reverse.views
from rest_framework.routers import DefaultRouter
from common.routes import routes as common_routes
# Register every app-provided viewset route on a single DRF router.
router = DefaultRouter()
routes = common_routes
for route in routes:
    router.register(route['regex'], route['viewset'], basename=route['basename'])
# Root URL configuration: app pages, admin, JS url-reversing, and the API.
urlpatterns = [
    path("", include("common.urls"), name="common"),
    path("admin/", admin.site.urls, name="admin"),
    path("jsreverse/", django_js_reverse.views.urls_js, name="js_reverse"),
    path("api/", include(router.urls), name="api"),
]
|
vintasoftware/django-react-boilerplate
|
backend/project_name/urls.py
|
Python
|
mit
| 636
|
# -*- coding: utf8 -*-
# This file is part of PyBossa.
#
# Copyright (C) 2014 SF Isle of Man Limited
#
# PyBossa is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PyBossa is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with PyBossa. If not, see <http://www.gnu.org/licenses/>.
"""This module tests the Uploader class."""
import os
import tempfile
from default import Test, with_context
from pybossa.uploader.local import LocalUploader
from mock import patch
from werkzeug.datastructures import FileStorage
from nose.tools import assert_raises
class TestLocalUploader(Test):
    """Test PyBossa Uploader module."""
    @with_context
    def test_local_uploader_relative_directory_init(self):
        """Test LOCAL UPLOADER init works with relative path."""
        new_upload_folder = 'uploads'
        new_config_uf = {'UPLOAD_FOLDER': new_upload_folder}
        with patch.dict(self.flask_app.config, new_config_uf):
            new_uploader = LocalUploader()
            new_uploader.init_app(self.flask_app)
            err_msg = "Upload folder should be absolute not relative"
            assert os.path.isabs(new_uploader.upload_folder) is True, err_msg
            err_msg = "Upload folder uploads should be existing"
            assert os.path.isdir(new_uploader.upload_folder) is True, err_msg
    @with_context
    def test_wrong_local_uploader_relative_directory_init(self):
        """Test LOCAL UPLOADER init with wrong relative path."""
        new_upload_folder = 'iamnotexisting'
        err_msg = "Uploadfolder ./iamnotexisting should not exist"
        assert os.path.isdir(new_upload_folder) is False, err_msg
        new_config_uf = {'UPLOAD_FOLDER': new_upload_folder}
        with patch.dict(self.flask_app.config, new_config_uf):
            new_uploader = LocalUploader()
            assert_raises(IOError, new_uploader.init_app, self.flask_app)  # Should raise IOError
            err_msg = "wrong upload folder ./iamnotexisting should not exist"
            assert os.path.isdir(new_upload_folder) is False, err_msg
    @with_context
    def test_local_uploader_standard_directory_existing(self):
        """Test if local uploads directory existing"""
        uploads_path = os.path.join(os.path.dirname(self.flask_app.root_path), 'uploads')  # ../uploads
        err_msg = "./uploads folder is not existing"
        assert os.path.isdir(uploads_path) is True, err_msg
        context_uploads_path = os.path.join(self.flask_app.root_path, 'uploads')  # pybossa/uploads
        err_msg = "pybossa/uploads should not exist"
        assert os.path.isdir(context_uploads_path) is False, err_msg
    @with_context
    def test_local_uploader_init(self):
        """Test LOCAL UPLOADER init works."""
        u = LocalUploader()
        u.init_app(self.flask_app)
        new_extensions = ['pdf', 'doe']
        new_upload_folder = '/tmp/'
        new_config_ext = {'ALLOWED_EXTENSIONS': new_extensions}
        new_config_uf = {'UPLOAD_FOLDER': new_upload_folder}
        with patch.dict(self.flask_app.config, new_config_ext):
            with patch.dict(self.flask_app.config, new_config_uf):
                new_uploader = LocalUploader()
                new_uploader.init_app(self.flask_app)
                # Configured extensions are added to (not replacing) defaults.
                expected_extensions = set.union(u.allowed_extensions,
                                                new_extensions)
                err_msg = "The new uploader should support two extra extensions"
                assert expected_extensions == new_uploader.allowed_extensions, err_msg
                err_msg = "Upload folder /tmp should be existing"
                assert os.path.isdir(new_uploader.upload_folder) is True, err_msg
                err_msg = "Upload folder by default is /tmp/"
                assert new_uploader.upload_folder == '/tmp/', err_msg
    @with_context
    @patch('werkzeug.datastructures.FileStorage.save', side_effect=IOError)
    def test_local_uploader_upload_fails(self, mock):
        """Test LOCAL UPLOADER upload fails."""
        u = LocalUploader()
        file = FileStorage(filename='test.jpg')
        res = u.upload_file(file, container='user_3')
        err_msg = ("Upload file should return False, \
                   as there is an exception")
        assert res is False, err_msg
    @with_context
    @patch('werkzeug.datastructures.FileStorage.save', return_value=None)
    def test_local_uploader_upload_correct_file(self, mock):
        """Test LOCAL UPLOADER upload works."""
        mock.save.return_value = None
        u = LocalUploader()
        file = FileStorage(filename='test.jpg')
        res = u.upload_file(file, container='user_3')
        err_msg = ("Upload file should return True, \
                   as this extension is allowed")
        assert res is True, err_msg
    @with_context
    @patch('werkzeug.datastructures.FileStorage.save', return_value=None)
    def test_local_uploader_upload_wrong_file(self, mock):
        """Test LOCAL UPLOADER upload works with wrong extension."""
        mock.save.return_value = None
        u = LocalUploader()
        file = FileStorage(filename='test.txt')
        res = u.upload_file(file, container='user_3')
        err_msg = ("Upload file should return False, \
                   as this extension is not allowed")
        assert res is False, err_msg
    @with_context
    @patch('werkzeug.datastructures.FileStorage.save', return_value=None)
    def test_local_folder_is_created(self, mock):
        """Test LOCAL UPLOADER folder creation works."""
        mock.save.return_value = True
        u = LocalUploader()
        # NOTE(review): the mkdtemp() directory is never removed by this
        # test, and ``res`` below is unused — the assertion only checks
        # that the container directory was created.
        u.upload_folder = tempfile.mkdtemp()
        file = FileStorage(filename='test.jpg')
        container = 'mycontainer'
        res = u.upload_file(file, container=container)
        path = os.path.join(u.upload_folder, container)
        err_msg = "This local path should exist: %s" % path
        assert os.path.isdir(path) is True, err_msg
    @with_context
    @patch('os.remove', return_value=None)
    def test_local_folder_delete(self, mock):
        """Test LOCAL UPLOADER delete works."""
        u = LocalUploader()
        err_msg = "Delete should return true"
        assert u.delete_file('file', 'container') is True, err_msg
    @with_context
    @patch('os.remove', side_effect=OSError)
    def test_local_folder_delete_fails(self, mock):
        """Test LOCAL UPLOADER delete fail works."""
        u = LocalUploader()
        err_msg = "Delete should return False"
        assert u.delete_file('file', 'container') is False, err_msg
|
stefanhahmann/pybossa
|
test/test_uploader/test_local_uploader.py
|
Python
|
agpl-3.0
| 7,020
|
#!/usr/bin/env python
# Parse a Cisco configuration file with CiscoConfParse and print every
# "crypto map CRYPTO" parent line that has a "pfs group2" child line.
# NOTE: Python 2 script (print statement, raw_input).
from ciscoconfparse import CiscoConfParse
print "We will use this program to parse a cisco config file"
# Ask the operator which configuration file to inspect.
filename = raw_input("Please enter the name of the file that needs to be parsed: ")
#print filename
input_file = CiscoConfParse(filename)
# Parent/child search: parents matching ^crypto map CRYPTO that contain
# a child matching "pfs group2".
crypto_find = input_file.find_objects_w_child(parentspec=r"^crypto map CRYPTO", childspec=r"pfs group2")
#print crypto_find
for item in crypto_find:
    print item.text
|
networkpadwan/appliedpython
|
week1/parse2.py
|
Python
|
apache-2.0
| 435
|
# -*- coding: utf-8 -*-
#
# This file is part of NINJA-IDE (http://ninja-ide.org).
#
# NINJA-IDE is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# any later version.
#
# NINJA-IDE is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NINJA-IDE; If not, see <http://www.gnu.org/licenses/>.
from __future__ import absolute_import
from __future__ import unicode_literals
from PyQt4.QtGui import QWidget
from PyQt4.QtGui import QVBoxLayout
from PyQt4.QtGui import QHBoxLayout
from PyQt4.QtGui import QGroupBox
from PyQt4.QtGui import QCheckBox
from PyQt4.QtGui import QGridLayout
from PyQt4.QtGui import QSpacerItem
from PyQt4.QtGui import QLabel
from PyQt4.QtGui import QSpinBox
from PyQt4.QtGui import QComboBox
from PyQt4.QtGui import QSizePolicy
from PyQt4.QtCore import Qt
from PyQt4.QtCore import SIGNAL
from ninja_ide import translations
from ninja_ide.core import settings
from ninja_ide.gui.ide import IDE
from ninja_ide.gui.dialogs.preferences import preferences
class EditorConfiguration(QWidget):
    """Preferences page for editor-related settings.

    Builds seven option groups (indentation, margin line, lint errors,
    PEP8, highlighter extras, typing assistance, display) and persists
    them to ``ninja_ide.core.settings`` plus QSettings when the parent
    preferences dialog emits ``savePreferences()``.
    """
    def __init__(self, parent):
        """Create all widgets and load their state from ``settings``.

        :param parent: the enclosing preferences dialog; its
            ``savePreferences()`` signal triggers :meth:`save`.
        """
        super(EditorConfiguration, self).__init__()
        self._preferences, vbox = parent, QVBoxLayout(self)
        # groups
        group1 = QGroupBox(translations.TR_PREFERENCES_EDITOR_CONFIG_INDENT)
        group2 = QGroupBox(translations.TR_PREFERENCES_EDITOR_CONFIG_MARGIN)
        group3 = QGroupBox(translations.TR_LINT_DIRTY_TEXT)
        group4 = QGroupBox(translations.TR_PEP8_DIRTY_TEXT)
        group5 = QGroupBox(translations.TR_HIGHLIGHTER_EXTRAS)
        group6 = QGroupBox(translations.TR_TYPING_ASSISTANCE)
        group7 = QGroupBox(translations.TR_DISPLAY)
        # groups container
        container_widget_with_all_preferences = QWidget()
        formFeatures = QGridLayout(container_widget_with_all_preferences)
        # Indentation: spin box for width, combo for spaces vs. tabs
        hboxg1 = QHBoxLayout(group1)
        hboxg1.setContentsMargins(5, 15, 5, 5)
        self._spin, self._checkUseTabs = QSpinBox(), QComboBox()
        self._spin.setRange(1, 10)
        self._spin.setValue(settings.INDENT)
        hboxg1.addWidget(self._spin)
        self._checkUseTabs.addItems([
            translations.TR_PREFERENCES_EDITOR_CONFIG_SPACES.capitalize(),
            translations.TR_PREFERENCES_EDITOR_CONFIG_TABS.capitalize()])
        # combo index 0 = spaces, 1 = tabs, matching bool(USE_TABS)
        self._checkUseTabs.setCurrentIndex(int(settings.USE_TABS))
        hboxg1.addWidget(self._checkUseTabs)
        formFeatures.addWidget(group1, 0, 0)
        # Margin Line: toggle + column position (50-100, step 2)
        hboxg2 = QHBoxLayout(group2)
        hboxg2.setContentsMargins(5, 15, 5, 5)
        self._checkShowMargin = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_MARGIN_LINE)
        self._checkShowMargin.setChecked(settings.SHOW_MARGIN_LINE)
        hboxg2.addWidget(self._checkShowMargin)
        self._spinMargin = QSpinBox()
        self._spinMargin.setRange(50, 100)
        self._spinMargin.setSingleStep(2)
        self._spinMargin.setValue(settings.MARGIN_LINE)
        hboxg2.addWidget(self._spinMargin)
        hboxg2.addWidget(QLabel(translations.TR_CHARACTERS))
        formFeatures.addWidget(group2, 0, 1)
        # Display Errors: background vs. underline, plus line numbers
        vboxDisplay = QVBoxLayout(group7)
        vboxDisplay.setContentsMargins(5, 15, 5, 5)
        self._checkHighlightLine = QComboBox()
        self._checkHighlightLine.addItems([
            translations.TR_PREFERENCES_EDITOR_CONFIG_ERROR_USE_BACKGROUND,
            translations.TR_PREFERENCES_EDITOR_CONFIG_ERROR_USE_UNDERLINE])
        self._checkHighlightLine.setCurrentIndex(
            int(settings.UNDERLINE_NOT_BACKGROUND))
        hboxDisplay1 = QHBoxLayout()
        hboxDisplay1.addWidget(QLabel(translations.TR_DISPLAY_ERRORS))
        hboxDisplay1.addWidget(self._checkHighlightLine)
        hboxDisplay2 = QHBoxLayout()
        self._checkDisplayLineNumbers = QCheckBox(
            translations.TR_DISPLAY_LINE_NUMBERS)
        self._checkDisplayLineNumbers.setChecked(settings.SHOW_LINE_NUMBERS)
        hboxDisplay2.addWidget(self._checkDisplayLineNumbers)
        vboxDisplay.addLayout(hboxDisplay1)
        vboxDisplay.addLayout(hboxDisplay2)
        formFeatures.addWidget(group7, 1, 0, 1, 0)
        # Find Lint Errors (highlighter); checkboxes are cross-linked so
        # inline display implies lint checking (see the slots below)
        vboxg3 = QVBoxLayout(group3)
        self._checkErrors = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_FIND_ERRORS)
        self._checkErrors.setChecked(settings.FIND_ERRORS)
        self.connect(self._checkErrors, SIGNAL("stateChanged(int)"),
                     self._disable_show_errors)
        self._showErrorsOnLine = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_TOOLTIP_ERRORS)
        self._showErrorsOnLine.setChecked(settings.ERRORS_HIGHLIGHT_LINE)
        self.connect(self._showErrorsOnLine, SIGNAL("stateChanged(int)"),
                     self._enable_errors_inline)
        vboxg3.addWidget(self._checkErrors)
        vboxg3.addWidget(self._showErrorsOnLine)
        formFeatures.addWidget(group3, 2, 0)
        # Find PEP8 Errors (highlighter); same cross-link pattern as lint
        vboxg4 = QVBoxLayout(group4)
        vboxg4.setContentsMargins(5, 15, 5, 5)
        self._checkStyle = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_PEP8)
        self._checkStyle.setChecked(settings.CHECK_STYLE)
        self.connect(self._checkStyle, SIGNAL("stateChanged(int)"),
                     self._disable_check_style)
        vboxg4.addWidget(self._checkStyle)
        self._checkStyleOnLine = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_TOOLTIP_PEP8)
        self._checkStyleOnLine.setChecked(settings.CHECK_HIGHLIGHT_LINE)
        self.connect(self._checkStyleOnLine, SIGNAL("stateChanged(int)"),
                     self._enable_check_inline)
        vboxg4.addWidget(self._checkStyleOnLine)
        formFeatures.addWidget(group4, 2, 1)
        # Show Python3 Migration, DocStrings and Spaces (highlighter)
        vboxg5 = QVBoxLayout(group5)
        vboxg5.setContentsMargins(5, 15, 5, 5)
        self._showMigrationTips = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_MIGRATION)
        self._showMigrationTips.setChecked(settings.SHOW_MIGRATION_TIPS)
        vboxg5.addWidget(self._showMigrationTips)
        self._checkForDocstrings = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_CHECK_FOR_DOCSTRINGS)
        self._checkForDocstrings.setChecked(settings.CHECK_FOR_DOCSTRINGS)
        vboxg5.addWidget(self._checkForDocstrings)
        self._checkShowSpaces = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_SHOW_TABS_AND_SPACES)
        self._checkShowSpaces.setChecked(settings.SHOW_TABS_AND_SPACES)
        vboxg5.addWidget(self._checkShowSpaces)
        self._checkIndentationGuide = QCheckBox(
            translations.TR_SHOW_INDENTATION_GUIDE)
        self._checkIndentationGuide.setChecked(settings.SHOW_INDENTATION_GUIDE)
        vboxg5.addWidget(self._checkIndentationGuide)
        formFeatures.addWidget(group5, 3, 0)
        # End of line, Center On Scroll, Trailing space, Word wrap
        vboxg6 = QVBoxLayout(group6)
        vboxg6.setContentsMargins(5, 15, 5, 5)
        self._checkEndOfLine = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_END_OF_LINE)
        self._checkEndOfLine.setChecked(settings.USE_PLATFORM_END_OF_LINE)
        vboxg6.addWidget(self._checkEndOfLine)
        self._checkCenterScroll = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_CENTER_SCROLL)
        self._checkCenterScroll.setChecked(settings.CENTER_ON_SCROLL)
        vboxg6.addWidget(self._checkCenterScroll)
        self._checkTrailing = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_REMOVE_TRAILING)
        self._checkTrailing.setChecked(settings.REMOVE_TRAILING_SPACES)
        vboxg6.addWidget(self._checkTrailing)
        self._allowWordWrap = QCheckBox(
            translations.TR_PREFERENCES_EDITOR_CONFIG_WORD_WRAP)
        self._allowWordWrap.setChecked(settings.ALLOW_WORD_WRAP)
        vboxg6.addWidget(self._allowWordWrap)
        formFeatures.addWidget(group6, 3, 1)
        # pack all the groups
        vbox.addWidget(container_widget_with_all_preferences)
        vbox.addItem(QSpacerItem(0, 10, QSizePolicy.Expanding,
                     QSizePolicy.Expanding))
        # Persist only when the parent dialog asks us to.
        self.connect(self._preferences, SIGNAL("savePreferences()"), self.save)
    def _enable_check_inline(self, val):
        """Method that takes a value to enable the inline style checking."""
        if val == Qt.Checked:
            self._checkStyle.setChecked(True)
    def _enable_errors_inline(self, val):
        """Method that takes a value to enable the inline errors checking."""
        if val == Qt.Checked:
            self._checkErrors.setChecked(True)
    def _disable_check_style(self, val):
        """Method that takes a value to disable the inline style checking."""
        if val == Qt.Unchecked:
            self._checkStyleOnLine.setChecked(False)
    def _disable_show_errors(self, val):
        """Method that takes a value to disable the inline errors checking."""
        if val == Qt.Unchecked:
            self._showErrorsOnLine.setChecked(False)
    def save(self):
        """Copy widget state into ``settings`` and persist via QSettings."""
        qsettings = IDE.ninja_settings()
        # combo index 1 means "tabs" (see __init__)
        settings.USE_TABS = bool(self._checkUseTabs.currentIndex())
        qsettings.setValue('preferences/editor/useTabs',
                           settings.USE_TABS)
        margin_line = self._spinMargin.value()
        settings.MARGIN_LINE = margin_line
        # keep the PEP8 checker's max-line-length in sync with the margin
        settings.pep8mod_update_margin_line_length(margin_line)
        qsettings.setValue('preferences/editor/marginLine', margin_line)
        settings.SHOW_MARGIN_LINE = self._checkShowMargin.isChecked()
        qsettings.setValue('preferences/editor/showMarginLine',
                           settings.SHOW_MARGIN_LINE)
        settings.INDENT = self._spin.value()
        qsettings.setValue('preferences/editor/indent', settings.INDENT)
        endOfLine = self._checkEndOfLine.isChecked()
        settings.USE_PLATFORM_END_OF_LINE = endOfLine
        qsettings.setValue('preferences/editor/platformEndOfLine', endOfLine)
        settings.UNDERLINE_NOT_BACKGROUND = \
            bool(self._checkHighlightLine.currentIndex())
        qsettings.setValue('preferences/editor/errorsUnderlineBackground',
                           settings.UNDERLINE_NOT_BACKGROUND)
        settings.FIND_ERRORS = self._checkErrors.isChecked()
        qsettings.setValue('preferences/editor/errors', settings.FIND_ERRORS)
        settings.ERRORS_HIGHLIGHT_LINE = self._showErrorsOnLine.isChecked()
        qsettings.setValue('preferences/editor/errorsInLine',
                           settings.ERRORS_HIGHLIGHT_LINE)
        settings.CHECK_STYLE = self._checkStyle.isChecked()
        qsettings.setValue('preferences/editor/checkStyle',
                           settings.CHECK_STYLE)
        settings.SHOW_MIGRATION_TIPS = self._showMigrationTips.isChecked()
        qsettings.setValue('preferences/editor/showMigrationTips',
                           settings.SHOW_MIGRATION_TIPS)
        settings.CHECK_HIGHLIGHT_LINE = self._checkStyleOnLine.isChecked()
        qsettings.setValue('preferences/editor/checkStyleInline',
                           settings.CHECK_HIGHLIGHT_LINE)
        settings.CENTER_ON_SCROLL = self._checkCenterScroll.isChecked()
        qsettings.setValue('preferences/editor/centerOnScroll',
                           settings.CENTER_ON_SCROLL)
        settings.REMOVE_TRAILING_SPACES = self._checkTrailing.isChecked()
        qsettings.setValue('preferences/editor/removeTrailingSpaces',
                           settings.REMOVE_TRAILING_SPACES)
        settings.ALLOW_WORD_WRAP = self._allowWordWrap.isChecked()
        qsettings.setValue('preferences/editor/allowWordWrap',
                           settings.ALLOW_WORD_WRAP)
        settings.SHOW_TABS_AND_SPACES = self._checkShowSpaces.isChecked()
        qsettings.setValue('preferences/editor/showTabsAndSpaces',
                           settings.SHOW_TABS_AND_SPACES)
        settings.SHOW_INDENTATION_GUIDE = (
            self._checkIndentationGuide.isChecked())
        qsettings.setValue('preferences/editor/showIndentationGuide',
                           settings.SHOW_INDENTATION_GUIDE)
        settings.CHECK_FOR_DOCSTRINGS = self._checkForDocstrings.isChecked()
        qsettings.setValue('preferences/editor/checkForDocstrings',
                           settings.CHECK_FOR_DOCSTRINGS)
        settings.SHOW_LINE_NUMBERS = self._checkDisplayLineNumbers.isChecked()
        qsettings.setValue('preferences/editor/showLineNumbers',
                           settings.SHOW_LINE_NUMBERS)
        # W191 ("indentation contains tabs") must be suppressed when the
        # user explicitly chose tabs.
        if settings.USE_TABS:
            settings.pep8mod_add_ignore("W191")
        else:
            settings.pep8mod_remove_ignore("W191")
        settings.pep8mod_refresh_checks()
# Register this page with the preferences dialog under the CONFIGURATION
# subsection, placed first (weight=0).
preferences.Preferences.register_configuration(
    'EDITOR', EditorConfiguration,
    translations.TR_PREFERENCES_EDITOR_CONFIGURATION,
    weight=0, subsection='CONFIGURATION')
|
goblincoding/ninja-ide
|
ninja_ide/gui/dialogs/preferences/preferences_editor_configuration.py
|
Python
|
gpl-3.0
| 13,499
|
"""
PureFTP Blueprint
=================
**Fabric environment:**
.. code-block:: yaml
blueprints:
- blues.pureftp
settings:
pureftp:
users:
- username: joe
password: rosebud
"""
import os
from fabric.context_managers import settings
from fabric.contrib import files
from fabric.decorators import task
from refabric.context_managers import sudo, silent
from refabric.contrib import blueprints
from . import debian
from . import user
from refabric.operations import run
__all__ = ['start', 'stop', 'restart', 'setup', 'configure']

blueprint = blueprints.get(__name__)

# Service-control tasks for the pure-ftpd daemon.
start = debian.service_task('pure-ftpd', 'start')
stop = debian.service_task('pure-ftpd', 'stop')
restart = debian.service_task('pure-ftpd', 'restart')

ftp_root = '/srv/ftp'    # base directory that holds per-user FTP homes
ftp_user = 'ftp'         # system user that owns FTP content
ftp_group = 'www-data'   # shared with the web server so it can read uploads
@task
def setup():
    """
    Install and configure PureFTP

    Convenience task: runs install() followed by configure().
    """
    install()
    configure()
def install():
    """Install pure-ftpd and prepare user, root dir, PureDB auth and TLS."""
    with sudo():
        debian.apt_get('install', 'pure-ftpd', 'openssl')
        # Create ftp user: no real home, no login shell, member of the
        # web group so the web server can read uploaded files
        debian.useradd(ftp_user, '/dev/null', shell='/bin/false',
                       user_group=True, groups=[ftp_group], uid_min=1000)
        # Create ftp root dir (mode 1770: sticky + group-writable)
        debian.mkdir(ftp_root, mode=1770, owner=ftp_user, group=ftp_group)
        # Set up symlinks so the PureDB auth backend is enabled
        debian.ln('/etc/pure-ftpd/conf/PureDB', '/etc/pure-ftpd/auth/PureDB')
        # Enable TLS
        run('echo 1 > /etc/pure-ftpd/conf/TLS')
        key_path = '/etc/ssl/private/pure-ftpd.pem'
        if not files.exists(key_path):
            # Generate a self-signed certificate non-interactively by
            # answering every openssl prompt with an empty string.
            prompts = {
                'Country Name (2 letter code) [AU]:': '',
                'State or Province Name (full name) [Some-State]:': '',
                'Locality Name (eg, city) []:': '',
                'Organization Name (eg, company) [Internet Widgits Pty Ltd]:': '',
                'Organizational Unit Name (eg, section) []:': '',
                'Common Name (e.g. server FQDN or YOUR name) []:': '',
                'Email Address []:': ''
            }
            with settings(prompts=prompts):
                run('openssl req -x509 -nodes -newkey rsa:2048 -keyout {0} -out {0}'.format(
                    key_path))
            # Private key must not be world-readable
            debian.chmod(key_path, 600)
@task
def configure():
    """
    Configure PureFTP

    Writes the per-setting conf files, then creates every account listed
    in the blueprint (home under the FTP root unless ``homedir`` is
    given), rebuilds the PureDB and restarts the daemon. Accounts that
    already exist in the PureDB password file are skipped.
    """
    with sudo():
        # Echo configurations
        setup_config()
        # Loop-invariant; hoisted out of the loop.
        passwd_path = '/etc/pure-ftpd/pureftpd.passwd'
        # NOTE: loop variable renamed from ``user`` — it shadowed the
        # ``user`` module imported at the top of this file.
        for account in blueprint.get('users'):
            username, password = account['username'], account['password']
            if 'homedir' in account:
                user_home = account['homedir']
            else:
                user_home = os.path.join(ftp_root, username)
            with settings(warn_only=True):
                # Skip accounts already present in the PureDB password file.
                if files.exists(passwd_path) and run('pure-pw show {}'.format(
                        username)).return_code == 0:
                    continue
            debian.mkdir(user_home, owner=ftp_user, group=ftp_group)
            # pure-pw prompts twice for the password; answer automatically.
            prompts = {
                'Password: ': password,
                'Enter it again: ': password
            }
            with settings(prompts=prompts):
                run('pure-pw useradd {} -u {} -g {} -d {}'.format(username, ftp_user, ftp_group,
                                                                  user_home))
        run('pure-pw mkdb')
        restart()
# Default pure-ftpd settings; each key maps to /etc/pure-ftpd/conf/<key>.
# NOTE: fixed a duplicate 'NoChmod' key — the literal previously listed it
# twice, so the second entry silently overwrote the first.
config_defaults = {
    'ChrootEveryone': 'yes',  # Cage in every user in his home directory
    'BrokenClientsCompatibility': 'yes',  # Turn on compatibility hacks for broken clients
    'MaxClientsNumber': '50',  # Maximum number of simultaneous users
    'MaxClientsPerIP': '5',  # Maximum number of sim clients with the same IP address
    'Daemonize': 'yes',  # Fork in background
    'VerboseLog': 'yes',  # Turn off verbose logging
    'DisplayDotFiles': 'yes',  # List dot-files even when the client doesn't send "-a".
    'ProhibitDotFilesWrite': 'yes',  # Users can't delete/write files beginning with a dot ('.')
    'NoChmod': 'yes',  # Disallow the CHMOD command. Users can't change perms of their files.
    'AnonymousOnly': 'no',  # Don't allow authenticated users - have a public anonymous FTP only.
    'NoAnonymous': 'yes',  # Disallow anonymous connections entirely.
    'PAMAuthentication': 'no',  # Disable PAM authentication
    'UnixAuthentication': 'no',  # Disable /etc/passwd (UNIX) authentication
    'DontResolve': 'yes',  # Don't resolve host names in log files.
    'MaxIdleTime': '15',  # Maximum idle time in minutes (default = 15 minutes)
    'LimitRecursion': '2000 8',  # 'ls' recursion limits.
    'AntiWarez': 'yes',  # Disallow downloading of files owned by "ftp"
    'AnonymousCanCreateDirs': 'no',  # Are anonymous users allowed to create new directories ?
    'MaxLoad': '6',  # If the system is more loaded than the following value, disallow download.
    'AllowUserFXP': 'no',  # Disallow FXP transfers for authenticated users.
    'AllowAnonymousFXP': 'no',  # Disallow anonymous FXP for anonymous and non-anonymous users.
    'AutoRename': 'no',  # Turn off autorenaming of conflicting filenames
    'AnonymousCantUpload': 'yes',  # Disallow anonymous users to upload new files (no = upload is allowed)
    'MaxDiskUsage': '80',  # When the partition is more that X percent full, new uploads are disallowed.
    'CustomerProof': 'yes',  # Workaround against common customer mistakes like chmod 0 public_html
    'PureDB': '/etc/pure-ftpd/pureftpd.pdb'  # User database
}
def set_pureftp_config_value(**kwargs):
    """Write each recognised setting to its pure-ftpd conf file.

    Keys not present in ``config_defaults`` are silently ignored.
    """
    for name, val in kwargs.iteritems():
        if name not in config_defaults:
            continue
        run("echo '{}' > /etc/pure-ftpd/conf/{}".format(val, name))
def setup_config():
    """Merge blueprint overrides onto the defaults and write them out."""
    with silent():
        merged = dict(config_defaults)
        merged.update(blueprint.get(''))
        set_pureftp_config_value(**merged)
|
5monkeys/blues
|
blues/pureftp.py
|
Python
|
mit
| 6,003
|
"""
GBM_exp.py
Author: Ginny Cunningham
Date: December 11, 2017
For a given magnitude and time of a GRB, calculate the expected magnitude at a later time assuming a power law decay.
Usage: python GBM_exp.py [Initial_Magnitude] [Age of Burst]
"""
import numpy as np
import matplotlib.pyplot as plt
import sys
print " "
#####Inputs
inputs = sys.argv
m0 = float(inputs[1]) #Initial magnitude (read from email alert)
t0 = float(inputs[2]) #Time since burst (read from email alert) [s]
#delta_t = float(inputs[3]) #Expected delta_t from t0 to time of observation with SEDm [s]
delta_t = 600. #Use a placeholder value of 10 minutes for now [s]
t_obs = t0+delta_t #Expected time of observation [s]
#print m0,t0,t_obs
#####Power law index
gamma = 1.
#####Calculate expected magnitude
tend = 10*t_obs #End of observation run [s]
t = np.linspace(t0, tend, 1000) #Range of times to observe over [s]
m = 2.5*gamma*np.log10(t/t0)+m0 #final magnitude
m_exp = 2.5*gamma*np.log10((t_obs)/t0)+m0 #Expected magnitude at t_obs=t0+delta_t
print "Expected Magnitude %s s after initial UVOT observations: %.2f" %(delta_t, m_exp)
#####Examples of exposure times for various magnitudes
if 10 < m_exp <= 11:
exptime = 120
elif 11 < m_exp <= 12:
exptime = 240
elif 12 < m_exp <= 13:
exptime = 360
elif 13 < m_exp <= 14:
exptime = 500
else:
exptime = -1
print "Exposure Time not Within Expected Range."
print "Recommended Exposure time: %s seconds" %exptime
#####Plotting
plt.semilogx(t,m)
plt.gca().invert_yaxis()
p1 = plt.scatter(t0, m0, c='g', label="Initial Magnitude")
p2 = plt.scatter(t0+delta_t, m_exp, c='r', label="Expected Magnitude at Observation")
plt.legend()
plt.xlabel("Time since Trigger [s]")
plt.ylabel("Magnitude")
plt.show()
print " "
|
scizen9/kpy
|
GRB/GBM_exp.py
|
Python
|
gpl-2.0
| 1,780
|
# -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# This module copyright (C) 2010 - 2014 Savoir-faire Linux
# (<http://www.savoirfairelinux.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
import os
import sys
import shutil
import openerp
from openerp import api
from openerp.osv import orm, fields
from openerp.addons.runbot.runbot import mkdirs
_logger = logging.getLogger(__name__)

# Pseudo-PID returned by job methods to tell the runbot scheduler that the
# job was skipped and it should proceed straight to the next job.
MAGIC_PID_RUN_NEXT_JOB = -2
def custom_build(func):
    """Decorator for functions which should be overwritten only if
    is_custom_build is enabled in repo.

    Splits ``ids`` into builds whose repo has ``is_custom_build`` and the
    rest: regular ids are delegated to the superclass method of the same
    name, custom ids go to the decorated function. At most one side may
    produce a non-None result (asserted below).
    """
    def custom_func(self, cr, uid, ids, context=None):
        args = [
            ('id', 'in', ids),
            ('branch_id.repo_id.is_custom_build', '=', True)
        ]
        custom_ids = self.search(cr, uid, args, context=context)
        regular_ids = list(set(ids) - set(custom_ids))
        ret = None
        if regular_ids:
            # Look up the inherited implementation by name
            # (Python 2 ``func_name`` attribute).
            regular_func = getattr(super(runbot_build, self), func.func_name)
            ret = regular_func(cr, uid, regular_ids, context=context)
        if custom_ids:
            assert ret is None
            ret = func(self, cr, uid, custom_ids, context=context)
        return ret
    return custom_func
class runbot_build(orm.Model):
_inherit = "runbot.build"
_columns = {
'prebuilt': fields.boolean("Prebuilt"),
}
def job_00_init(self, cr, uid, build, lock_path, log_path):
res = super(runbot_build, self).job_00_init(
cr, uid, build, lock_path, log_path
)
if build.branch_id.repo_id.is_custom_build:
build.pre_build(lock_path, log_path)
build.prebuilt = True
return res
def job_10_test_base(self, cr, uid, build, lock_path, log_path):
if build.branch_id.repo_id.skip_test_jobs:
_logger.info('skipping job_10_test_base')
return MAGIC_PID_RUN_NEXT_JOB
else:
return super(runbot_build, self).job_10_test_base(
cr, uid, build, lock_path, log_path
)
def job_20_test_all(self, cr, uid, build, lock_path, log_path):
if build.branch_id.repo_id.skip_test_jobs:
_logger.info('skipping job_20_test_all')
with open(log_path, 'w') as f:
f.write('consider tests as passed: '
'.modules.loading: Modules loaded.')
return MAGIC_PID_RUN_NEXT_JOB
else:
return super(runbot_build, self).job_20_test_all(
cr, uid, build, lock_path, log_path
)
def sub_cmd(self, build, cmd):
if not cmd:
return []
if isinstance(cmd, basestring):
cmd = cmd.split()
internal_vals = {
'custom_build_dir': build.repo_id.custom_build_dir or '',
'custom_server_path': build.repo_id.custom_server_path,
'other_repo_path': build.repo_id.other_repo_id.path or '',
'build_dest': build.dest,
}
return [i % internal_vals for i in cmd]
def pre_build(self, cr, uid, ids, lock_path, log_path, context=None):
"""Run pre-build command if there is one
Substitute path variables after splitting command to avoid problems
with spaces in internal variables.
Run command in build path to avoid relative path issues.
"""
pushd = os.getcwd()
try:
for build in self.browse(cr, uid, ids, context=context):
if build.prebuilt:
continue
cmd = self.sub_cmd(build, build.repo_id.custom_pre_build_cmd)
if not cmd:
continue
os.chdir(build.path())
self.spawn(cmd, lock_path, log_path)
finally:
os.chdir(pushd)
@custom_build
def checkout(self, cr, uid, ids, context=None):
"""Checkout in custom build directories if they are specified
Do same as superclass except for git_export path.
"""
for build in self.browse(cr, uid, ids, context=context):
if build.prebuilt:
continue
# starts from scratch
if os.path.isdir(build.path()):
shutil.rmtree(build.path())
# runbot log path
mkdirs([build.path("logs")])
# checkout branch
build_path = build.path()
custom_build_dir = build.repo_id.custom_build_dir
if custom_build_dir:
mkdirs([build.path(custom_build_dir)])
build_path = os.path.join(build_path, custom_build_dir)
build.repo_id.git_export(build.name, build_path)
@custom_build
def cmd(self, cr, uid, ids, context=None):
"""Get server start script from build config
Overwrite superclass completely
Specify database user in the case of custom config, to allow viewing
after db has been created by Odoo (using current user).
Disable multiworker
"""
build = self.browse(cr, uid, ids[0], context=context)
server_path = build.path(build.repo_id.custom_server_path)
mods = build.repo_id.modules or "base"
params = self.sub_cmd(build, build.repo_id.custom_server_params)
# commandline
cmd = [
sys.executable,
server_path,
"--no-xmlrpcs",
"--xmlrpc-port=%d" % build.port,
"--db_user=%s" % openerp.tools.config['db_user'],
"--workers=0",
] + params
return cmd, mods
@api.cr_uid_ids_context
def server(self, cr, uid, ids, *l, **kw):
for build in self.browse(cr, uid, ids, context=None):
if build.repo_id.is_custom_build:
custom_odoo_path = build.repo_id.custom_odoo_path
if custom_odoo_path and\
os.path.exists(build.path(custom_odoo_path)):
return build.path(custom_odoo_path, *l)
return super(runbot_build, self).server(cr, uid, ids, *l, **kw)
|
open-synergy/runbot-addons
|
runbot_build_instructions/runbot_build.py
|
Python
|
agpl-3.0
| 6,892
|
# -*- coding: utf-8 -*-
import os
import pygame
import random
import classes.board
import classes.extras as ex
import classes.game_driver as gd
import classes.level_controller as lc
class Board(gd.BoardGame):
def __init__(self, mainloop, speaker, config, screen_w, screen_h):
self.lvlc = mainloop.xml_conn.get_level_count(mainloop.m.game_dbid, mainloop.config.user_age_group)
self.level = lc.Level(self, mainloop, self.lvlc[0], self.lvlc[1])
gd.BoardGame.__init__(self, mainloop, speaker, config, screen_w, screen_h, 20, 10)
def create_game_objects(self, level=1):
self.vis_buttons = [1, 1, 1, 1, 1, 0, 1, 0, 1]
self.mainloop.info.hide_buttonsa(self.vis_buttons)
# create non-movable objects
self.board.draw_grid = False
h = random.randrange(0, 255, 5)
font_color = ex.hsv_to_rgb(h, 255, 140)
white = (255, 255, 255)
# data = [x_count, y_count, number of items on the list, top_quantity, font-size]
data = [20, 13]
data.extend(
self.mainloop.xml_conn.get_level_data(self.mainloop.m.game_dbid, self.mainloop.config.user_age_group,
self.level.lvl))
self.chapters = self.mainloop.xml_conn.get_chapters(self.mainloop.m.game_dbid,
self.mainloop.config.user_age_group)
# rescale the number of squares horizontally to better match the screen width
x_count = self.get_x_count(data[1], even=None)
if x_count > 20:
data[0] = x_count
self.data = data
self.board.set_animation_constraints(0, data[0], 0, data[1])
self.layout.update_layout(data[0], data[1])
self.board.level_start(data[0], data[1], self.layout.scale)
self.unit_mouse_over = None
self.units = []
shelf_len = 7
# basket
basket_w = data[0] - shelf_len - 1
self.board.add_door(data[0] - basket_w, data[1] - 5, basket_w, 5, classes.board.Door, "", white, "")
self.board.units[0].door_outline = True
# basket image - 260 x 220
img_bg_col = white
if self.mainloop.scheme is not None:
if self.mainloop.scheme.dark:
img_bg_col = (0, 0, 0)
img_src = "basket.png"
self.board.add_door(data[0] - 6, data[1] - 5, 6, 5, classes.board.Door, "", img_bg_col, img_src, door_alpha=True)
self.board.units[-1].is_door = False
self.board.add_unit(data[0] - 7, 0, 7, 1, classes.board.Label, self.d["Shopping List"], white, "", data[4] + 1)
self.board.units[-1].font_color = font_color
f_end = ".png"
items = ["fr_apple1", "fr_apple2", "fr_strawberry", "fr_pear", "fr_orange", "fr_onion", "fr_tomato", "fr_lemon",
"fr_cherry", "fr_pepper", "fr_carrot", "fr_banana", "fr_wmelon"]
self.items = items
self.img_captions = []
self.singular_items = ["green apple", "red apple", "strawberry", "pear", "orange [fruit]", "onion", "tomato",
"lemon", "cherry", "pepper", "carrot", "banana", "watermelon"]
#h_list = [15, 61, 5, 44, 17, 23, 9, 42, 253, 2, 17, 35, 60]
self.count_units = []
for each in self.singular_items:
caption = self.lang._n(each, 1)
if not self.lang.ltr_text:
caption = ex.reverse(self.lang._n(each, 1), self.lang.lang)
if caption is None:
caption = ""
self.img_captions.append(caption)
if self.lang.lang in ["ru", "he"]:
self.img_pcaptions = []
si = self.lang.dp["fruit"]
for each in si:
pcaption = self.lang._n(each, 1)
if pcaption is None:
pcaption = ""
self.img_pcaptions.append(pcaption)
else:
self.img_pcaptions = self.img_captions
item_indexes = [x for x in range(len(items))]
self.chosen_items = [[], []]
self.solution = {}
# pick items and quantities
for i in range(data[2]):
index = random.randrange(0, len(item_indexes))
self.chosen_items[0].append(item_indexes[index])
quantity = random.randrange(1, data[3] + 1)
self.chosen_items[1].append(quantity)
self.solution[str(item_indexes[index])] = quantity
del (item_indexes[index])
if self.lang.ltr_text:
l = [data[0] - 7, data[0] - 6, data[0] - 5]
else:
l = [data[0] - 1, data[0] - 2, data[0] - 7]
# create shopping list
for i in range(data[2]):
ind = self.chosen_items[0][i]
caption = self.lang._n(self.singular_items[ind], self.chosen_items[1][i])
if not self.lang.ltr_text:
caption = ex.reverse(caption, self.lang.lang)
if caption is None:
caption = ""
self.board.add_unit(l[0], i + 1, 1, 1, classes.board.Label, str(self.chosen_items[1][i]) + " ", white, "",
data[4])
self.board.units[-1].font_color = font_color
self.board.units[-1].checkable = True
self.board.units[-1].init_check_images(1, 1.5)
self.count_units.append(len(self.board.units))
self.board.add_unit(l[1], i + 1, 1, 1, classes.board.ImgShip, "", (0, 0, 0, 0),
os.path.join("fr", items[ind] + f_end), data[4], alpha=True)
self.board.add_unit(l[2], i + 1, 5, 1, classes.board.Label, caption, white, "", data[4])
self.board.units[-1].font_color = font_color
self.board.ships[i].immobilize()
self.board.ships[i].outline = False
if self.lang.ltr_text:
self.board.units[-1].align = 1
else:
self.board.units[-1].align = 2
# rearange z-order of red outlines (shopping list and basket)
for i in range(2):
self.board.all_sprites_list.move_to_front(self.board.units[i])
if self.mainloop.scheme is None:
dc_img_src = os.path.join('unit_bg', "dc_hover_wb.png")
else:
if self.mainloop.scheme.dark:
dc_img_src = os.path.join('unit_bg', "dc_hover_bw.png")
else:
dc_img_src = os.path.join('unit_bg', "dc_hover_wb.png")
dc_tint_color = ex.hsv_to_rgb(253, self.mainloop.cl.bg_color_s, self.mainloop.cl.bg_color_v)
# put stuff on shelves:
for i in range(len(items)):
image = os.path.join("fr", items[i] + f_end)
for j in range(0, shelf_len):
self.board.add_universal_unit(grid_x=j, grid_y=i, grid_w=1, grid_h=1, txt=self.img_captions[i],
fg_img_src=None, bg_img_src=image, dc_img_src=dc_img_src,
bg_color=(0, 0, 0, 0), border_color=None, font_color=None,
bg_tint_color=None, fg_tint_color=None, dc_tint_color=dc_tint_color,
txt_align=(0, 0), font_type=0, multi_color=False, alpha=True,
immobilized=False, dc_as_hover=True, mode=0)
self.board.ships[-1].audible = False
self.board.ships[-1].speaker_val = self.img_pcaptions[i]
self.board.ships[-1].speaker_val_update = False
self.board.ships[-1].item_code = items[i]
self.units.append(self.board.ships[-1])
self.board.all_sprites_list.move_to_front(self.board.units[0])
def show_info_dialog(self):
self.mainloop.dialog.show_dialog(3, self.d["Check the shopping list"])
def handle(self, event):
gd.BoardGame.handle(self, event)
if event.type == pygame.MOUSEBUTTONUP:
for each in self.board.units:
if each.is_door is True:
self.board.all_sprites_list.move_to_front(each)
elif event.type == pygame.MOUSEBUTTONDOWN:
self.auto_check_reset()
if event.type == pygame.MOUSEMOTION or event.type == pygame.MOUSEBUTTONUP:
self.default_hover(event)
def auto_check_reset(self):
for i in range(self.data[2]):
self.board.units[self.count_units[i]-1].set_display_check(None)
def update(self, game):
game.fill((255, 255, 255))
gd.BoardGame.update(self, game)
    def check_result(self):
        """Compare the basket contents against the shopping list.

        Marks each list entry with a green/red check depending on whether the
        exact required count of that item sits in the basket, and advances to
        the next board when the whole basket matches the solution.
        """
        # checking what sprites collide with the basket sprite
        purchased = pygame.sprite.spritecollide(self.board.units[0], self.board.ship_list, False, collided=None)
        result = {}
        # count each item and check if they are the items from the shopping list
        for i in range(len(self.items)):
            count = 0
            for each in purchased:
                if each.item_code == self.items[i]:
                    count += 1
            if count > 0:
                result[str(i)] = count
        # Per list entry: check mark True only on an exact count match.
        for i in range(self.data[2]):
            if str(self.chosen_items[0][i]) in result and str(self.chosen_items[0][i]) in self.solution:
                if result[str(self.chosen_items[0][i])] == self.solution[str(self.chosen_items[0][i])]:
                    self.board.units[self.count_units[i]-1].set_display_check(True)
                else:
                    self.board.units[self.count_units[i]-1].set_display_check(False)
            else:
                self.board.units[self.count_units[i]-1].set_display_check(False)
        # Whole-basket match (no extras, no shortages) completes the level.
        if result == self.solution:
            self.level.next_board()
            self.mainloop.redraw_needed[0] = True
|
imiolek-ireneusz/eduActiv8
|
game_boards/game027.py
|
Python
|
gpl-3.0
| 9,800
|
from suds.client import Client
from nova import exception
from nova import db
import logging
logging.getLogger('suds').setLevel(logging.INFO)
def update_for_run_instance(service_url, region_name, server_port1, server_port2, dpid1, dpid2):
    """Register both server ports with the OFC region via its SOAP API."""
    # check region name
    ofc = Client(service_url + "?wsdl")
    for dpid, port in ((dpid1, server_port1), (dpid2, server_port2)):
        ofc.service.setServerPort(dpid, port, region_name)
    ofc.service.save()
def update_for_terminate_instance(service_url, region_name, server_port1, server_port2, dpid1, dpid2, vlan_id):
    """Clear both server ports; remove the region once no server port
    anywhere still belongs to it."""
    ofc = Client(service_url + "?wsdl")
    for dpid, port in ((dpid1, server_port1), (dpid2, server_port2)):
        ofc.service.clearServerPort(dpid, port)
    ofc.service.save()
    # If any switch still has a ServerPort assigned to this region, keep it.
    for dpid_data in ofc.service.showSwitchDatapathId():
        for port in ofc.service.showPorts(dpid_data.dpid):
            if port.type == "ServerPort" and port.regionName == region_name:
                return
    remove_region(service_url, region_name, vlan_id)
def create_region(service_url, region_name, vlan_id):
    """Create an OFC region and associate every switch's outer port with it.

    On failure of the outer-port association step, rolls back by destroying
    the freshly created region, then raises an OFC-specific exception.

    :raises exception.OFCRegionCreationFailed: region creation failed.
    :raises exception.OFCRegionSettingOuterPortAssocFailed: association failed.
    """
    client = Client(service_url + "?wsdl")
    try:
        client.service.createRegion(region_name)
        client.service.save()
    except Exception:
        # Narrowed from a bare ``except:`` so SystemExit/KeyboardInterrupt
        # are no longer swallowed and re-labelled as an OFC failure.
        raise exception.OFCRegionCreationFailed(region_name=region_name)
    try:
        switches = db.switch_get_all(None)
        for switch in switches:
            client.service.setOuterPortAssociationSetting(switch["dpid"], switch["outer_port"], vlan_id, 65535, region_name)
            client.service.save()
    except Exception:
        # Best-effort rollback before reporting the failure to the caller.
        client.service.destroyRegion(region_name)
        client.service.save()
        raise exception.OFCRegionSettingOuterPortAssocFailed(region_name=region_name, vlan_id=vlan_id)
def remove_region(service_url, region_name, vlan_id):
    """Destroy an OFC region after clearing its outer-port associations.

    Clearing the associations is deliberately best-effort: any error there is
    ignored so the region itself is always destroyed.
    """
    client = Client(service_url + "?wsdl")
    try:
        switches = db.switch_get_all(None)
        for switch in switches:
            client.service.clearOuterPortAssociationSetting(switch["dpid"], switch["outer_port"], vlan_id)
            client.service.save()
    except Exception:
        # Narrowed from a bare ``except:``; still best-effort, but no longer
        # swallows SystemExit/KeyboardInterrupt.
        pass
    client.service.destroyRegion(region_name)
    client.service.save()
def has_region(service_url, region_name):
    """Return True when the OFC already knows a region with this name."""
    ofc = Client(service_url + "?wsdl")
    existing_names = [region.regionName for region in ofc.service.showRegion()]
    return region_name in existing_names
|
nii-cloud/dodai-compute
|
nova/virt/dodai/ofc_utils.py
|
Python
|
apache-2.0
| 2,420
|
#!/usr/bin/env python
# coding:utf-8 vi:et:ts=2
# Python Rewriter predefined loader code.
# Copyright 2013 Grigory Petrov
# See LICENSE for details.
import imp
import os
class Context( object ):
  """Process-wide singleton that caches loaded predefined grammar modules."""

  # Lazily created singleton instance.
  _inst_o = None

  def __init__( self ):
    # Maps grammar name -> loaded module (cache used by |predefined()|).
    self.predefined = {}

  @classmethod
  def get( cls ):
    """Return the shared Context, creating it on first use.

    The first parameter was previously named ``self`` even though this is a
    classmethod; renamed to ``cls`` per PEP 8 (behavior unchanged).
    """
    if not cls._inst_o:
      cls._inst_o = Context()
    return cls._inst_o
def predefined( s_name ):
  """Load (once) and return the GRAMMAR of module ``predefined_<s_name>``.

  Modules are looked up next to this file and cached on the Context
  singleton, so each grammar file is loaded at most once per process.
  """
  cache = Context.get().predefined
  if s_name not in cache:
    module_name = 'predefined_{0}'.format( s_name )
    here = os.path.dirname( os.path.abspath( __file__ ) )
    module_path = os.path.join( here, '{0}.py'.format( module_name ) )
    cache[ s_name ] = imp.load_source( module_name, module_path )
  return cache[ s_name ].GRAMMAR
|
eyeofhell/pyrewriter
|
pyrewriter/predefined.py
|
Python
|
gpl-3.0
| 808
|
class Settings():
    """All tunable values for the game: display layout, UI colors, state
    flags, and the per-game play parameters."""

    def __init__(self):
        # --- display / UI ---
        self.screen_width = 800
        self.screen_height = 600
        self.bg_color = (200, 200, 200)
        self.scoreboard_height = 50
        self.button_width = 250
        self.button_height = 50
        self.button_bg = (0, 163, 0)
        self.button_text_color = (235, 235, 235)
        self.button_font = 'Arial'
        self.button_font_size = 24
        # --- game status ---
        self.game_active = False
        # --- game over conditions ---
        self.min_popped_ratio = 0.9
        self.games_played = 0
        self.initialize_game_parameters()

    def initialize_game_parameters(self):
        """Reset the parameters that change over the course of one game."""
        self.balloon_speed = 0.1
        # How quickly the speed of balloons rises:
        # ~1.05 during testing and ~1.01 for actual play.
        self.speed_increase_factor = 1.05
        self.points_per_balloon = 10
        # Number of balloons to release in a spawning.
        self.batch_size = 1
        # Number of batches that must be completed to increment batch_size.
        self.batches_needed = 3
        # Ratio of kittens released per balloon released.
        self.kitten_ratio = 0.10
        # Relative value of kittens, in terms of balloons.
        self.kitten_score_factor = 3
|
ehmatthes/balloon_ninja
|
Settings.py
|
Python
|
mit
| 1,274
|
from datetime import datetime, time, timedelta
from django.utils import timezone
import sal.plugin
from server.models import ManagedItemHistory
STATUSES = ('present', 'pending', 'error')
class MunkiInstalls(sal.plugin.Widget):
    """Widget plugin charting the last 15 days of Munki install activity
    (present / pending / error counts per day)."""

    description = 'Chart of Munki install activity'
    widget_width = 8
    supported_os_families = [sal.plugin.OSFamilies.darwin]

    def get_context(self, queryset, **kwargs):
        """Add a ``data`` list to the context: one dict per day with counts
        for each status plus the day's date string."""
        context = self.super_get_context(queryset, **kwargs)
        now = timezone.now()
        context['data'] = []
        # Today plus the previous 14 days, each as a full-day range.
        for offset in range(0, 15):
            day = now - timedelta(days=offset)
            start = timezone.make_aware(datetime.combine(day, time.min))
            end = timezone.make_aware(datetime.combine(day, time.max))
            entry = {status: self._filter(queryset, status, (start, end))
                     for status in STATUSES}
            entry['date'] = start.strftime("%Y-%m-%d")
            context['data'].append(entry)
        return context

    def _filter(self, queryset, data, time_range):
        # Count Munki-sourced history rows with the given status recorded
        # inside time_range for machines in the queryset.
        matches = ManagedItemHistory.objects.filter(
            status__iexact=data,
            recorded__range=time_range,
            machine__in=queryset,
            management_source__name='Munki')
        return matches.count()
|
salopensource/sal
|
server/plugins/munkiinstalls/munkiinstalls.py
|
Python
|
apache-2.0
| 1,529
|
import logging
from concurrent.futures import ThreadPoolExecutor
import stomp
from stomp.listener import TestListener
from .testutils import *
executor = ThreadPoolExecutor()
def create_thread(fc):
    """Run *fc* on the module-level executor; plugs into stomp's
    ``override_threading`` hook in place of ``threading.Thread``."""
    future = executor.submit(fc)
    print("Created future %s on executor %s" % (future, executor))
    return future
class ReconnectListener(TestListener):
    """TestListener that, exactly once, reconnects and immediately
    disconnects when the receiver loop ends (self.conn is cleared first so
    the nested loop-end does not recurse)."""

    def __init__(self, conn):
        TestListener.__init__(self, "123", True)
        self.conn = conn

    def on_receiver_loop_ended(self, *args):
        if not self.conn:
            return
        connection, self.conn = self.conn, None
        connection.connect(get_default_user(), get_default_password(), wait=True)
        connection.disconnect()
@pytest.fixture
def conn():
    """Yield a connected stomp Connection whose transport threading is
    routed through the module-level ThreadPoolExecutor."""
    conn = stomp.Connection(get_default_host())
    # Replace the default threading.Thread receiver with executor futures.
    conn.transport.override_threading(create_thread)
    listener = ReconnectListener(conn)
    conn.set_listener("testlistener", listener)
    conn.connect(get_default_user(), get_default_password(), wait=True)
    yield conn
class TestThreadingOverride(object):
    def test_threading(self, conn):
        """Send/receive on a queue, then run a disconnect/reconnect cycle, all
        on a connection whose receiver loop runs as an executor future
        (see the ``conn`` fixture and ReconnectListener)."""
        listener = conn.get_listener("testlistener")
        queuename = "/queue/test1-%s" % listener.timestamp
        conn.subscribe(destination=queuename, id=1, ack="auto")
        conn.send(body="this is a test", destination=queuename, receipt="123")
        validate_send(conn, 1, 1, 0)
        logging.info("first disconnect")
        conn.disconnect(receipt="112233")
        logging.info("reconnecting")
        conn.connect(get_default_user(), get_default_password(), wait=True)
        logging.info("second disconnect")
        conn.disconnect()
|
jasonrbriggs/stomp.py
|
tests/test_override_threading.py
|
Python
|
apache-2.0
| 1,654
|
from __future__ import division, print_function, absolute_import
from functools import reduce
import numpy as np
import numpy.testing as npt
from dipy.reconst.multi_voxel import _squash, multi_voxel_model, CallableArray
from dipy.core.sphere import unit_icosahedron
def test_squash():
    """Exercise _squash: object arrays collapse to plain arrays, Nones become
    the fill value, result dtype follows numpy promotion, and un-squashable
    inputs (mixed sub-array shapes) are returned unchanged."""
    A = np.ones((3, 3), dtype=float)
    B = np.asarray(A, object)
    npt.assert_array_equal(A, _squash(B))
    npt.assert_equal(_squash(B).dtype, A.dtype)
    B[2, 2] = None
    A[2, 2] = 0
    npt.assert_array_equal(A, _squash(B))
    npt.assert_equal(_squash(B).dtype, A.dtype)
    # Object array of equal-shaped sub-arrays squashes to a stacked array.
    for ijk in np.ndindex(*B.shape):
        B[ijk] = np.ones((2,))
    A = np.ones((3, 3, 2))
    npt.assert_array_equal(A, _squash(B))
    npt.assert_equal(_squash(B).dtype, A.dtype)
    B[2, 2] = None
    A[2, 2] = 0
    npt.assert_array_equal(A, _squash(B))
    npt.assert_equal(_squash(B).dtype, A.dtype)
    # sub-arrays have different shapes ( (3,) and (2,) )
    B[0, 0] = np.ones((3,))
    npt.assert_(_squash(B) is B)
    # Check dtypes for arrays and scalars
    arr_arr = np.zeros((2,), dtype=object)
    scalar_arr = np.zeros((2,), dtype=object)
    # NOTE(review): np.sctypes was removed in NumPy 2.0 -- this test needs
    # updating before it can run against modern NumPy.
    numeric_types = sum(
        [np.sctypes[t] for t in ('int', 'uint', 'float', 'complex')],
        [np.bool_])
    for dt0 in numeric_types:
        arr_arr[0] = np.zeros((3,), dtype=dt0)
        scalar_arr[0] = dt0(0)
        for dt1 in numeric_types:
            # Squashed dtype must match what numpy promotion (via add) gives.
            arr_arr[1] = np.zeros((3,), dtype=dt1)
            npt.assert_equal(_squash(arr_arr).dtype,
                             reduce(np.add, arr_arr).dtype)
            scalar_arr[1] = dt0(1)
            npt.assert_equal(_squash(scalar_arr).dtype,
                             reduce(np.add, scalar_arr).dtype)
    # Check masks and Nones
    arr = np.ones((3, 4), dtype=float)
    obj_arr = arr.astype(object)
    arr[1, 1] = 99
    obj_arr[1, 1] = None
    npt.assert_array_equal(_squash(obj_arr, mask=None, fill=99), arr)
    msk = arr == 1
    npt.assert_array_equal(_squash(obj_arr, mask=msk, fill=99), arr)
    msk[1, 1] = 1  # unmask None - object array back
    npt.assert_array_equal(_squash(obj_arr, mask=msk, fill=99), obj_arr)
    msk[1, 1] = 0  # remask, back to fill again
    npt.assert_array_equal(_squash(obj_arr, mask=msk, fill=99), arr)
    obj_arr[2, 3] = None  # add another unmasked None, object again
    npt.assert_array_equal(_squash(obj_arr, mask=msk, fill=99), obj_arr)
    # Check array of arrays
    obj_arrs = np.zeros((3,), dtype=object)
    for i in range(3):
        obj_arrs[i] = np.ones((4, 5))
    arr_arrs = np.ones((3, 4, 5))
    # No Nones
    npt.assert_array_equal(_squash(obj_arrs, mask=None, fill=99), arr_arrs)
    # None, implicit masking
    obj_masked = obj_arrs.copy()
    obj_masked[1] = None
    arr_masked = arr_arrs.copy()
    arr_masked[1] = 99
    npt.assert_array_equal(_squash(obj_masked, mask=None, fill=99),
                           arr_masked)
    msk = np.array([1, 0, 1], dtype=np.bool_)  # explicit mask
    npt.assert_array_equal(_squash(obj_masked, mask=msk, fill=99),
                           arr_masked)
    msk[1] = True  # unmask None, object array back
    npt.assert_array_equal(_squash(obj_masked, mask=msk, fill=99),
                           obj_masked)
def test_CallableArray():
    """Calling a CallableArray maps the call over its elements; ``None``
    entries contribute zeros to the result."""
    arr = CallableArray((2, 3), dtype=object)
    # Every element callable: each cell yields arange(4).
    arr[:] = np.arange
    want = np.empty([2, 3, 4])
    want[:] = range(4)
    npt.assert_array_equal(arr(4), want)
    # A None element is replaced by zeros in the stacked result.
    arr[0, 0] = None
    want[0, 0] = 0
    npt.assert_array_equal(arr(4), want)
def test_multi_voxel_model():
    """Check that multi_voxel_model wraps a single-voxel model so fitting a
    4D volume produces per-voxel fits, honoring an optional mask."""
    class SillyModel(object):
        def fit(self, data, mask=None):
            # NOTE(review): this returns SillyFit(model) using the
            # module-level ``model`` assigned further below, not ``self`` --
            # works only because fit() is called after that assignment.
            return SillyFit(model)
    class SillyFit(object):
        def __init__(self, model):
            self.model = model
        # Class-level attribute; the wrapped fit should expose it per voxel.
        model_attr = 2.
        def odf(self, sphere):
            return np.ones(len(sphere.phi))
        @property
        def directions(self):
            # Deliberately variable-length so per-voxel results cannot be
            # stacked into one rectangular array.
            n = np.random.randint(0, 10)
            return np.zeros((n, 3))
    # Wrap the SillyModel
    MultiVoxelSillyModel = multi_voxel_model(SillyModel)
    # Test the single voxel case
    model = MultiVoxelSillyModel()
    single_voxel = np.zeros(64)
    fit = model.fit(single_voxel)
    npt.assert_equal(type(fit), SillyFit)
    # Test without a mask
    many_voxels = np.zeros((2, 3, 4, 64))
    fit = model.fit(many_voxels)
    expected = np.empty((2, 3, 4))
    expected[:] = 2.
    npt.assert_array_equal(fit.model_attr, expected)
    expected = np.ones((2, 3, 4, 12))
    npt.assert_array_equal(fit.odf(unit_icosahedron), expected)
    npt.assert_equal(fit.directions.shape, (2, 3, 4))
    # Test with a mask
    mask = np.eye(3).astype('bool')
    data = np.zeros((3, 3, 64))
    fit = model.fit(data, mask)
    npt.assert_array_equal(fit.model_attr, np.eye(3)*2)
    odf = fit.odf(unit_icosahedron)
    npt.assert_equal(odf.shape, (3, 3, 12))
    npt.assert_array_equal(odf[~mask], 0)
    npt.assert_array_equal(odf[mask], 1)
    # Test fit.shape
    npt.assert_equal(fit.shape, (3, 3))
    # Test indexing into a fit
    npt.assert_equal(type(fit[0, 0]), SillyFit)
    npt.assert_equal(fit[:2, :2].shape, (2, 2))
|
maurozucchelli/dipy
|
dipy/reconst/tests/test_multi_voxel.py
|
Python
|
bsd-3-clause
| 5,227
|
from itertools import chain
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.test import TestCase
import guardian
from guardian.backends import ObjectPermissionBackend
from guardian.exceptions import GuardianError
from guardian.exceptions import NotUserNorGroup
from guardian.exceptions import ObjectNotPersisted
from guardian.exceptions import WrongAppError
from guardian.models import GroupObjectPermission
from guardian.models import UserObjectPermission
from guardian.models import AnonymousUser
from guardian.models import Group
from guardian.models import Permission
from guardian.models import User
class UserPermissionTests(TestCase):
    """Per-object permission assignment/removal for individual users.

    The ContentType rows created in setUp are used only as arbitrary
    persisted objects to attach object permissions to.
    """
    fixtures = ['tests.json']
    def setUp(self):
        # 'jack' is provided by the tests.json fixture.
        self.user = User.objects.get(username='jack')
        self.ctype = ContentType.objects.create(name='foo', model='bar',
            app_label='fake-for-guardian-tests')
        self.obj1 = ContentType.objects.create(name='ct1', model='foo',
            app_label='guardian-tests')
        self.obj2 = ContentType.objects.create(name='ct2', model='bar',
            app_label='guardian-tests')
    def test_assignement(self):
        self.assertFalse(self.user.has_perm('change_contenttype', self.ctype))
        UserObjectPermission.objects.assign('change_contenttype', self.user,
            self.ctype)
        self.assertTrue(self.user.has_perm('change_contenttype', self.ctype))
        self.assertTrue(self.user.has_perm('contenttypes.change_contenttype',
            self.ctype))
    def test_assignement_and_remove(self):
        UserObjectPermission.objects.assign('change_contenttype', self.user,
            self.ctype)
        self.assertTrue(self.user.has_perm('change_contenttype', self.ctype))
        UserObjectPermission.objects.remove_perm('change_contenttype',
            self.user, self.ctype)
        self.assertFalse(self.user.has_perm('change_contenttype', self.ctype))
    def test_ctypes(self):
        # Permissions on one object must never leak to another object.
        UserObjectPermission.objects.assign('change_contenttype', self.user, self.obj1)
        self.assertTrue(self.user.has_perm('change_contenttype', self.obj1))
        self.assertFalse(self.user.has_perm('change_contenttype', self.obj2))
        UserObjectPermission.objects.remove_perm('change_contenttype', self.user, self.obj1)
        UserObjectPermission.objects.assign('change_contenttype', self.user, self.obj2)
        self.assertTrue(self.user.has_perm('change_contenttype', self.obj2))
        self.assertFalse(self.user.has_perm('change_contenttype', self.obj1))
        UserObjectPermission.objects.assign('change_contenttype', self.user, self.obj1)
        UserObjectPermission.objects.assign('change_contenttype', self.user, self.obj2)
        self.assertTrue(self.user.has_perm('change_contenttype', self.obj2))
        self.assertTrue(self.user.has_perm('change_contenttype', self.obj1))
        UserObjectPermission.objects.remove_perm('change_contenttype', self.user, self.obj1)
        UserObjectPermission.objects.remove_perm('change_contenttype', self.user, self.obj2)
        self.assertFalse(self.user.has_perm('change_contenttype', self.obj2))
        self.assertFalse(self.user.has_perm('change_contenttype', self.obj1))
    def test_get_for_object(self):
        perms = UserObjectPermission.objects.get_for_object(self.user, self.ctype)
        self.assertEqual(perms.count(), 0)
        to_assign = sorted([
            'delete_contenttype',
            'change_contenttype',
        ])
        for perm in to_assign:
            UserObjectPermission.objects.assign(perm, self.user, self.ctype)
        perms = UserObjectPermission.objects.get_for_object(self.user, self.ctype)
        codenames = sorted(chain(*perms.values_list('permission__codename')))
        self.assertEqual(to_assign, codenames)
    def test_assign_validation(self):
        # Assigning a permission whose codename does not exist for the
        # target's content type must fail loudly.
        self.assertRaises(Permission.DoesNotExist,
            UserObjectPermission.objects.assign, 'change_group', self.user,
            self.user)
        group = Group.objects.create(name='test_group_assign_validation')
        ctype = ContentType.objects.get_for_model(group)
        perm = Permission.objects.get(codename='change_user')
        create_info = dict(
            permission = perm,
            user = self.user,
            content_type = ctype,
            object_pk = group.pk
        )
        self.assertRaises(ValidationError, UserObjectPermission.objects.create,
            **create_info)
    def test_unicode(self):
        # NOTE(review): relies on the Python 2-only ``unicode`` builtin and
        # ``__unicode__`` protocol.
        obj_perm = UserObjectPermission.objects.assign("change_user",
            self.user, self.user)
        self.assertTrue(isinstance(obj_perm.__unicode__(), unicode))
    def test_errors(self):
        not_saved_user = User(username='not_saved_user')
        self.assertRaises(ObjectNotPersisted,
            UserObjectPermission.objects.assign,
            "change_user", self.user, not_saved_user)
        self.assertRaises(ObjectNotPersisted,
            UserObjectPermission.objects.remove_perm,
            "change_user", self.user, not_saved_user)
        self.assertRaises(ObjectNotPersisted,
            UserObjectPermission.objects.get_for_object,
            "change_user", not_saved_user)
class GroupPermissionTests(TestCase):
    """Per-object permission assignment/removal via group membership: jack
    belongs to jackGroup, so group permissions must be visible through
    ``self.user.has_perm``."""
    fixtures = ['tests.json']
    def setUp(self):
        self.user = User.objects.get(username='jack')
        self.group, created = Group.objects.get_or_create(name='jackGroup')
        self.user.groups.add(self.group)
        self.ctype = ContentType.objects.create(name='foo', model='bar',
            app_label='fake-for-guardian-tests')
        self.obj1 = ContentType.objects.create(name='ct1', model='foo',
            app_label='guardian-tests')
        self.obj2 = ContentType.objects.create(name='ct2', model='bar',
            app_label='guardian-tests')
    def test_assignement(self):
        self.assertFalse(self.user.has_perm('change_contenttype', self.ctype))
        self.assertFalse(self.user.has_perm('contenttypes.change_contenttype',
            self.ctype))
        GroupObjectPermission.objects.assign('change_contenttype', self.group,
            self.ctype)
        self.assertTrue(self.user.has_perm('change_contenttype', self.ctype))
        self.assertTrue(self.user.has_perm('contenttypes.change_contenttype',
            self.ctype))
    def test_assignement_and_remove(self):
        GroupObjectPermission.objects.assign('change_contenttype', self.group,
            self.ctype)
        self.assertTrue(self.user.has_perm('change_contenttype', self.ctype))
        GroupObjectPermission.objects.remove_perm('change_contenttype',
            self.group, self.ctype)
        self.assertFalse(self.user.has_perm('change_contenttype', self.ctype))
    def test_ctypes(self):
        # Group permissions on one object must not leak to another object.
        GroupObjectPermission.objects.assign('change_contenttype', self.group,
            self.obj1)
        self.assertTrue(self.user.has_perm('change_contenttype', self.obj1))
        self.assertFalse(self.user.has_perm('change_contenttype', self.obj2))
        GroupObjectPermission.objects.remove_perm('change_contenttype',
            self.group, self.obj1)
        GroupObjectPermission.objects.assign('change_contenttype', self.group,
            self.obj2)
        self.assertTrue(self.user.has_perm('change_contenttype', self.obj2))
        self.assertFalse(self.user.has_perm('change_contenttype', self.obj1))
        GroupObjectPermission.objects.assign('change_contenttype', self.group,
            self.obj1)
        GroupObjectPermission.objects.assign('change_contenttype', self.group,
            self.obj2)
        self.assertTrue(self.user.has_perm('change_contenttype', self.obj2))
        self.assertTrue(self.user.has_perm('change_contenttype', self.obj1))
        GroupObjectPermission.objects.remove_perm('change_contenttype',
            self.group, self.obj1)
        GroupObjectPermission.objects.remove_perm('change_contenttype',
            self.group, self.obj2)
        self.assertFalse(self.user.has_perm('change_contenttype', self.obj2))
        self.assertFalse(self.user.has_perm('change_contenttype', self.obj1))
    def test_get_for_object(self):
        group = Group.objects.create(name='get_group_perms_for_object')
        self.user.groups.add(group)
        perms = GroupObjectPermission.objects.get_for_object(group, self.ctype)
        self.assertEqual(perms.count(), 0)
        to_assign = sorted([
            'delete_contenttype',
            'change_contenttype',
        ])
        for perm in to_assign:
            GroupObjectPermission.objects.assign(perm, group, self.ctype)
        perms = GroupObjectPermission.objects.get_for_object(group, self.ctype)
        codenames = sorted(chain(*perms.values_list('permission__codename')))
        self.assertEqual(to_assign, codenames)
    def test_assign_validation(self):
        # Codename must exist for the target's content type.
        self.assertRaises(Permission.DoesNotExist,
            GroupObjectPermission.objects.assign, 'change_user', self.group,
            self.group)
        user = User.objects.create(username='test_user_assign_validation')
        ctype = ContentType.objects.get_for_model(user)
        perm = Permission.objects.get(codename='change_group')
        create_info = dict(
            permission = perm,
            group = self.group,
            content_type = ctype,
            object_pk = user.pk
        )
        self.assertRaises(ValidationError, GroupObjectPermission.objects.create,
            **create_info)
    def test_unicode(self):
        # NOTE(review): relies on the Python 2-only ``unicode`` builtin and
        # ``__unicode__`` protocol.
        obj_perm = GroupObjectPermission.objects.assign("change_group",
            self.group, self.group)
        self.assertTrue(isinstance(obj_perm.__unicode__(), unicode))
    def test_errors(self):
        not_saved_group = Group(name='not_saved_group')
        self.assertRaises(ObjectNotPersisted,
            GroupObjectPermission.objects.assign,
            "change_group", self.group, not_saved_group)
        self.assertRaises(ObjectNotPersisted,
            GroupObjectPermission.objects.remove_perm,
            "change_group", self.group, not_saved_group)
        self.assertRaises(ObjectNotPersisted,
            GroupObjectPermission.objects.get_for_object,
            "change_group", not_saved_group)
class ObjectPermissionBackendTests(TestCase):
    """Behavior of the ObjectPermissionBackend auth backend itself."""
    def setUp(self):
        self.user = User.objects.create(username='jack')
        self.backend = ObjectPermissionBackend()
    def test_attrs(self):
        self.assertTrue(self.backend.supports_anonymous_user)
        self.assertTrue(self.backend.supports_object_permissions)
        self.assertTrue(self.backend.supports_inactive_user)
    def test_authenticate(self):
        # This backend never authenticates credentials itself.
        self.assertEqual(self.backend.authenticate(
            self.user.username, self.user.password), None)
    def test_has_perm_noobj(self):
        # Without an object the backend must not grant anything.
        result = self.backend.has_perm(self.user, "change_contenttype")
        self.assertFalse(result)
    def test_has_perm_notauthed(self):
        user = AnonymousUser()
        self.assertFalse(self.backend.has_perm(user, "change_user", self.user))
    def test_has_perm_wrong_app(self):
        self.assertRaises(WrongAppError, self.backend.has_perm,
            self.user, "no_app.change_user", self.user)
    def test_obj_is_not_model(self):
        # Non-model objects are simply denied, not an error.
        for obj in (Group, 666, "String", [2, 1, 5, 7], {}):
            self.assertFalse(self.backend.has_perm(self.user,
                "any perm", obj))
    def test_not_active_user(self):
        user = User.objects.create(username='non active user')
        ctype = ContentType.objects.create(name='foo', model='bar',
            app_label='fake-for-guardian-tests')
        perm = 'change_contenttype'
        UserObjectPermission.objects.assign(perm, user, ctype)
        self.assertTrue(self.backend.has_perm(user, perm, ctype))
        # Deactivating the user must revoke effective permissions.
        user.is_active = False
        user.save()
        self.assertFalse(self.backend.has_perm(user, perm, ctype))
class GuardianBaseTests(TestCase):
    """Sanity checks on the guardian package's version metadata."""
    def test_has_attrs(self):
        # Was named ``has_attrs`` (no ``test_`` prefix), so the unittest
        # runner never collected or executed it; renamed so it actually runs.
        self.assertTrue(hasattr(guardian, '__version__'))
    def test_version(self):
        for x in guardian.VERSION:
            self.assertTrue(isinstance(x, (int, str)))
    def test_get_version(self):
        self.assertTrue(isinstance(guardian.get_version(), str))
class TestExceptions(TestCase):
    """Every guardian error type must derive from the GuardianError base."""
    def _test_error_class(self, error):
        # ``error`` is an *instance* of the exception under test.
        self.assertTrue(isinstance(error, GuardianError))
    def test_error_classes(self):
        self.assertTrue(isinstance(GuardianError(), Exception))
        for error_cls in [NotUserNorGroup]:
            self._test_error_class(error_cls())
|
sumit4iit/django-guardian
|
guardian/tests/other_test.py
|
Python
|
bsd-2-clause
| 12,625
|
import pyrge, random
__doc__="""A simple particle emitter
An L{Emitter} can be used to create particle emission effects like explosions,
eruptions, fountains, and so on. It works by creating a number of particles of
a particular class, with random (but constrained) velocities. Each particle has
its own lifetime, and is killed when that time expires.
"""
__all__ = ['Emitter', 'CircleParticle', 'BoxParticle']
class Emitter(pyrge.entity.Image):
    """A particle emitter.

    Emitters create explosion-like effects by releasing multiple small
    sprites (particles) at varying speeds and in varying directions. The
    speed, direction, duration, and their ranges are all controlled through
    the Emitter's properties.

    @param particleType: A class (note: not an instance of that class) to be
    used as the base type of particle. Multiple instances of this class will
    be created and emitted when L{emit} is called.
    """
    def __init__(self, particleType, x=0.0, y=0.0, **kwargs):
        super(Emitter, self).__init__(x, y, 0, 0)
        self.particleType = particleType
        self.__particlekwargs = kwargs
        # Defaults: 1s +/- 1s lifetime, 100 px/s +/- 100 px/s speed,
        # aimed at 0 degrees with no spread.
        self._duration = 1.0
        self._durationrange = 1.0
        self._velocity = 100.0
        self._velocityrange = 100.0
        self._emitAngle = 0.0
        self._spread = 0.0

    def emit(self, number):
        """Emit a given number of particles, using the speed, direction, etc.
        given by this object's properties.

        @param number: The number of particles to emit. The Emitter will
        release exactly this many particles. If this value is too high,
        slowdown will occur, but "too high" varies on different systems.
        """
        for _ in xrange(number):
            self._doEmit()

    def _doEmit(self, x=0.0, y=0.0):
        # Build one particle with randomized lifetime, heading, and speed
        # (gaussian around the configured averages).
        particle = self.particleType(position=(x, y), **self.__particlekwargs)
        particle.duration = random.gauss(self.duration, self.durationRange / 4.)
        heading = pyrge.util.vectorFromAngle(
            random.gauss(self.emitAngle, self.spread / 4.))
        particle.velocity = random.gauss(self.velocity, self.velocity / 4.) * heading
        self.addChild(particle)

    def update(self):
        """Per-frame update: age the child particles and cull expired ones."""
        super(Emitter, self).update()
        for child in self.children:
            child.duration -= pyrge.Game.elapsed / 1000.0
            if child.duration <= 0.0:
                self.removeChild(child)

    # Emitter properties (these are used to control the particles themselves)
    @property
    def duration(self):
        """The average lifetime of a particle, in seconds."""
        return self._duration

    @duration.setter
    def duration(self, seconds):
        self._duration = seconds

    @property
    def durationRange(self):
        """The amount that a particle's lifetime can differ from the
        average, in seconds."""
        return self._durationrange

    @durationRange.setter
    def durationRange(self, seconds):
        self._durationrange = seconds

    # Note that Emitter velocity is not the same as Entity velocity!
    @property
    def velocity(self):
        """The average speed of a particle (in pixels/sec)."""
        return self._velocity

    @velocity.setter
    def velocity(self, pixps):
        self._velocity = pixps

    @property
    def velocityRange(self):
        """The amount that a particle's speed can differ from the average
        (in pixels/sec)."""
        return self._velocityrange

    @velocityRange.setter
    def velocityRange(self, pixps):
        self._velocityrange = pixps

    # This can't be called "angle" because Images already have that property
    @property
    def emitAngle(self):
        """The angle (in degrees) that the emission will be aimed."""
        return self._emitAngle

    @emitAngle.setter
    def emitAngle(self, degrees):
        self._emitAngle = degrees

    @property
    def spread(self):
        """The amount of variation in the emitter's angle (in degrees)."""
        return self._spread

    @spread.setter
    def spread(self, degrees):
        self._spread = degrees
# Quick and dirty example particles
class CircleParticle(pyrge.entity.Entity):
    """A simple circular particle.

    @keyword radius: The radius of the particle.
    @keyword color: The color of the particle.
    """
    def __init__(self, *args, **kwargs):
        radius = kwargs.get('radius', 1)
        # Default the sprite size to the circle's bounding box.
        if not kwargs.get('size'):
            kwargs['size'] = (radius * 2, radius * 2)
        super(CircleParticle, self).__init__(*args, **kwargs)
        surface = pyrge.Game.Surface(kwargs['size'], pyrge.Constants.SRCALPHA)
        pyrge.Game.Draw.circle(surface,
                               kwargs.get('color', pyrge.Game.randomcolor()),
                               (radius, radius), radius)
        self.loadSurface(surface)
class BoxParticle(pyrge.entity.Entity):
    """A simple rectangular particle.

    @keyword color: The color of the particle.
    """
    def __init__(self, *args, **kwargs):
        super(BoxParticle, self).__init__(*args, **kwargs)
        fill_color = kwargs.get('color', pyrge.Game.randomcolor())
        self.pixels.fill(fill_color)
        self.redraw()
|
momikey/pyrge
|
emitter.py
|
Python
|
lgpl-2.1
| 5,678
|
from paddle.trainer.PyDataProvider2 import *
# Define a py data provider
@provider(input_types={
    'pixel': dense_vector(28 * 28),
    'label': integer_value(10)
})
def process(settings, filename):  # settings is not used currently.
    """Yield one MNIST training sample per line of *filename*.

    Each line has the form ``<label>;<p0> <p1> ... <p783>``; pixels are
    emitted as a list of floats, the label as an int.
    """
    # ``with`` guarantees the file is closed even if the consumer abandons
    # this generator early -- the original explicit close() only ran when
    # the whole file had been iterated.
    with open(filename, 'r') as f:
        for line in f:
            label, pixel = line.split(';')
            # get features and label, then give the record to paddle.
            pixels_float = [float(each_pixel_str)
                            for each_pixel_str in pixel.split(' ')]
            yield {"pixel": pixels_float, 'label': int(label)}
|
zuowang/Paddle
|
doc_cn/ui/data_provider/mnist_provider.dict.py
|
Python
|
apache-2.0
| 687
|
import numpy as np
from bokeh.document import Document
from bokeh.models import ColumnDataSource, DataRange1d, Plot, LinearAxis, Grid
from bokeh.models.glyphs import MultiLine
from bokeh.plotting import show
N = 9
# Base curve: N sample points along the parabola y = x**2 on [-2, 2].
x = np.linspace(-2, 2, N)
y = x**2
# Small five-point polyline template, re-drawn at every curve point.
xpts = np.array([-.09, -.12, .0, .12, .09])
ypts = np.array([-.1, .02, .1, .02, -.1])
# One xs/ys list per line: the template offset to (xx, yy) and grown
# by 10% per index.
source = ColumnDataSource(dict(
        xs=[xpts*(1+i/10.0)+xx for i, xx in enumerate(x)],
        ys=[ypts*(1+i/10.0)+yy for i, yy in enumerate(y)],
    )
)
# Ranges auto-fit to the data columns.
xdr = DataRange1d(sources=[source.columns("xs")])
ydr = DataRange1d(sources=[source.columns("ys")])
plot = Plot(
    title=None, x_range=xdr, y_range=ydr, plot_width=300, plot_height=300,
    h_symmetry=False, v_symmetry=False, min_border=0, toolbar_location=None)
glyph = MultiLine(xs="xs", ys="ys", line_color="#8073ac", line_width=2)
plot.add_glyph(source, glyph)
xaxis = LinearAxis()
plot.add_layout(xaxis, 'below')
yaxis = LinearAxis()
plot.add_layout(yaxis, 'left')
plot.add_layout(Grid(dimension=0, ticker=xaxis.ticker))
plot.add_layout(Grid(dimension=1, ticker=yaxis.ticker))
doc = Document()
doc.add(plot)
# NOTE(review): the plot is added to ``doc`` but ``show`` is called on the
# plot directly -- presumably the Document is only needed for validation.
show(plot)
|
almarklein/bokeh
|
tests/glyphs/MultiLine.py
|
Python
|
bsd-3-clause
| 1,133
|
# Copyright 2017 ProjectQ-Framework (www.projectq.ch)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests _jordan_wigner.py."""
from __future__ import absolute_import
import numpy
import unittest
from fermilib.ops import (FermionOperator,
hermitian_conjugated,
InteractionOperator,
normal_ordered,
number_operator)
from fermilib.transforms import (get_interaction_operator,
reverse_jordan_wigner)
from fermilib.transforms._jordan_wigner import (jordan_wigner,
jordan_wigner_one_body,
jordan_wigner_two_body,
jordan_wigner_interaction_op)
from projectq.ops import QubitOperator
class JordanWignerTransformTest(unittest.TestCase):
    def setUp(self):
        # Number of modes/qubits available to the tests below.
        self.n_qubits = 5
    def test_bad_input(self):
        """jordan_wigner must reject input that is not a fermionic operator."""
        with self.assertRaises(TypeError):
            jordan_wigner(3)
def test_transm_raise3(self):
raising = jordan_wigner(FermionOperator(((3, 1),)))
self.assertEqual(len(raising.terms), 2)
correct_operators_x = ((0, 'Z'), (1, 'Z'), (2, 'Z'), (3, 'X'))
correct_operators_y = ((0, 'Z'), (1, 'Z'), (2, 'Z'), (3, 'Y'))
qtermx = QubitOperator(correct_operators_x, 0.5)
qtermy = QubitOperator(correct_operators_y, -0.5j)
self.assertEqual(raising.terms[correct_operators_x], 0.5)
self.assertEqual(raising.terms[correct_operators_y], -0.5j)
self.assertTrue(raising.isclose(qtermx + qtermy))
def test_transm_raise1(self):
raising = jordan_wigner(FermionOperator(((1, 1),)))
correct_operators_x = ((0, 'Z'), (1, 'X'))
correct_operators_y = ((0, 'Z'), (1, 'Y'))
qtermx = QubitOperator(correct_operators_x, 0.5)
qtermy = QubitOperator(correct_operators_y, -0.5j)
self.assertEqual(raising.terms[correct_operators_x], 0.5)
self.assertEqual(raising.terms[correct_operators_y], -0.5j)
self.assertTrue(raising.isclose(qtermx + qtermy))
def test_transm_lower3(self):
lowering = jordan_wigner(FermionOperator(((3, 0),)))
correct_operators_x = ((0, 'Z'), (1, 'Z'), (2, 'Z'), (3, 'X'))
correct_operators_y = ((0, 'Z'), (1, 'Z'), (2, 'Z'), (3, 'Y'))
qtermx = QubitOperator(correct_operators_x, 0.5)
qtermy = QubitOperator(correct_operators_y, 0.5j)
self.assertEqual(lowering.terms[correct_operators_x], 0.5)
self.assertEqual(lowering.terms[correct_operators_y], 0.5j)
self.assertTrue(lowering.isclose(qtermx + qtermy))
def test_transm_lower2(self):
lowering = jordan_wigner(FermionOperator(((2, 0),)))
correct_operators_x = ((0, 'Z'), (1, 'Z'), (2, 'X'))
correct_operators_y = ((0, 'Z'), (1, 'Z'), (2, 'Y'))
qtermx = QubitOperator(correct_operators_x, 0.5)
qtermy = QubitOperator(correct_operators_y, 0.5j)
self.assertEqual(lowering.terms[correct_operators_x], 0.5)
self.assertEqual(lowering.terms[correct_operators_y], 0.5j)
self.assertTrue(lowering.isclose(qtermx + qtermy))
def test_transm_lower1(self):
lowering = jordan_wigner(FermionOperator(((1, 0),)))
correct_operators_x = ((0, 'Z'), (1, 'X'))
correct_operators_y = ((0, 'Z'), (1, 'Y'))
qtermx = QubitOperator(correct_operators_x, 0.5)
qtermy = QubitOperator(correct_operators_y, 0.5j)
self.assertEqual(lowering.terms[correct_operators_x], 0.5)
self.assertEqual(lowering.terms[correct_operators_y], 0.5j)
self.assertTrue(lowering.isclose(qtermx + qtermy))
def test_transm_lower0(self):
lowering = jordan_wigner(FermionOperator(((0, 0),)))
correct_operators_x = ((0, 'X'),)
correct_operators_y = ((0, 'Y'),)
qtermx = QubitOperator(correct_operators_x, 0.5)
qtermy = QubitOperator(correct_operators_y, 0.5j)
self.assertEqual(lowering.terms[correct_operators_x], 0.5)
self.assertEqual(lowering.terms[correct_operators_y], 0.5j)
self.assertTrue(lowering.isclose(qtermx + qtermy))
def test_transm_raise3lower0(self):
# recall that creation gets -1j on Y and annihilation gets +1j on Y.
term = jordan_wigner(FermionOperator(((3, 1), (0, 0))))
self.assertEqual(term.terms[((0, 'X'), (1, 'Z'), (2, 'Z'), (3, 'Y'))],
0.25 * 1 * -1j)
self.assertEqual(term.terms[((0, 'Y'), (1, 'Z'), (2, 'Z'), (3, 'Y'))],
0.25 * 1j * -1j)
self.assertEqual(term.terms[((0, 'Y'), (1, 'Z'), (2, 'Z'), (3, 'X'))],
0.25 * 1j * 1)
self.assertEqual(term.terms[((0, 'X'), (1, 'Z'), (2, 'Z'), (3, 'X'))],
0.25 * 1 * 1)
def test_transm_number(self):
n = number_operator(self.n_qubits, 3)
n_jw = jordan_wigner(n)
self.assertEqual(n_jw.terms[((3, 'Z'),)], -0.5)
self.assertEqual(n_jw.terms[()], 0.5)
self.assertEqual(len(n_jw.terms), 2)
def test_ccr_offsite_even_ca(self):
c2 = FermionOperator(((2, 1),))
a4 = FermionOperator(((4, 0),))
self.assertTrue(normal_ordered(c2 * a4).isclose(
normal_ordered(-a4 * c2)))
self.assertTrue(jordan_wigner(c2 * a4).isclose(
jordan_wigner(-a4 * c2)))
def test_ccr_offsite_odd_ca(self):
c1 = FermionOperator(((1, 1),))
a4 = FermionOperator(((4, 0),))
self.assertTrue(normal_ordered(c1 * a4).isclose(
normal_ordered(-a4 * c1)))
self.assertTrue(jordan_wigner(c1 * a4).isclose(
jordan_wigner(-a4 * c1)))
def test_ccr_offsite_even_cc(self):
c2 = FermionOperator(((2, 1),))
c4 = FermionOperator(((4, 1),))
self.assertTrue(normal_ordered(c2 * c4).isclose(
normal_ordered(-c4 * c2)))
self.assertTrue(jordan_wigner(c2 * c4).isclose(
jordan_wigner(-c4 * c2)))
def test_ccr_offsite_odd_cc(self):
c1 = FermionOperator(((1, 1),))
c4 = FermionOperator(((4, 1),))
self.assertTrue(normal_ordered(c1 * c4).isclose(
normal_ordered(-c4 * c1)))
self.assertTrue(jordan_wigner(c1 * c4).isclose(
jordan_wigner(-c4 * c1)))
def test_ccr_offsite_even_aa(self):
a2 = FermionOperator(((2, 0),))
a4 = FermionOperator(((4, 0),))
self.assertTrue(normal_ordered(a2 * a4).isclose(
normal_ordered(-a4 * a2)))
self.assertTrue(jordan_wigner(a2 * a4).isclose(
jordan_wigner(-a4 * a2)))
def test_ccr_offsite_odd_aa(self):
a1 = FermionOperator(((1, 0),))
a4 = FermionOperator(((4, 0),))
self.assertTrue(normal_ordered(a1 * a4).isclose(
normal_ordered(-a4 * a1)))
self.assertTrue(jordan_wigner(a1 * a4).isclose(
jordan_wigner(-a4 * a1)))
def test_ccr_onsite(self):
c1 = FermionOperator(((1, 1),))
a1 = hermitian_conjugated(c1)
self.assertTrue(normal_ordered(c1 * a1).isclose(
FermionOperator(()) - normal_ordered(a1 * c1)))
self.assertTrue(jordan_wigner(c1 * a1).isclose(
QubitOperator(()) - jordan_wigner(a1 * c1)))
def test_jordan_wigner_transm_op(self):
n = number_operator(self.n_qubits)
n_jw = jordan_wigner(n)
self.assertEqual(self.n_qubits + 1, len(n_jw.terms))
self.assertEqual(self.n_qubits / 2., n_jw.terms[()])
for qubit in range(self.n_qubits):
operators = ((qubit, 'Z'),)
self.assertEqual(n_jw.terms[operators], -0.5)
class InteractionOperatorsJWTest(unittest.TestCase):
    """Tests JW transforms of InteractionOperator one- and two-body parts."""
    def setUp(self):
        # Empty (all-zero) 5-mode interaction operator, used by the
        # too-few-qubits test below.
        self.n_qubits = 5
        self.constant = 0.
        self.one_body = numpy.zeros((self.n_qubits, self.n_qubits), float)
        self.two_body = numpy.zeros((self.n_qubits, self.n_qubits,
                                     self.n_qubits, self.n_qubits), float)
        self.interaction_operator = InteractionOperator(self.constant,
                                                        self.one_body,
                                                        self.two_body)
    def test_jordan_wigner_one_body(self):
        # Make sure it agrees with jordan_wigner(FermionTerm).
        for p in range(self.n_qubits):
            for q in range(self.n_qubits):
                # Get test qubit operator.
                test_operator = jordan_wigner_one_body(p, q)
                # Get correct qubit operator: a^dag_p a_q plus its hermitian
                # conjugate when the term is not already hermitian.
                fermion_term = FermionOperator(((p, 1), (q, 0)))
                correct_op = jordan_wigner(fermion_term)
                hermitian_conjugate = hermitian_conjugated(fermion_term)
                if not fermion_term.isclose(hermitian_conjugate):
                    correct_op += jordan_wigner(hermitian_conjugate)
                self.assertTrue(test_operator.isclose(correct_op))
    def test_jordan_wigner_two_body(self):
        # Make sure it agrees with jordan_wigner(FermionTerm).
        for p in range(self.n_qubits):
            for q in range(self.n_qubits):
                for r in range(self.n_qubits):
                    for s in range(self.n_qubits):
                        # Get test qubit operator.
                        test_operator = jordan_wigner_two_body(p, q, r, s)
                        # Get correct qubit operator.
                        fermion_term = FermionOperator(((p, 1), (q, 1),
                                                        (r, 0), (s, 0)))
                        correct_op = jordan_wigner(fermion_term)
                        hermitian_conjugate = hermitian_conjugated(
                            fermion_term)
                        if not fermion_term.isclose(hermitian_conjugate):
                            # NOTE(review): the p==r, q==s case deliberately
                            # skips adding the conjugate -- presumably those
                            # terms are self-conjugate up to ordering;
                            # confirm against jordan_wigner_two_body.
                            if p == r and q == s:
                                pass
                            else:
                                correct_op += jordan_wigner(
                                    hermitian_conjugate)
                        self.assertTrue(test_operator.isclose(correct_op),
                                        str(test_operator - correct_op))
    def test_jordan_wigner_twobody_interaction_op_allunique(self):
        # Round trip: fermion op -> interaction op -> qubits -> fermion op
        # must preserve the operator (compared in normal order).
        test_op = FermionOperator('1^ 2^ 3 4')
        test_op += hermitian_conjugated(test_op)
        retransformed_test_op = reverse_jordan_wigner(jordan_wigner(
            get_interaction_operator(test_op)))
        self.assertTrue(normal_ordered(retransformed_test_op).isclose(
            normal_ordered(test_op)))
    def test_jordan_wigner_twobody_interaction_op_reversal_symmetric(self):
        # Transforming directly or via the InteractionOperator must agree.
        test_op = FermionOperator('1^ 2^ 2 1')
        test_op += hermitian_conjugated(test_op)
        self.assertTrue(jordan_wigner(test_op).isclose(
            jordan_wigner(get_interaction_operator(test_op))))
    def test_jordan_wigner_interaction_op_too_few_n_qubits(self):
        # Requesting fewer qubits than the operator acts on must fail.
        with self.assertRaises(ValueError):
            jordan_wigner_interaction_op(self.interaction_operator,
                                         self.n_qubits - 2)
    def test_jordan_wigner_interaction_op_with_zero_term(self):
        # NOTE(review): this test only exercises the round trip for
        # constant == 0; `retransformed_test_op` is computed but never
        # asserted against `test_op` -- an assertion appears to be missing.
        test_op = FermionOperator('1^ 2^ 3 4')
        test_op += hermitian_conjugated(test_op)
        interaction_op = get_interaction_operator(test_op)
        interaction_op.constant = 0.0
        retransformed_test_op = reverse_jordan_wigner(jordan_wigner(
            interaction_op))
class GetInteractionOperatorTest(unittest.TestCase):
    """Tests coefficient placement performed by get_interaction_operator()."""
    def setUp(self):
        # Zero tensors reused as "expected" one-/two-body baselines.
        self.n_qubits = 5
        self.constant = 0.
        self.one_body = numpy.zeros((self.n_qubits, self.n_qubits), float)
        self.two_body = numpy.zeros((self.n_qubits, self.n_qubits,
                                     self.n_qubits, self.n_qubits), float)
    def test_get_interaction_operator_identity(self):
        # A pure constant maps to constant * identity and round-trips
        # through reverse_jordan_wigner unchanged.
        interaction_operator = InteractionOperator(-2j, self.one_body,
                                                   self.two_body)
        qubit_operator = jordan_wigner(interaction_operator)
        self.assertTrue(qubit_operator.isclose(-2j * QubitOperator(())))
        self.assertEqual(interaction_operator,
                         get_interaction_operator(reverse_jordan_wigner(
                             qubit_operator), self.n_qubits))
    def test_get_interaction_operator_one_body(self):
        # 2^ 2 lands in one_body[2, 2] with coefficient 1.
        interaction_operator = get_interaction_operator(
            FermionOperator('2^ 2'), self.n_qubits)
        one_body = numpy.zeros((self.n_qubits, self.n_qubits), float)
        one_body[2, 2] = 1.
        self.assertEqual(interaction_operator,
                         InteractionOperator(0.0, one_body, self.two_body))
    def test_get_interaction_operator_one_body_twoterm(self):
        # Complex coefficients land at the corresponding (p, q) entries.
        interaction_operator = get_interaction_operator(
            FermionOperator('2^ 3', -2j) + FermionOperator('3^ 2', 3j),
            self.n_qubits)
        one_body = numpy.zeros((self.n_qubits, self.n_qubits), complex)
        one_body[2, 3] = -2j
        one_body[3, 2] = 3j
        self.assertEqual(interaction_operator,
                         InteractionOperator(0.0, one_body, self.two_body))
    def test_get_interaction_operator_two_body(self):
        # 2^ 2 3^ 4 produces two_body[3, 2, 4, 2] == -1 -- presumably the
        # sign comes from reordering into normal form; confirm if modified.
        interaction_operator = get_interaction_operator(
            FermionOperator('2^ 2 3^ 4'), self.n_qubits)
        two_body = numpy.zeros((self.n_qubits, self.n_qubits,
                                self.n_qubits, self.n_qubits), float)
        two_body[3, 2, 4, 2] = -1.
        self.assertEqual(interaction_operator,
                         InteractionOperator(0.0, self.one_body, two_body))
    def test_get_interaction_operator_two_body_distinct(self):
        # 0^ 1^ 2 3 with all-distinct modes lands at [1, 0, 3, 2] == 1.
        interaction_operator = get_interaction_operator(
            FermionOperator('0^ 1^ 2 3'), self.n_qubits)
        two_body = numpy.zeros((self.n_qubits, self.n_qubits,
                                self.n_qubits, self.n_qubits), float)
        two_body[1, 0, 3, 2] = 1.
        self.assertEqual(interaction_operator,
                         InteractionOperator(0.0, self.one_body, two_body))
|
ProjectQ-Framework/FermiLib
|
src/fermilib/transforms/_jordan_wigner_test.py
|
Python
|
apache-2.0
| 14,760
|
# -*- coding: utf-8 -*-
from copy import deepcopy
import pytest
import yaml
from awx.main.utils.safe_yaml import safe_dump
@pytest.mark.parametrize('value', [None, 1, 1.5, []])
def test_native_types(value):
    # Native non-string types should dump the same way that `yaml.safe_dump` does
    # (only strings receive special treatment by safe_dump).
    assert safe_dump(value) == yaml.safe_dump(value)
def test_empty():
    # An empty mapping serialises to the empty document.
    assert safe_dump({}) == ''
def test_raw_string():
    # Bare strings are tagged !unsafe (i.e. not trusted for templating).
    assert safe_dump('foo') == "!unsafe 'foo'\n"
def test_kv_null():
    # String keys get the tag; None values stay plain YAML null.
    assert safe_dump({'a': None}) == "!unsafe 'a': null\n"
def test_kv_null_safe():
    # Values present in the safe_dict argument are emitted untagged.
    assert safe_dump({'a': None}, {'a': None}) == "a: null\n"
def test_kv_null_unsafe():
    # A changed value ('' vs None) must not inherit the safe marking.
    assert safe_dump({'a': ''}, {'a': None}) == "!unsafe 'a': !unsafe ''\n"
def test_kv_int():
    assert safe_dump({'a': 1}) == "!unsafe 'a': 1\n"
def test_kv_float():
    assert safe_dump({'a': 1.5}) == "!unsafe 'a': 1.5\n"
def test_kv_unsafe():
    # Both string keys and string values carry the tag.
    assert safe_dump({'a': 'b'}) == "!unsafe 'a': !unsafe 'b'\n"
def test_kv_unsafe_unicode():
    # Non-ASCII strings are escaped and still tagged.
    assert safe_dump({'a': u'🐉'}) == '!unsafe \'a\': !unsafe "\\U0001F409"\n'
def test_kv_unsafe_in_list():
    assert safe_dump({'a': ['b']}) == "!unsafe 'a':\n- !unsafe 'b'\n"
def test_kv_unsafe_in_mixed_list():
    # Only the string element of the list is tagged, not the int.
    assert safe_dump({'a': [1, 'b']}) == "!unsafe 'a':\n- 1\n- !unsafe 'b'\n"
def test_kv_unsafe_deep_nesting():
    # Every string at any nesting depth should carry the !unsafe tag.
    # NOTE(review): the local name `yaml` shadows the imported yaml module.
    yaml = safe_dump({'a': [1, [{'b': {'c': [{'d': 'e'}]}}]]})
    for x in ('a', 'b', 'c', 'd', 'e'):
        assert "!unsafe '{}'".format(x) in yaml
def test_kv_unsafe_multiple():
    assert safe_dump({'a': 'b', 'c': 'd'}) == '\n'.join([
        "!unsafe 'a': !unsafe 'b'",
        "!unsafe 'c': !unsafe 'd'",
        ""
    ])
def test_safe_marking():
    # Values matching the safe_dict exactly are dumped untagged.
    assert safe_dump({'a': 'b'}, safe_dict={'a': 'b'}) == "a: b\n"
def test_safe_marking_mixed():
    # Safe and unsafe pairs can coexist in one document.
    assert safe_dump({'a': 'b', 'c': 'd'}, safe_dict={'a': 'b'}) == '\n'.join([
        "a: b",
        "!unsafe 'c': !unsafe 'd'",
        ""
    ])
def test_safe_marking_deep_nesting():
    deep = {'a': [1, [{'b': {'c': [{'d': 'e'}]}}]]}
    # deepcopy so the safe_dict is an equal-but-distinct structure;
    # no string should be tagged when everything matches.
    yaml = safe_dump(deep, deepcopy(deep))
    for x in ('a', 'b', 'c', 'd', 'e'):
        assert "!unsafe '{}'".format(x) not in yaml
def test_deep_diff_unsafe_marking():
    deep = {'a': [1, [{'b': {'c': [{'d': 'e'}]}}]]}
    jt_vars = deepcopy(deep)
    # Mutate the vars only after copying, so just the new key is unsafe.
    deep['a'][1][0]['b']['z'] = 'not safe'
    yaml = safe_dump(deep, jt_vars)
    assert "!unsafe 'z'" in yaml
|
GoogleCloudPlatform/sap-deployment-automation
|
third_party/github.com/ansible/awx/awx/main/tests/unit/utils/test_safe_yaml.py
|
Python
|
apache-2.0
| 2,424
|
# urls.py
# URL routes for the approver app.
# NOTE(review): `django.conf.urls.defaults` and `patterns()` only exist in
# legacy Django releases (removed in 1.6/1.8) -- this module targets old Django.
from django.conf.urls.defaults import *
urlpatterns = patterns('approver.views',
    (r'^$', 'list_tweets'),  # root: list tweets awaiting approval
    (r'^review/(?P<tweet_id>\d+)', 'review_tweet'),  # review one tweet by id
)
|
YuxuanLing/trunk
|
trunk/code/study/python/core_python_appilication/ch11/myproject/approver/urls.py
|
Python
|
gpl-3.0
| 181
|
# Path to the user-list data file, relative to the working directory.
UserFile='./UserList.txt'
|
51reboot/actual_09_homework
|
05/qicheng/gconf.py
|
Python
|
mit
| 26
|
# Generated by Django 2.0.10 on 2019-05-05 23:44
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add allergy/diet preference fields and an email-visibility flag to
    Profile (verbose names are user-facing Norwegian strings)."""
    dependencies = [
        ('userprofile', '0014_auto_20190430_2023'),
    ]
    operations = [
        # Free-text field for any other catering-related request.
        migrations.AddField(
            model_name='profile',
            name='allergi_annet',
            field=models.CharField(blank=True, max_length=140, null=True, verbose_name='Oppgi evt. annet ønske vedrørende arrangementer med matservering.'),
        ),
        # Boolean opt-ins for gluten-free / vegan / vegetarian catering.
        migrations.AddField(
            model_name='profile',
            name='allergi_gluten',
            field=models.BooleanField(default=False, verbose_name='Ønsker glutenfritt alternativ i arrangementer med matservering'),
        ),
        migrations.AddField(
            model_name='profile',
            name='allergi_vegan',
            field=models.BooleanField(default=False, verbose_name='Ønsker vegansk alternativ i arrangementer med matservering'),
        ),
        migrations.AddField(
            model_name='profile',
            name='allergi_vegetar',
            field=models.BooleanField(default=False, verbose_name='Ønsker vegetar alternativ i arrangementer med matservering'),
        ),
        # Whether the user's e-mail address is shown on their profile.
        migrations.AddField(
            model_name='profile',
            name='show_email',
            field=models.BooleanField(default=False, verbose_name='Vis e-postadresse i din profil'),
        ),
    ]
|
hackerspace-ntnu/website
|
userprofile/migrations/0015_auto_20190505_2344.py
|
Python
|
mit
| 1,432
|
import sys
import os
import re

# Clean up .flac file names in the album directory given as the first CLI
# argument: strip the embedded album-folder name and the leading track-number
# punctuation, print the resulting track list, and rename the files in place.
full_path = sys.argv[1]
if not os.path.isdir(full_path):
    # BUG FIX: use sys.exit instead of the site-provided exit() builtin
    # (only guaranteed in interactive sessions), and report the usage
    # error on stderr rather than stdout.
    print('folder expected', file=sys.stderr)
    sys.exit(1)
os.chdir(full_path)
dirname = os.path.basename(full_path)
print('Tracklist: ')
for f in [fl for fl in os.listdir(full_path) if fl.endswith('.flac')]:
    # Drop the album folder name embedded in the file name.
    new_name = f.replace(dirname, '')
    # Strip the "- "/space run immediately before the track number
    # (first occurrence only).
    new_name = re.sub('([- ])+(?=[0-9])', '', new_name, 1)
    print(new_name.replace('.flac', '', 1))
    os.rename(f, new_name)
|
singulart/bandcd
|
tracklist.py
|
Python
|
mit
| 451
|
from blinker import signal
# Blinker signals published around application lifecycle phases; each phase
# exposes a pre/post pair so subscribers can hook either side.
# Application initialisation.
pre_init = signal('application-pre-init')
post_init = signal('application-post-init')
# Application registration.
pre_registration = signal('application-pre-registration')
post_registration = signal('application-post-registration')
# Database creation.
pre_create_database = signal('application-pre-create-database')
post_create_database = signal('application-post-create-database')
# Table creation.
pre_create_tables = signal('application-pre-create-tables')
post_create_tables = signal('application-post-create-tables')
# Database deletion.
pre_delete_database = signal('application-pre-delete-database')
post_delete_database = signal('application-post-delete-database')
# Table deletion.
pre_delete_tables = signal('application-pre-delete-tables')
post_delete_tables = signal('application-post-delete-tables')
|
JeffHeard/sondra
|
sondra/application/signals.py
|
Python
|
apache-2.0
| 735
|
from django import forms
# Quantities 1..20 offered in the add-to-cart dropdown.
PRODUCT_QUANTITY_CHOICES = [(i, str(i)) for i in range(1, 21)]
class CartAddProductForm(forms.Form):
    """Form for adding a product to the cart (quantity + update flag)."""
    # Submitted as a string choice, coerced to int on clean.
    quantity = forms.TypedChoiceField(
        choices=PRODUCT_QUANTITY_CHOICES,
        coerce=int
    )
    # Hidden flag consumed by the cart view -- presumably distinguishes
    # "set quantity" from "add to quantity"; confirm in the cart logic.
    update = forms.BooleanField(
        required=False,
        initial=False,
        widget=forms.HiddenInput
    )
|
ch1huizong/dj
|
onlineshop/myshop/cart/forms.py
|
Python
|
unlicense
| 355
|
def f(m,n):
    """Return 2 raised to the number of times n can be subtracted from m.

    For m >= 0 and n > 0 this equals 2 ** (m // n); the subtraction loop
    is kept so behaviour for all other inputs matches exactly.
    """
    result = 1
    remaining = m
    while remaining >= n:
        result, remaining = result * 2, remaining - n
    return result
|
selvagit/experiments
|
nptel/nptel_programming_data_structure/week_1/q3.py
|
Python
|
gpl-3.0
| 97
|
# Bootstrap: announce the cron run and resolve the map marker / feature
# class rows used to tag incoming SMS reports (the web2py-style `db` is
# assumed to be in scope when this script runs -- verify in deployment).
print "Running SMS request parse script"
marker = db(db.gis_marker.name=="phone").select()
feature = db(db.gis_feature_class.name=="SMS").select()
# Use the id only when exactly one matching row exists; otherwise fall
# back to None so the optional foreign keys are skipped on insert.
marker_id = marker[0]['id'] if len(marker) == 1 else None
feature_id = feature[0]['id'] if len(feature) == 1 else None
def rss2record(entry):
    """Convert one Ushahidi RSS feed entry into (record, location) dicts.

    Parameters:
        entry: a feedparser entry describing an SMS report.

    Returns:
        (myd, locd): `myd` maps onto db.rms_sms_request fields; `locd` maps
        onto db.gis_location fields and stays empty when the entry carries
        no usable, non-zero lat/lon.
    """
    myd = {}
    locd = {}
    myd['ush_id'] = entry['id']
    myd['link'] = entry['link']  # url for the entry
    myd['author'] = entry['author']
    # feedparser exposes the update time as a struct_time-like tuple.
    year = entry.updated_parsed[0]
    month = entry.updated_parsed[1]
    day = entry.updated_parsed[2]
    hour = entry.updated_parsed[3]
    minute = entry.updated_parsed[4]
    myd['updated'] = datetime.datetime(year=year, month=month, day=day, hour=hour, minute=minute)
    myd['title'] = entry['title']
    myd['sms'] = entry['sms']
    #myd['smsrec'] = entry['smsrec']
    #myd['phone']=entry['phone']
    myd['categorization'] = entry['categorization']
    myd['firstname'] = entry['firstname']
    myd['lastname'] = entry['lastname']
    myd['status'] = entry['status']
    myd['address'] = entry['address']
    myd['city'] = entry['city']
    myd['department'] = entry['department']
    myd['summary'] = entry['summary']
    myd['notes'] = entry['notes']
    #myd['actionable'] = True if entry['actionable'] != '0' else False
    # Fix escape characters: collapse whitespace and unescape quotes.
    myd["sms"] = " ".join(myd["sms"].split())
    myd["sms"] = myd["sms"].replace('\\"', '"')
    myd["sms"] = myd["sms"].replace("\\'", "'")
    myd["notes"] = " ".join(myd["notes"].split())
    myd["notes"] = myd["notes"].replace('\\"', '"')
    myd["notes"] = myd["notes"].replace("\\'", "'")
    # Add location information. The key name contains a colon, and for
    # this reason, is platform dependent. Thus, we just look for any
    # entry that ends in "point":
    for key in entry.keys():
        if key[-5:] == "point":
            # Found the location info
            gpoint = entry[key].split()
            if len(gpoint) == 2:
                try:
                    lat = float(gpoint[0])
                    lon = float(gpoint[1])
                except ValueError:
                    continue
                # Ushahidi uses a 0,0 lat/lon to indicate no lat lon.
                if abs(lat) > 1.0e-8 and abs(lon) > 1.0e-8:
                    locd['lat' ] = lat
                    locd['lon' ] = lon
                    name = "SMS: "
                    if myd['categorization'] != "":
                        name += myd['categorization']
                    else:
                        name += "No category"
                    locd['name'] = name
                    if marker_id is not None:
                        locd['marker_id'] = marker_id
                    # BUG FIX: the original tested the undefined name
                    # `feature_class_id`, raising NameError for every entry
                    # with coordinates; `feature_id` is the module global
                    # resolved at startup.
                    if feature_id is not None:
                        locd['feature_class_id'] = feature_id
    return myd, locd
def sms_to_metadata(sms_dict):
    """Build a media_metadata insert dict from an imported SMS record.

    `sms_dict` must already carry a 'location_id' key (the caller sets it
    before inserting the record).
    """
    metadata = {}
    metadata["event_time" ] = sms_dict["updated"]
    # Escape the description so it survives re-embedding in quoted contexts.
    desc = sms_to_description(sms_dict)
    desc = " ".join(desc.split())
    desc = desc.replace('"', '\\"')
    desc = desc.replace("'", "\\'")
    metadata["description"] = desc
    # BUG FIX: the original read the module-level loop variable `locid`
    # (fragile hidden coupling to the fetch loop); the record itself
    # carries the same value in 'location_id'.
    metadata["location_id"] = sms_dict["location_id"]
    return metadata
def sms_to_description(sms_dict):
    """Build a one-line description from the sms text, notes and category."""
    body = sms_dict["sms"]
    notes = sms_dict["notes"]
    category = sms_dict["categorization"]
    if notes != "":
        # Append the notes, separated by a marker unless the sms is empty.
        body = notes if body == "" else body + " NOTE: " + notes
    if category != "":
        # Prefix the Ushahidi categorization when present.
        body = category + ": " + body
    return body
def sms_to_request(sms_dict, sms_id):
    """Map an imported SMS record onto an rms_req request dict.

    Parameters:
        sms_dict: record produced by rss2record() with 'location_id' set.
        sms_id: primary key of the rms_sms_request row for this SMS.
    """
    # usha_cats maps Ushahidi categorizations to
    # sahana-style categorizations
    usha_cats = {
        "1. Emergency": 6,
        "1a. Collapsed Structure": 5,
        "1b. Fire": 6,
        "1c. People Trapped": 2,
        "1d. Contaminated water supply": 3,
        "1e. Earthquake and aftershocks": 6,
        "1f. Medical Emergency": 4,
        "2. Threats": 6,
        "2a. Structures at risk": 5,
        "2b. Looting": 6,
        "3. Vital Lines": 6,
        "3a. Water shortage": 3,
        "3b. Roads blocked": 6,
        "3c. Power Outage": 6,
        "4. Response": 6,
        "4a. Health Services": 4,
        "4b. USAR search and rescue": 2,
        "4c. Shelter": 5,
        "4d. Food distribution": 1,
        "4e. Water sanitation and hygiene promotion": 3,
        "4f. Non food items": 6,
        "4g. Rubble removal": 6,
        "4h. Died bodies management": 6,
        "5. Other": 6,
        "6. Person News": 2,
        "6a. Deaths": 2,
        "6b. Missing Persons": 2,
        "7. Child Alone": 2,
        "8. Asking to forward a message": 6,
    }
    # rms_req_source_type is assumed to be in scope when this cron script
    # runs (presumably a model-layer choice mapping) -- verify.
    d = rms_req_source_type
    request_dict = {}
    request_dict["location_id"] = sms_dict["location_id"]
    request_dict["timestamp" ] = sms_dict["updated" ]
    request_dict["message" ] = sms_to_description(sms_dict)
    request_dict["source_id" ] = sms_id
    # Reverse lookup of the "SMS" source type key.
    # NOTE(review): `.keys()[...]` indexing is Python-2-only; dict views in
    # Python 3 are not subscriptable.
    request_dict["source_type"] = d.keys()[d.values().index("SMS")]
    #request_dict["actionable" ] = sms_dict["actionable" ]
    if sms_dict["categorization"] in usha_cats :
        request_dict["type"] = usha_cats[sms_dict["categorization"]]
    return request_dict
# NOTE(review): imports live next to the fetch loop in this cron script
# rather than at the top of the file.
import datetime
import gluon.contrib.feedparser as feedparser
# Ushahidi RSS endpoint; the API key is embedded in the URL.
url_base = "http://server.domain/rss.php?key=keyrequired"
# Page size for the paged RSS fetch.
N = 100
start = 0
done = False
while done == False:
    # Fetch the next page of entries via the limit=<offset>,<count> param.
    url = url_base + "&limit=" + str(start) + "," + str(N)
    d = feedparser.parse(url)
    for entry in d.entries:
        rec, locd = rss2record(entry)
        # Don't import duplicates
        if db(db.rms_sms_request.ush_id == rec['ush_id']).count() == 0:
            locid = None
            if locd != {}:
                # Calculate WKT for display on Map
                locd['wkt'] = 'POINT(%f %f)' % (locd['lon'], locd['lat'])
                locid = db.gis_location.insert(**locd)
            rec["location_id"] = locid
            smsid = db.rms_sms_request.insert(**rec)
            if locid != None:
                # Rename the location after the SMS row id is known.
                db(db.gis_location.id == locid).update(name="SMS " + repr(smsid))
            # Add media_metadata entry to show additional
            # information on the map
            db.media_metadata.insert(**sms_to_metadata(rec))
            # Insert the request:
            db.rms_req.insert(**sms_to_request(rec, smsid))
        else:
            # First already-imported entry: assume everything older has
            # been imported too and stop paging.
            done = True
            break
    start = start + N
    if len(d["entries"]) == 0:
        done = True
db.commit()
|
luisibanez/SahanaEden
|
cron/rms_sms2record.py
|
Python
|
mit
| 6,616
|
# Copyright DataStax, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from cassandra.cluster import Cluster
from tests import connection_class, EVENT_LOOP_MANAGER
# Force every Cluster the tests create to use the connection class chosen
# for the configured event loop (imported from the tests package).
Cluster.connection_class = connection_class
try:
import unittest2 as unittest
except ImportError:
import unittest # noqa
from packaging.version import Version
import logging
import socket
import sys
import time
import traceback
import platform
from threading import Event
from subprocess import call
from itertools import groupby
import six
from cassandra import OperationTimedOut, ReadTimeout, ReadFailure, WriteTimeout, WriteFailure, AlreadyExists, \
InvalidRequest
from cassandra.cluster import NoHostAvailable
from cassandra.protocol import ConfigurationException
try:
from ccmlib.cluster import Cluster as CCMCluster
from ccmlib.cluster_factory import ClusterFactory as CCMClusterFactory
from ccmlib import common
except ImportError as e:
CCMClusterFactory = None
log = logging.getLogger(__name__)
# Canonical cluster names reused by the helpers below.
CLUSTER_NAME = 'test_cluster'
SINGLE_NODE_CLUSTER_NAME = 'single_node'
MULTIDC_CLUSTER_NAME = 'multidc_test_cluster'
# Handle to the currently running ccm cluster (None until use_cluster()).
CCM_CLUSTER = None
# Directory where ccm stores cluster state, next to this module.
path = os.path.join(os.path.abspath(os.path.dirname(__file__)), 'ccm')
if not os.path.exists(path):
    os.mkdir(path)
# Cached results of get_server_versions().
cass_version = None
cql_version = None
def get_server_versions():
    """
    Probe system.local table to determine Cassandra and CQL version.
    Returns a tuple of (cassandra_version, cql_version).

    Results are cached in module-level globals so the server is only
    contacted once per test run.
    """
    global cass_version, cql_version
    if cass_version is not None:
        # Already probed -- reuse the cached values.
        return (cass_version, cql_version)
    c = Cluster()
    s = c.connect()
    row = s.execute('SELECT cql_version, release_version FROM system.local')[0]
    cass_version = _tuple_version(row.release_version)
    cql_version = _tuple_version(row.cql_version)
    c.shutdown()
    return (cass_version, cql_version)
def _tuple_version(version_string):
if '-' in version_string:
version_string = version_string[:version_string.index('-')]
return tuple([int(p) for p in version_string.split('.')])
# NOTE(review): bool(os.getenv(...)) is truthy for ANY non-empty string,
# including "0" and "false" -- only an unset/empty variable yields False.
USE_CASS_EXTERNAL = bool(os.getenv('USE_CASS_EXTERNAL', False))
KEEP_TEST_CLUSTER = bool(os.getenv('KEEP_TEST_CLUSTER', False))
SIMULACRON_JAR = os.getenv('SIMULACRON_JAR', None)
# If set to to true this will force the Cython tests to run regardless of whether they are installed
cython_env = os.getenv('VERIFY_CYTHON', "False")
VERIFY_CYTHON = False
if(cython_env == 'True'):
    VERIFY_CYTHON = True
default_cassandra_version = '2.2.0'
# Target server address and the C* install source used by ccm.
CASSANDRA_IP = os.getenv('CASSANDRA_IP', '127.0.0.1')
CASSANDRA_DIR = os.getenv('CASSANDRA_DIR', None)
CASSANDRA_VERSION = os.getenv('CASSANDRA_VERSION', default_cassandra_version)
# Keyword arguments forwarded to ccmlib: an explicit install dir wins over
# a version to download.
CCM_KWARGS = {}
if CASSANDRA_DIR:
    log.info("Using Cassandra dir: %s", CASSANDRA_DIR)
    CCM_KWARGS['install_dir'] = CASSANDRA_DIR
else:
    log.info('Using Cassandra version: %s', CASSANDRA_VERSION)
    CCM_KWARGS['version'] = CASSANDRA_VERSION
#This changes the default contact_point parameter in Cluster
def set_default_cass_ip():
    """Rewrite Cluster.__init__'s first positional default so test Clusters
    target CASSANDRA_IP when it is not a local loopback address."""
    if CASSANDRA_IP.startswith("127.0.0."):
        return
    defaults = list(Cluster.__init__.__defaults__)
    defaults = [[CASSANDRA_IP]] + defaults[1:]
    try:
        Cluster.__init__.__defaults__ = tuple(defaults)
    except:
        # Fallback via __func__ -- presumably for Python 2 unbound
        # methods, where defaults live on the underlying function.
        Cluster.__init__.__func__.__defaults__ = tuple(defaults)
def set_default_beta_flag_true():
    """Flip the Cluster.__init__ positional default at index 28 to True
    (used by get_default_protocol() when selecting protocol v5)."""
    # NOTE(review): indexing the defaults tuple by position is brittle
    # across driver versions -- confirm index 28 still corresponds to the
    # beta-protocol flag before upgrading the driver.
    defaults = list(Cluster.__init__.__defaults__)
    defaults = (defaults[:28] + [True] + defaults[29:])
    try:
        Cluster.__init__.__defaults__ = tuple(defaults)
    except:
        # Python-2-style fallback through the underlying function object.
        Cluster.__init__.__func__.__defaults__ = tuple(defaults)
def get_default_protocol():
    """Return the default native protocol version for the configured C*
    release; selecting v5 (4.0+) also enables the beta-protocol default."""
    version = Version(CASSANDRA_VERSION)
    if version >= Version('4.0'):
        # Side effect: allow v5 negotiation before any Cluster is built.
        set_default_beta_flag_true()
        return 5
    elif version >= Version('2.2'):
        return 4
    elif version >= Version('2.1'):
        return 3
    elif version >= Version('2.0'):
        return 2
    else:
        return 1
def get_supported_protocol_versions():
    """
    Protocol versions to exercise for the configured Cassandra release:
    1.2  -> (1,)
    2.0  -> (1, 2)
    2.1  -> (1, 2, 3)
    2.2  -> (1, 2, 3, 4)
    3.0+ -> (3, 4)
    4.0+ -> (3, 4, 5)
    """
    # NOTE(review): the 3.10 and 3.0 branches return the same tuple; the
    # original docstring claimed 3.10 supports v5 (beta) -- confirm whether
    # (3, 4, 5) was intended for the 3.10 branch.
    version = Version(CASSANDRA_VERSION)
    if version >= Version('4.0'):
        return (3, 4, 5)
    elif version >= Version('3.10'):
        return (3, 4)
    elif version >= Version('3.0'):
        return (3, 4)
    elif version >= Version('2.2'):
        return (1, 2, 3, 4)
    elif version >= Version('2.1'):
        return (1, 2, 3)
    elif version >= Version('2.0'):
        return (1, 2)
    else:
        return (1, )
def get_unsupported_lower_protocol():
    """
    This is used to determine the lowest protocol version that is NOT
    supported by the version of C* running
    """
    # Releases before 3.0 still accept every lower protocol version.
    if Version(CASSANDRA_VERSION) < Version('3.0'):
        return None
    return 2
def get_unsupported_upper_protocol():
    """
    This is used to determine the highest protocol version that is NOT
    supported by the version of C* running
    """
    version = Version(CASSANDRA_VERSION)
    # 2.2+ supports every protocol version the tests know about.
    if version >= Version('2.2'):
        return None
    if version >= Version('2.1'):
        return 4
    if version >= Version('2.0'):
        return 3
    return None
default_protocol_version = get_default_protocol()
# Explicit env override wins over the version-derived default.
PROTOCOL_VERSION = int(os.getenv('PROTOCOL_VERSION', default_protocol_version))
# Reusable unittest skip decorators gating tests on the target address,
# protocol version and Cassandra release.
# NOTE(review): the CASSANDRA_VERSION comparisons below are lexicographic
# string comparisons -- fine for the 1.2..4.0 range but fragile in general.
local = unittest.skipUnless(CASSANDRA_IP.startswith("127.0.0."), 'Tests only runs against local C*')
notprotocolv1 = unittest.skipUnless(PROTOCOL_VERSION > 1, 'Protocol v1 not supported')
lessthenprotocolv4 = unittest.skipUnless(PROTOCOL_VERSION < 4, 'Protocol versions 4 or greater not supported')
greaterthanprotocolv3 = unittest.skipUnless(PROTOCOL_VERSION >= 4, 'Protocol versions less than 4 are not supported')
protocolv5 = unittest.skipUnless(5 in get_supported_protocol_versions(), 'Protocol versions less than 5 are not supported')
greaterthancass20 = unittest.skipUnless(CASSANDRA_VERSION >= '2.1', 'Cassandra version 2.1 or greater required')
greaterthancass21 = unittest.skipUnless(CASSANDRA_VERSION >= '2.2', 'Cassandra version 2.2 or greater required')
greaterthanorequalcass30 = unittest.skipUnless(CASSANDRA_VERSION >= '3.0', 'Cassandra version 3.0 or greater required')
greaterthanorequalcass36 = unittest.skipUnless(CASSANDRA_VERSION >= '3.6', 'Cassandra version 3.6 or greater required')
greaterthanorequalcass3_10 = unittest.skipUnless(CASSANDRA_VERSION >= '3.10', 'Cassandra version 3.10 or greater required')
# BUG FIX: skip reason previously claimed "3.10" for the 3.11 gate.
greaterthanorequalcass3_11 = unittest.skipUnless(CASSANDRA_VERSION >= '3.11', 'Cassandra version 3.11 or greater required')
greaterthanorequalcass40 = unittest.skipUnless(CASSANDRA_VERSION >= '4.0', 'Cassandra version 4.0 or greater required')
# BUG FIX: this skipIf skips on >= 4.0, so the requirement is *below* 4.0;
# the original reason string stated the opposite.
lessthanorequalcass40 = unittest.skipIf(CASSANDRA_VERSION >= '4.0', 'Cassandra version less than 4.0 required')
# Typo fix in the reason string: "less then" -> "less than".
lessthancass30 = unittest.skipUnless(CASSANDRA_VERSION < '3.0', 'Cassandra version less than 3.0 required')
pypy = unittest.skipUnless(platform.python_implementation() == "PyPy", "Test is skipped unless it's on PyPy")
notpy3 = unittest.skipIf(sys.version_info >= (3, 0), "Test not applicable for Python 3.x runtime")
requiresmallclockgranularity = unittest.skipIf("Windows" in platform.system() or "asyncore" in EVENT_LOOP_MANAGER,
                                               "This test is not suitable for environments with large clock granularity")
requiressimulacron = unittest.skipIf(SIMULACRON_JAR is None or CASSANDRA_VERSION < "2.1", "Simulacron jar hasn't been specified or C* version is 2.0")
def wait_for_node_socket(node, timeout):
    """Wait for the node's native-protocol ('binary') socket to accept
    connections, logging a warning if it stays down past `timeout`."""
    binary_itf = node.network_interfaces['binary']
    if common.check_socket_listening(binary_itf, timeout=timeout):
        log.debug("Node %s is up and listening " % (node.name,))
    else:
        log.warning("Unable to connect to binary socket for node " + node.name)
def check_socket_listening(itf, timeout=60):
    """Poll `itf` (a (host, port) address) until a TCP connect succeeds.

    Retries every 200ms until `timeout` seconds have elapsed.

    Returns:
        True as soon as a connection is accepted, False on timeout.
    """
    end = time.time() + timeout
    while time.time() <= end:
        sock = socket.socket()
        try:
            sock.connect(itf)
            return True
        except socket.error:
            # Not listening yet -- try again in another 200ms.
            time.sleep(.2)
        finally:
            # BUG FIX: the original leaked the socket descriptor whenever
            # connect() raised; always close it.
            sock.close()
    return False
def get_cluster():
    # Accessor for the module-level CCM cluster handle (may be None).
    return CCM_CLUSTER
def get_node(node_id):
    # Look up a node by its numeric id, e.g. get_node(1) -> nodes['node1'].
    return CCM_CLUSTER.nodes['node%s' % node_id]
def use_multidc(dc_list, workloads=None):
    """Start (or reuse) the multi-datacenter test cluster.

    `dc_list` gives the node count per datacenter. BUG FIX: the original
    used the mutable default `workloads=[]`; None is used instead and
    normalised at call time, keeping the call signature compatible.
    """
    use_cluster(MULTIDC_CLUSTER_NAME, dc_list, start=True,
                workloads=workloads if workloads is not None else [])
def use_singledc(start=True, workloads=None):
    """Start (or reuse) the standard 3-node single-DC test cluster.

    BUG FIX: avoids the mutable default `workloads=[]` by normalising a
    None default at call time (call-compatible with the original).
    """
    use_cluster(CLUSTER_NAME, [3], start=start,
                workloads=workloads if workloads is not None else [])
def use_single_node(start=True, workloads=None):
    """Start (or reuse) the single-node test cluster.

    BUG FIX: avoids the mutable default `workloads=[]` by normalising a
    None default at call time (call-compatible with the original).
    """
    use_cluster(SINGLE_NODE_CLUSTER_NAME, [1], start=start,
                workloads=workloads if workloads is not None else [])
def remove_cluster():
    """Tear down and delete the current CCM cluster, unless the run uses an
    external cluster or was asked to keep it between sessions."""
    if USE_CASS_EXTERNAL or KEEP_TEST_CLUSTER:
        return
    global CCM_CLUSTER
    if CCM_CLUSTER:
        log.debug("Removing cluster {0}".format(CCM_CLUSTER.name))
        tries = 0
        # Removal can hit OSError while node files are still held open;
        # retry up to 100 times with a 1s pause.
        while tries < 100:
            try:
                CCM_CLUSTER.remove()
                CCM_CLUSTER = None
                return
            except OSError:
                ex_type, ex, tb = sys.exc_info()
                log.warning("{0}: {1} Backtrace: {2}".format(ex_type.__name__, ex, traceback.extract_tb(tb)))
                # Drop the traceback reference to avoid a frame cycle.
                del tb
                tries += 1
                time.sleep(1)
        raise RuntimeError("Failed to remove cluster after 100 attempts")
def is_current_cluster(cluster_name, node_counts):
    """Return True when the active CCM cluster matches both the requested
    name and the per-datacenter node counts."""
    global CCM_CLUSTER
    if not CCM_CLUSTER or CCM_CLUSTER.name != cluster_name:
        return False
    # groupby relies on nodelist() being ordered by data center.
    per_dc = [len(list(dc_nodes))
              for _, dc_nodes in groupby(CCM_CLUSTER.nodelist(),
                                         lambda node: node.data_center)]
    return per_dc == node_counts
def use_cluster(cluster_name, nodes, ipformat=None, start=True, workloads=[], set_keyspace=True, ccm_options=None,
                configuration_options={}):
    """Ensure a CCM cluster matching `cluster_name`/`nodes` is available.

    Reuses the current cluster when name and topology match; otherwise
    loads or freshly creates one via ccmlib, optionally starts it and sets
    up the test keyspace. Returns the ccmlib cluster (None when an external
    cluster is in use).

    NOTE(review): `workloads=[]` and `configuration_options={}` are mutable
    default arguments; not mutated here, but the pattern is fragile.
    """
    set_default_cass_ip()
    if ccm_options is None:
        ccm_options = CCM_KWARGS
    cassandra_version = ccm_options.get('version', CASSANDRA_VERSION)
    global CCM_CLUSTER
    if USE_CASS_EXTERNAL:
        # Externally managed cluster: never create or start anything here.
        if CCM_CLUSTER:
            log.debug("Using external CCM cluster {0}".format(CCM_CLUSTER.name))
        else:
            log.debug("Using unnamed external cluster")
        if set_keyspace and start:
            setup_keyspace(ipformat=ipformat, wait=False)
        return
    if is_current_cluster(cluster_name, nodes):
        log.debug("Using existing cluster, matching topology: {0}".format(cluster_name))
    else:
        if CCM_CLUSTER:
            log.debug("Stopping existing cluster, topology mismatch: {0}".format(CCM_CLUSTER.name))
            CCM_CLUSTER.stop()
        try:
            # Prefer reloading a previously created cluster of this name.
            CCM_CLUSTER = CCMClusterFactory.load(path, cluster_name)
            log.debug("Found existing CCM cluster, {0}; clearing.".format(cluster_name))
            CCM_CLUSTER.clear()
            CCM_CLUSTER.set_install_dir(**ccm_options)
            CCM_CLUSTER.set_configuration_options(configuration_options)
        except Exception:
            ex_type, ex, tb = sys.exc_info()
            log.warning("{0}: {1} Backtrace: {2}".format(ex_type.__name__, ex, traceback.extract_tb(tb)))
            del tb
            # Fall back to building the cluster from scratch, enabling the
            # UDF options supported by the target release.
            log.debug("Creating new CCM cluster, {0}, with args {1}".format(cluster_name, ccm_options))
            CCM_CLUSTER = CCMCluster(path, cluster_name, **ccm_options)
            CCM_CLUSTER.set_configuration_options({'start_native_transport': True})
            if cassandra_version >= '2.2':
                CCM_CLUSTER.set_configuration_options({'enable_user_defined_functions': True})
            if cassandra_version >= '3.0':
                CCM_CLUSTER.set_configuration_options({'enable_scripted_user_defined_functions': True})
            common.switch_cluster(path, cluster_name)
            CCM_CLUSTER.set_configuration_options(configuration_options)
            CCM_CLUSTER.populate(nodes, ipformat=ipformat)
    try:
        jvm_args = []
        # This will enable the Mirroring query handler which will echo our custom payload k,v pairs back
        if 'graph' not in workloads:
            if PROTOCOL_VERSION >= 4:
                jvm_args = [" -Dcassandra.custom_query_handler_class=org.apache.cassandra.cql3.CustomPayloadMirroringQueryHandler"]
        if(len(workloads) > 0):
            for node in CCM_CLUSTER.nodes.values():
                node.set_workloads(workloads)
        if start:
            log.debug("Starting CCM cluster: {0}".format(cluster_name))
            CCM_CLUSTER.start(wait_for_binary_proto=True, wait_other_notice=True, jvm_args=jvm_args)
            # Added to wait for slow nodes to start up
            for node in CCM_CLUSTER.nodes.values():
                wait_for_node_socket(node, 120)
            if set_keyspace:
                setup_keyspace(ipformat=ipformat)
    except Exception:
        # Start failed: kill any stray node processes, then delete the
        # cluster so the next attempt starts clean.
        log.exception("Failed to start CCM cluster; removing cluster.")
        if os.name == "nt":
            if CCM_CLUSTER:
                for node in six.itervalues(CCM_CLUSTER.nodes):
                    os.system("taskkill /F /PID " + str(node.pid))
        else:
            call(["pkill", "-9", "-f", ".ccm"])
        remove_cluster()
        raise
    return CCM_CLUSTER
def teardown_package():
    """Remove the CCM clusters created during the test run.

    No-op when the cluster is externally managed (USE_CASS_EXTERNAL) or
    deliberately kept for inspection (KEEP_TEST_CLUSTER).
    """
    if USE_CASS_EXTERNAL or KEEP_TEST_CLUSTER:
        return
    # when multiple modules are run explicitly, this runs between them
    # need to make sure CCM_CLUSTER is properly cleared for that case
    remove_cluster()
    for name in (CLUSTER_NAME, MULTIDC_CLUSTER_NAME):
        try:
            found = CCMClusterFactory.load(path, name)
        except Exception:
            log.warning('Did not find cluster: %s' % name)
            continue
        try:
            found.remove()
        except Exception:
            log.exception('Failed to remove cluster: %s' % name)
        else:
            log.info('Removed cluster: %s' % name)
def execute_until_pass(session, query):
    """Execute *query*, retrying timeout/failure errors up to 100 times.

    Schema "already exists" style errors are treated as success and make
    the function return None; any other exception propagates.
    """
    attempt = 0
    while attempt < 100:
        attempt += 1
        try:
            return session.execute(query)
        except (ConfigurationException, AlreadyExists, InvalidRequest):
            log.warning("Received already exists from query {0} not exiting".format(query))
            # keyspace/table was already created/dropped
            return
        except (OperationTimedOut, ReadTimeout, ReadFailure, WriteTimeout, WriteFailure):
            ex_type, ex, tb = sys.exc_info()
            log.warning("{0}: {1} Backtrace: {2}".format(ex_type.__name__, ex, traceback.extract_tb(tb)))
            # Drop the traceback reference to avoid a reference cycle.
            del tb
    raise RuntimeError("Failed to execute query after 100 attempts: {0}".format(query))
def execute_with_long_wait_retry(session, query, timeout=30):
    """Execute *query* with a long *timeout*, retrying timeout/failure errors.

    At most 10 attempts are made.  Schema "already exists" style errors are
    treated as success (returns None); other exceptions propagate.
    """
    tries = 0
    while tries < 10:
        try:
            return session.execute(query, timeout=timeout)
        except (ConfigurationException, AlreadyExists):
            log.warning("Received already exists from query {0} not exiting".format(query))
            # keyspace/table was already created/dropped
            return
        except (OperationTimedOut, ReadTimeout, ReadFailure, WriteTimeout, WriteFailure):
            ex_type, ex, tb = sys.exc_info()
            log.warning("{0}: {1} Backtrace: {2}".format(ex_type.__name__, ex, traceback.extract_tb(tb)))
            del tb
            tries += 1
    # BUG FIX: the loop permits at most 10 attempts, but the old message
    # claimed 100 -- the error text now matches the actual retry count.
    raise RuntimeError("Failed to execute query after 10 attempts: {0}".format(query))
def execute_with_retry_tolerant(session, query, retry_exceptions, escape_exception):
    """Execute *query* up to 100 times.

    Exceptions matching *escape_exception* end the call quietly (returns
    None); exceptions matching *retry_exceptions* sleep briefly and retry.
    """
    # TODO refactor above methods into this one for code reuse
    for _ in range(100):
        try:
            return session.execute(query)
        except escape_exception:
            return
        except retry_exceptions:
            time.sleep(.1)
    raise RuntimeError("Failed to execute query after 100 attempts: {0}".format(query))
def drop_keyspace_shutdown_cluster(keyspace_name, session, cluster):
    """Best-effort drop of *keyspace_name*, then always shut *cluster* down.

    Errors while dropping are logged and swallowed so teardown can proceed.
    """
    try:
        execute_with_long_wait_retry(session, "DROP KEYSPACE {0}".format(keyspace_name))
    # BUG FIX: was a bare ``except:``, which also swallowed SystemExit and
    # KeyboardInterrupt during teardown; narrowed to Exception.
    except Exception:
        log.warning("Error encountered when dropping keyspace {0}".format(keyspace_name))
        ex_type, ex, tb = sys.exc_info()
        log.warning("{0}: {1} Backtrace: {2}".format(ex_type.__name__, ex, traceback.extract_tb(tb)))
        del tb
    finally:
        log.warning("Shutting down cluster")
        cluster.shutdown()
def setup_keyspace(ipformat=None, wait=True):
    """(Re)create the standard test keyspaces test1rf/test2rf/test3rf.

    Connects to ``::1`` when *ipformat* is truthy (IPv6 test runs --
    only its truthiness is used here; TODO confirm against callers),
    otherwise uses the driver defaults.  Also creates the ``test3rf.test``
    and ``test1rf.test`` tables.  The temporary cluster connection is
    always shut down.
    """
    # wait for nodes to startup
    if wait:
        time.sleep(10)
    if not ipformat:
        cluster = Cluster(protocol_version=PROTOCOL_VERSION)
    else:
        cluster = Cluster(contact_points=["::1"], protocol_version=PROTOCOL_VERSION)
    session = cluster.connect()
    try:
        # Drop leftovers from a previous run before recreating.
        for ksname in ('test1rf', 'test2rf', 'test3rf'):
            if ksname in cluster.metadata.keyspaces:
                execute_until_pass(session, "DROP KEYSPACE %s" % ksname)
        ddl = '''
            CREATE KEYSPACE test3rf
            WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '3'}'''
        execute_with_long_wait_retry(session, ddl)
        ddl = '''
            CREATE KEYSPACE test2rf
            WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '2'}'''
        execute_with_long_wait_retry(session, ddl)
        ddl = '''
            CREATE KEYSPACE test1rf
            WITH replication = {'class': 'SimpleStrategy', 'replication_factor': '1'}'''
        execute_with_long_wait_retry(session, ddl)
        ddl_3f = '''
            CREATE TABLE test3rf.test (
            k int PRIMARY KEY,
            v int )'''
        execute_with_long_wait_retry(session, ddl_3f)
        ddl_1f = '''
            CREATE TABLE test1rf.test (
            k int PRIMARY KEY,
            v int )'''
        execute_with_long_wait_retry(session, ddl_1f)
    except Exception:
        traceback.print_exc()
        raise
    finally:
        cluster.shutdown()
class UpDownWaiter(object):
    """Registers with *host*'s monitor and lets callers block until the
    host's up/down notifications arrive."""

    def __init__(self, host):
        self.up_event = Event()
        self.down_event = Event()
        host.monitor.register(self)

    def on_down(self, host):
        self.down_event.set()

    def on_up(self, host):
        self.up_event.set()

    def wait_for_up(self):
        self.up_event.wait()

    def wait_for_down(self):
        self.down_event.wait()
class BasicKeyspaceUnitTestCase(unittest.TestCase):
    """
    This is basic unit test case that provides various utility methods that can be leveraged for testcase setup and tear
    down
    """
    @property
    def keyspace_name(self):
        # Keyspace scoped to the test class (assigned in common_setup).
        return self.ks_name

    @property
    def class_table_name(self):
        # The class-scoped table shares the keyspace's name.
        return self.ks_name

    @property
    def function_table_name(self):
        # Table name derived from the currently running test method.
        return self._testMethodName.lower()

    @property
    def keyspace_table_name(self):
        # Fully qualified "<keyspace>.<test-method>" table name.
        return "{0}.{1}".format(self.keyspace_name, self._testMethodName.lower())

    @classmethod
    def drop_keyspace(cls):
        execute_with_long_wait_retry(cls.session, "DROP KEYSPACE {0}".format(cls.ks_name))

    @classmethod
    def create_keyspace(cls, rf):
        # SimpleStrategy keyspace with the requested replication factor.
        ddl = "CREATE KEYSPACE {0} WITH replication = {{'class': 'SimpleStrategy', 'replication_factor': '{1}'}}".format(cls.ks_name, rf)
        execute_with_long_wait_retry(cls.session, ddl)

    @classmethod
    def common_setup(cls, rf, keyspace_creation=True, create_class_table=False, metrics=False):
        # Connect a class-level cluster/session, optionally create the class
        # keyspace and a class-scoped table, and record server versions.
        cls.cluster = Cluster(protocol_version=PROTOCOL_VERSION, metrics_enabled=metrics)
        cls.session = cls.cluster.connect(wait_for_all_pools=True)
        cls.ks_name = cls.__name__.lower()
        if keyspace_creation:
            cls.create_keyspace(rf)
        cls.cass_version, cls.cql_version = get_server_versions()
        if create_class_table:
            ddl = '''
                CREATE TABLE {0}.{1} (
                k int PRIMARY KEY,
                v int )'''.format(cls.ks_name, cls.ks_name)
            execute_until_pass(cls.session, ddl)

    def create_function_table(self):
        # Per-test table named after the running test method.
        ddl = '''
            CREATE TABLE {0}.{1} (
            k int PRIMARY KEY,
            v int )'''.format(self.keyspace_name, self.function_table_name)
        execute_until_pass(self.session, ddl)

    def drop_function_table(self):
        ddl = "DROP TABLE {0}.{1} ".format(self.keyspace_name, self.function_table_name)
        execute_until_pass(self.session, ddl)
class MockLoggingHandler(logging.Handler):
    """Mock logging handler to check for expected logs."""

    _LEVELS = ('debug', 'info', 'warning', 'error', 'critical')

    def __init__(self, *args, **kwargs):
        self.reset()
        logging.Handler.__init__(self, *args, **kwargs)

    def emit(self, record):
        # Bucket each formatted message under its lower-cased level name.
        self.messages[record.levelname.lower()].append(record.getMessage())

    def reset(self):
        # Fresh, empty bucket for every standard level.
        self.messages = {level: [] for level in self._LEVELS}

    def get_message_count(self, level, sub_string):
        """Count recorded messages at *level* containing *sub_string*."""
        return sum(1 for msg in self.messages.get(level) if sub_string in msg)
class BasicExistingKeyspaceUnitTestCase(BasicKeyspaceUnitTestCase):
    """
    This is basic unit test defines class level teardown and setup methods. It assumes that keyspace is already defined, or created as part of the test.
    """
    @classmethod
    def setUpClass(cls):
        # No keyspace creation: the test (or environment) provides one.
        cls.common_setup(1, keyspace_creation=False)

    @classmethod
    def tearDownClass(cls):
        # Nothing to drop; only the connection is released.
        cls.cluster.shutdown()
class BasicSharedKeyspaceUnitTestCase(BasicKeyspaceUnitTestCase):
    """
    This is basic unit test case that can be leveraged to scope a keyspace to a specific test class.
    creates a keyspace named after the testclass with a rf of 1.
    """
    @classmethod
    def setUpClass(cls):
        cls.common_setup(1)

    @classmethod
    def tearDownClass(cls):
        # Drop the class keyspace and always shut the cluster down.
        drop_keyspace_shutdown_cluster(cls.ks_name, cls.session, cls.cluster)
class BasicSharedKeyspaceUnitTestCaseRF1(BasicSharedKeyspaceUnitTestCase):
    """
    This is basic unit test case that can be leveraged to scope a keyspace to a specific test class.
    creates a keyspace named after the testclass with a rf of 1
    """
    @classmethod
    def setUpClass(cls):
        # Consistency fix: classmethods take ``cls`` (the sibling classes
        # above already follow this convention); behavior is unchanged.
        cls.common_setup(1, True)
class BasicSharedKeyspaceUnitTestCaseRF2(BasicSharedKeyspaceUnitTestCase):
    """
    This is basic unit test case that can be leveraged to scope a keyspace to a specific test class.
    creates a keyspace named after the test class with a rf of 2
    """
    # NOTE: the old docstring claimed a class table was also created, but
    # common_setup's create_class_table defaults to False here.
    @classmethod
    def setUpClass(cls):
        # Consistency fix: classmethod parameter renamed self -> cls.
        cls.common_setup(2)
class BasicSharedKeyspaceUnitTestCaseRF3(BasicSharedKeyspaceUnitTestCase):
    """
    This is basic unit test case that can be leveraged to scope a keyspace to a specific test class.
    creates a keyspace named after the test class with a rf of 3
    """
    @classmethod
    def setUpClass(cls):
        # Consistency fix: classmethod parameter renamed self -> cls.
        cls.common_setup(3)
class BasicSharedKeyspaceUnitTestCaseRF3WM(BasicSharedKeyspaceUnitTestCase):
    """
    This is basic unit test case that can be leveraged to scope a keyspace to a specific test class.
    creates a keyspace named after the test class with a rf of 3 with metrics enabled
    """
    @classmethod
    def setUpClass(cls):
        # Consistency fix: classmethod parameter renamed self -> cls.
        # Positional args: rf=3, keyspace_creation=True,
        # create_class_table=True, metrics=True.
        cls.common_setup(3, True, True, True)
class BasicSharedKeyspaceUnitTestCaseWFunctionTable(BasicSharedKeyspaceUnitTestCase):
    """
    This is basic unit test case that can be leveraged to scope a keyspace to a specific test class.
    In addition, a table named after each individual test method is created
    before the test and dropped afterwards.
    """
    # NOTE(review): docstring previously opened with a stray fourth quote and
    # claimed rf of 3, but the inherited setUpClass calls common_setup(1).
    def setUp(self):
        self.create_function_table()

    def tearDown(self):
        self.drop_function_table()
class BasicSegregatedKeyspaceUnitTestCase(BasicKeyspaceUnitTestCase):
    """
    This unit test will create and teardown a keyspace for each individual unit test.
    It has overhead and should only be used with complex unit tests where sharing a keyspace will
    cause issues.
    """
    def setUp(self):
        self.common_setup(1)

    def tearDown(self):
        # Each test gets -- and disposes of -- its own keyspace and cluster.
        drop_keyspace_shutdown_cluster(self.ks_name, self.session, self.cluster)
class BasicExistingSegregatedKeyspaceUnitTestCase(BasicKeyspaceUnitTestCase):
    """
    This unit test will create and teardown a cluster/session for each individual unit test.
    It assumes that the keyspace is existing or created as part of a test.
    It has some overhead and should only be used when sharing cluster/session is not feasible.
    """
    def setUp(self):
        self.common_setup(1, keyspace_creation=False)

    def tearDown(self):
        # No keyspace to drop; just release the per-test connection.
        self.cluster.shutdown()
|
thelastpickle/python-driver
|
tests/integration/__init__.py
|
Python
|
apache-2.0
| 25,475
|
# Copyright 2012 Nebula, Inc.
# Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import copy
import datetime
import inspect
import os
import re
import urllib
import uuid as uuid_lib
from lxml import etree
import mock
from oslo.config import cfg
from nova.api.metadata import password
from nova.api.openstack.compute.contrib import fping
from nova.api.openstack.compute import extensions
# Import extensions to pull in osapi_compute_extension CONF option used below.
from nova.cells import rpcapi as cells_rpcapi
from nova.cells import state
from nova.cloudpipe import pipelib
from nova.compute import api as compute_api
from nova.compute import cells_api as cells_api
from nova.compute import manager as compute_manager
from nova.compute import rpcapi as compute_rpcapi
from nova.conductor import manager as conductor_manager
from nova import context
from nova import db
from nova.db.sqlalchemy import models
from nova import exception
from nova.network import api as network_api
from nova import objects
from nova.openstack.common import importutils
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.openstack.common import timeutils
import nova.quota
from nova.servicegroup import api as service_group_api
from nova import test
from nova.tests.api.openstack.compute.contrib import test_fping
from nova.tests.api.openstack.compute.contrib import test_networks
from nova.tests.api.openstack.compute.contrib import test_services
from nova.tests.api.openstack import fakes
from nova.tests import fake_block_device
from nova.tests import fake_instance
from nova.tests import fake_network
from nova.tests import fake_network_cache_model
from nova.tests import fake_server_actions
from nova.tests import fake_utils
from nova.tests.image import fake
from nova.tests.integrated import api_samples_test_base
from nova.tests.integrated import integrated_helpers
from nova.tests.objects import test_network
from nova.tests import utils as test_utils
from nova.tests.virt.baremetal.db import base as bm_db_base
from nova import utils
from nova.volume import cinder
# Register configuration options declared in other nova modules so the
# sample tests can read and override them via self.flags(...).
CONF = cfg.CONF
CONF.import_opt('allow_resize_to_same_host', 'nova.compute.api')
CONF.import_opt('shelved_offload_time', 'nova.compute.manager')
CONF.import_opt('enable_network_quota',
                'nova.api.openstack.compute.contrib.os_tenant_networks')
CONF.import_opt('osapi_compute_extension',
                'nova.api.openstack.compute.extensions')
CONF.import_opt('vpn_image_id', 'nova.cloudpipe.pipelib')
CONF.import_opt('osapi_compute_link_prefix', 'nova.api.openstack.common')
CONF.import_opt('osapi_glance_link_prefix', 'nova.api.openstack.common')
CONF.import_opt('enable', 'nova.cells.opts', group='cells')
CONF.import_opt('cell_type', 'nova.cells.opts', group='cells')
CONF.import_opt('db_check_interval', 'nova.cells.state', group='cells')
LOG = logging.getLogger(__name__)
class ApiSampleTestBaseV2(api_samples_test_base.ApiSampleTestBase):
    """Base class for v2 API sample tests: configures extensions and stubs."""
    _api_version = 'v2'

    def setUp(self):
        # Restrict loaded extensions to the one(s) under test unless the
        # subclass requested everything via ``all_extensions``.
        extends = []
        self.flags(use_ipv6=False,
                   osapi_compute_link_prefix=self._get_host(),
                   osapi_glance_link_prefix=self._get_glance_host())
        if not self.all_extensions:
            if hasattr(self, 'extends_name'):
                extends = [self.extends_name]
            ext = [self.extension_name] if self.extension_name else []
            self.flags(osapi_compute_extension=ext + extends)
        super(ApiSampleTestBaseV2, self).setUp()
        self.useFixture(test.SampleNetworks(host=self.network.host))
        fake_network.stub_compute_with_ips(self.stubs)
        fake_utils.stub_out_utils_spawn_n(self.stubs)
        # Regenerate sample files instead of verifying when requested.
        self.generate_samples = os.getenv('GENERATE_SAMPLES') is not None
class ApiSamplesTrap(ApiSampleTestBaseV2):
    """Make sure extensions don't get added without tests."""
    all_extensions = True

    def _get_extensions_tested(self):
        """Collect the alias of every extension that has a test class in
        this module."""
        tested = []
        for candidate in globals().values():
            is_extension_test = (
                inspect.isclass(candidate) and
                issubclass(candidate, integrated_helpers._IntegratedTestBase)
                and candidate.extension_name is not None)
            if not is_extension_test:
                continue
            extension_cls = importutils.import_class(candidate.extension_name)
            tested.append(extension_cls.alias)
        return tested

    def _get_extensions(self):
        """Return the aliases the running API actually advertises."""
        response = self._do_get('extensions')
        body = jsonutils.loads(response.content)
        return [str(extension['alias']) for extension in body['extensions']]

    def test_all_extensions_have_samples(self):
        # NOTE(danms): This is a list of extensions which are currently
        # in the tree but that don't (yet) have tests. This list should
        # NOT be allowed to grow, and should shrink to zero (and be
        # removed) soon.
        do_not_approve_additions = ['os-create-server-ext']
        tests = self._get_extensions_tested()
        extensions = self._get_extensions()
        missing_tests = []
        for extension in extensions:
            # NOTE(danms): if you add tests, remove it from the
            # exclusions list
            self.assertFalse(extension in do_not_approve_additions and
                             extension in tests)
            # NOTE(danms): if you add an extension, it must come with
            # api_samples tests!
            if (extension not in tests and
                    extension not in do_not_approve_additions):
                missing_tests.append(extension)
        if missing_tests:
            LOG.error("Extensions are missing tests: %s" % missing_tests)
        self.assertEqual(missing_tests, [])
class VersionsSampleJsonTest(ApiSampleTestBaseV2):
    """Verify the versions listing against the stored API samples."""
    def test_versions_get(self):
        response = self._do_get('', strip_version=True)
        subs = self._get_regexes()
        self._verify_response('versions-get-resp', subs, response, 200)
class VersionsSampleXmlTest(VersionsSampleJsonTest):
    """Run the same checks using the XML content type."""
    ctype = 'xml'
class ServersSampleBase(ApiSampleTestBaseV2):
    """Shared helper for sample tests that need a booted server."""

    def _post_server(self):
        """Create a server from the sample request and return its uuid."""
        template_vars = {
            'image_id': fake.get_valid_image_id(),
            'host': self._get_host(),
        }
        response = self._do_post('servers', 'server-post-req', template_vars)
        verify_vars = self._get_regexes()
        return self._verify_response('server-post-resp', verify_vars,
                                     response, 202)
class ServersSampleJsonTest(ServersSampleBase):
    """Samples for basic server create/show/list operations."""

    def _show_subs(self, uuid):
        """Regex substitutions shared by the show and detail responses."""
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        subs['id'] = uuid
        subs['hypervisor_hostname'] = r'[\w\.\-]+'
        subs['mac_addr'] = '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
        return subs

    def test_servers_post(self):
        return self._post_server()

    def test_servers_get(self):
        uuid = self.test_servers_post()
        response = self._do_get('servers/%s' % uuid)
        self._verify_response('server-get-resp', self._show_subs(uuid),
                              response, 200)

    def test_servers_list(self):
        uuid = self._post_server()
        response = self._do_get('servers')
        subs = self._get_regexes()
        subs['id'] = uuid
        self._verify_response('servers-list-resp', subs, response, 200)

    def test_servers_details(self):
        uuid = self._post_server()
        response = self._do_get('servers/detail')
        self._verify_response('servers-details-resp', self._show_subs(uuid),
                              response, 200)
class ServersSampleXmlTest(ServersSampleJsonTest):
    """Run the server sample tests using the XML content type."""
    ctype = 'xml'
class ServersSampleAllExtensionJsonTest(ServersSampleJsonTest):
    """Re-run the server samples with every extension loaded."""
    all_extensions = True
class ServersSampleAllExtensionXmlTest(ServersSampleXmlTest):
    """XML variant of the all-extensions server samples."""
    all_extensions = True
class ServersSampleHideAddressesJsonTest(ServersSampleJsonTest):
    """Server samples with the hide-server-addresses extension loaded."""
    extension_name = '.'.join(('nova.api.openstack.compute.contrib',
                               'hide_server_addresses',
                               'Hide_server_addresses'))
class ServersSampleHideAddressesXMLTest(ServersSampleHideAddressesJsonTest):
    """XML variant of the hide-server-addresses samples."""
    ctype = 'xml'
class ServersSampleMultiStatusJsonTest(ServersSampleBase):
    """Samples for listing servers filtered by multiple status values."""
    extension_name = '.'.join(('nova.api.openstack.compute.contrib',
                               'server_list_multi_status',
                               'Server_list_multi_status'))

    def test_servers_list(self):
        # Two repeated ``status`` query parameters in one request.
        uuid = self._post_server()
        response = self._do_get('servers?status=active&status=error')
        subs = self._get_regexes()
        subs['id'] = uuid
        self._verify_response('servers-list-resp', subs, response, 200)
class ServersSampleMultiStatusXMLTest(ServersSampleMultiStatusJsonTest):
    """XML variant of the multi-status listing samples."""
    ctype = 'xml'
class ServersMetadataJsonTest(ServersSampleBase):
    """Samples for the server metadata CRUD API."""

    def _create_and_set(self, subs):
        """Boot a server, PUT all metadata from the sample, return its uuid."""
        uuid = self._post_server()
        response = self._do_put('servers/%s/metadata' % uuid,
                                'server-metadata-all-req',
                                subs)
        self._verify_response('server-metadata-all-resp', subs, response, 200)
        return uuid

    def generalize_subs(self, subs, vanilla_regexes):
        # Sample files may contain either value; accept both when verifying.
        subs['value'] = '(Foo|Bar) Value'
        return subs

    def test_metadata_put_all(self):
        # Test setting all metadata for a server.
        subs = {'value': 'Foo Value'}
        self._create_and_set(subs)

    def test_metadata_post_all(self):
        # Test updating all metadata for a server.
        subs = {'value': 'Foo Value'}
        uuid = self._create_and_set(subs)
        subs['value'] = 'Bar Value'
        response = self._do_post('servers/%s/metadata' % uuid,
                                 'server-metadata-all-req',
                                 subs)
        self._verify_response('server-metadata-all-resp', subs, response, 200)

    def test_metadata_get_all(self):
        # Test getting all metadata for a server.
        subs = {'value': 'Foo Value'}
        uuid = self._create_and_set(subs)
        response = self._do_get('servers/%s/metadata' % uuid)
        self._verify_response('server-metadata-all-resp', subs, response, 200)

    def test_metadata_put(self):
        # Test putting an individual metadata item for a server.
        subs = {'value': 'Foo Value'}
        uuid = self._create_and_set(subs)
        subs['value'] = 'Bar Value'
        response = self._do_put('servers/%s/metadata/foo' % uuid,
                                'server-metadata-req',
                                subs)
        self._verify_response('server-metadata-resp', subs, response, 200)

    def test_metadata_get(self):
        # Test getting an individual metadata item for a server.
        subs = {'value': 'Foo Value'}
        uuid = self._create_and_set(subs)
        response = self._do_get('servers/%s/metadata/foo' % uuid)
        self._verify_response('server-metadata-resp', subs, response, 200)

    def test_metadata_delete(self):
        # Test deleting an individual metadata item for a server.
        subs = {'value': 'Foo Value'}
        uuid = self._create_and_set(subs)
        response = self._do_delete('servers/%s/metadata/foo' % uuid)
        self.assertEqual(response.status_code, 204)
        self.assertEqual(response.content, '')
class ServersMetadataXmlTest(ServersMetadataJsonTest):
    """XML variant of the server metadata samples."""
    ctype = 'xml'
class ServersIpsJsonTest(ServersSampleBase):
    """Samples for the server addresses (ips) sub-resource."""
    def test_get(self):
        # Test getting a server's IP information.
        uuid = self._post_server()
        response = self._do_get('servers/%s/ips' % uuid)
        subs = self._get_regexes()
        self._verify_response('server-ips-resp', subs, response, 200)

    def test_get_by_network(self):
        # Test getting a server's IP information by network id.
        uuid = self._post_server()
        response = self._do_get('servers/%s/ips/private' % uuid)
        subs = self._get_regexes()
        self._verify_response('server-ips-network-resp', subs, response, 200)
class ServersIpsXmlTest(ServersIpsJsonTest):
    """XML variant of the server addresses samples."""
    ctype = 'xml'
class ExtensionsSampleJsonTest(ApiSampleTestBaseV2):
    """Samples for the extensions listing itself (all extensions loaded)."""
    all_extensions = True

    def test_extensions_get(self):
        response = self._do_get('extensions')
        subs = self._get_regexes()
        self._verify_response('extensions-get-resp', subs, response, 200)
class ExtensionsSampleXmlTest(ExtensionsSampleJsonTest):
    """XML variant of the extensions listing samples."""
    ctype = 'xml'
class FlavorsSampleJsonTest(ApiSampleTestBaseV2):
    """Samples for the basic flavors show/list API."""
    def test_flavors_get(self):
        response = self._do_get('flavors/1')
        subs = self._get_regexes()
        self._verify_response('flavor-get-resp', subs, response, 200)

    def test_flavors_list(self):
        response = self._do_get('flavors')
        subs = self._get_regexes()
        self._verify_response('flavors-list-resp', subs, response, 200)
class FlavorsSampleXmlTest(FlavorsSampleJsonTest):
    """XML variant of the flavors samples."""
    ctype = 'xml'
class HostsSampleJsonTest(ApiSampleTestBaseV2):
    """Samples for the os-hosts admin actions and listing."""
    extension_name = "nova.api.openstack.compute.contrib.hosts.Hosts"

    def test_host_startup(self):
        response = self._do_get('os-hosts/%s/startup' % self.compute.host)
        subs = self._get_regexes()
        self._verify_response('host-get-startup', subs, response, 200)

    def test_host_reboot(self):
        response = self._do_get('os-hosts/%s/reboot' % self.compute.host)
        subs = self._get_regexes()
        self._verify_response('host-get-reboot', subs, response, 200)

    def test_host_shutdown(self):
        response = self._do_get('os-hosts/%s/shutdown' % self.compute.host)
        subs = self._get_regexes()
        self._verify_response('host-get-shutdown', subs, response, 200)

    def test_host_maintenance(self):
        response = self._do_put('os-hosts/%s' % self.compute.host,
                                'host-put-maintenance-req', {})
        subs = self._get_regexes()
        self._verify_response('host-put-maintenance-resp', subs, response, 200)

    def test_host_get(self):
        response = self._do_get('os-hosts/%s' % self.compute.host)
        subs = self._get_regexes()
        self._verify_response('host-get-resp', subs, response, 200)

    def test_hosts_list(self):
        response = self._do_get('os-hosts')
        subs = self._get_regexes()
        self._verify_response('hosts-list-resp', subs, response, 200)
class HostsSampleXmlTest(HostsSampleJsonTest):
    """XML variant of the os-hosts samples."""
    ctype = 'xml'
class FlavorsSampleAllExtensionJsonTest(FlavorsSampleJsonTest):
    """Re-run the flavors samples with every extension loaded."""
    all_extensions = True
class FlavorsSampleAllExtensionXmlTest(FlavorsSampleXmlTest):
    """XML variant of the all-extensions flavors samples."""
    all_extensions = True
class ImagesSampleJsonTest(ApiSampleTestBaseV2):
    """Samples for the image listing/detail/metadata proxy API."""
    def test_images_list(self):
        # Get api sample of images get list request.
        response = self._do_get('images')
        subs = self._get_regexes()
        self._verify_response('images-list-get-resp', subs, response, 200)

    def test_image_get(self):
        # Get api sample of one single image details request.
        image_id = fake.get_valid_image_id()
        response = self._do_get('images/%s' % image_id)
        subs = self._get_regexes()
        subs['image_id'] = image_id
        self._verify_response('image-get-resp', subs, response, 200)

    def test_images_details(self):
        # Get api sample of all images details request.
        response = self._do_get('images/detail')
        subs = self._get_regexes()
        self._verify_response('images-details-get-resp', subs, response, 200)

    def test_image_metadata_get(self):
        # Get api sample of an image metadata request.
        image_id = fake.get_valid_image_id()
        response = self._do_get('images/%s/metadata' % image_id)
        subs = self._get_regexes()
        subs['image_id'] = image_id
        self._verify_response('image-metadata-get-resp', subs, response, 200)

    def test_image_metadata_post(self):
        # Get api sample to update metadata of an image metadata request.
        image_id = fake.get_valid_image_id()
        response = self._do_post(
                'images/%s/metadata' % image_id,
                'image-metadata-post-req', {})
        subs = self._get_regexes()
        self._verify_response('image-metadata-post-resp', subs, response, 200)

    def test_image_metadata_put(self):
        # Get api sample of image metadata put request.
        image_id = fake.get_valid_image_id()
        response = self._do_put('images/%s/metadata' % image_id,
                                'image-metadata-put-req', {})
        subs = self._get_regexes()
        self._verify_response('image-metadata-put-resp', subs, response, 200)

    def test_image_meta_key_get(self):
        # Get api sample of an image metadata key request.
        image_id = fake.get_valid_image_id()
        key = "kernel_id"
        response = self._do_get('images/%s/metadata/%s' % (image_id, key))
        subs = self._get_regexes()
        self._verify_response('image-meta-key-get', subs, response, 200)

    def test_image_meta_key_put(self):
        # Get api sample of image metadata key put request.
        image_id = fake.get_valid_image_id()
        key = "auto_disk_config"
        response = self._do_put('images/%s/metadata/%s' % (image_id, key),
                                'image-meta-key-put-req', {})
        subs = self._get_regexes()
        self._verify_response('image-meta-key-put-resp', subs, response, 200)
class ImagesSampleXmlTest(ImagesSampleJsonTest):
    """XML variant of the images samples."""
    ctype = 'xml'
class LimitsSampleJsonTest(ApiSampleTestBaseV2):
    """Samples for the tenant limits listing."""
    def test_limits_get(self):
        response = self._do_get('limits')
        subs = self._get_regexes()
        self._verify_response('limit-get-resp', subs, response, 200)
class LimitsSampleXmlTest(LimitsSampleJsonTest):
    """XML variant of the limits samples."""
    ctype = 'xml'
class ServersActionsJsonTest(ServersSampleBase):
    """Samples for POST /servers/<uuid>/action operations."""

    def _test_server_action(self, uuid, action,
                            subs=None, resp_tpl=None, code=202):
        """Post *action* to the server and verify the result.

        With *resp_tpl* the response body is checked against that sample
        template; otherwise an empty body with status *code* is expected.
        """
        subs = subs or {}
        subs.update({'action': action})
        response = self._do_post('servers/%s/action' % uuid,
                                 'server-action-%s' % action.lower(),
                                 subs)
        if not resp_tpl:
            self.assertEqual(response.status_code, code)
            self.assertEqual(response.content, "")
            return
        subs.update(self._get_regexes())
        self._verify_response(resp_tpl, subs, response, code)

    def test_server_password(self):
        uuid = self._post_server()
        self._test_server_action(uuid, "changePassword",
                                 {"password": "foo"})

    def test_server_reboot_hard(self):
        uuid = self._post_server()
        self._test_server_action(uuid, "reboot",
                                 {"type": "HARD"})

    def test_server_reboot_soft(self):
        uuid = self._post_server()
        self._test_server_action(uuid, "reboot",
                                 {"type": "SOFT"})

    def test_server_rebuild(self):
        uuid = self._post_server()
        image = self.api.get_images()[0]['id']
        rebuild_subs = {
            'host': self._get_host(),
            'uuid': image,
            'name': 'foobar',
            'pass': 'seekr3t',
            'ip': '1.2.3.4',
            'ip6': 'fe80::100',
            'hostid': '[a-f0-9]+',
        }
        self._test_server_action(uuid, 'rebuild', rebuild_subs,
                                 'server-action-rebuild-resp')

    def test_server_resize(self):
        self.flags(allow_resize_to_same_host=True)
        uuid = self._post_server()
        self._test_server_action(uuid, "resize",
                                 {"id": 2,
                                  "host": self._get_host()})
        return uuid

    def test_server_revert_resize(self):
        uuid = self.test_server_resize()
        self._test_server_action(uuid, "revertResize")

    def test_server_confirm_resize(self):
        uuid = self.test_server_resize()
        self._test_server_action(uuid, "confirmResize", code=204)

    def test_server_create_image(self):
        uuid = self._post_server()
        self._test_server_action(uuid, 'createImage',
                                 {'name': 'foo-image',
                                  'meta_var': 'myvar',
                                  'meta_val': 'foobar'})
class ServersActionsXmlTest(ServersActionsJsonTest):
    """XML variant of the server action samples."""
    ctype = 'xml'
class ServersActionsAllJsonTest(ServersActionsJsonTest):
    """Server action samples with every extension loaded."""
    all_extensions = True
class ServersActionsAllXmlTest(ServersActionsXmlTest):
    """XML variant of the all-extensions server action samples."""
    all_extensions = True
class ServerStartStopJsonTest(ServersSampleBase):
    """Samples for the os-start/os-stop server actions."""
    extension_name = "nova.api.openstack.compute.contrib" + \
        ".server_start_stop.Server_start_stop"

    def _test_server_action(self, uuid, action):
        # Start/stop actions return 202 with an empty body.
        response = self._do_post('servers/%s/action' % uuid,
                                 'server_start_stop',
                                 {'action': action})
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")

    def test_server_start(self):
        uuid = self._post_server()
        # A server must be stopped before it can be started.
        self._test_server_action(uuid, 'os-stop')
        self._test_server_action(uuid, 'os-start')

    def test_server_stop(self):
        uuid = self._post_server()
        self._test_server_action(uuid, 'os-stop')
class ServerStartStopXmlTest(ServerStartStopJsonTest):
    """XML variant of the start/stop action samples."""
    ctype = 'xml'
class UserDataJsonTest(ApiSampleTestBaseV2):
    """Samples for booting a server with base64-encoded user data."""
    extension_name = "nova.api.openstack.compute.contrib.user_data.User_data"

    def test_user_data_post(self):
        user_data_contents = '#!/bin/bash\n/bin/su\necho "I am in you!"\n'
        # The API expects user_data pre-encoded as base64.
        user_data = base64.b64encode(user_data_contents)
        subs = {
            'image_id': fake.get_valid_image_id(),
            'host': self._get_host(),
            'user_data': user_data
        }
        response = self._do_post('servers', 'userdata-post-req', subs)
        subs.update(self._get_regexes())
        self._verify_response('userdata-post-resp', subs, response, 202)
class UserDataXmlTest(UserDataJsonTest):
    """XML variant of the user-data samples."""
    ctype = 'xml'
class FlavorsExtraDataJsonTest(ApiSampleTestBaseV2):
    """Samples for flavors extended with the flavorextradata attributes."""
    extension_name = ('nova.api.openstack.compute.contrib.flavorextradata.'
                      'Flavorextradata')

    def _get_flags(self):
        f = super(FlavorsExtraDataJsonTest, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        # Flavorextradata extension also needs Flavormanage to be loaded.
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.flavormanage.Flavormanage')
        return f

    def test_flavors_extra_data_get(self):
        flavor_id = 1
        response = self._do_get('flavors/%s' % flavor_id)
        subs = {
            'flavor_id': flavor_id,
            'flavor_name': 'm1.tiny'
        }
        subs.update(self._get_regexes())
        self._verify_response('flavors-extra-data-get-resp',
                              subs, response, 200)

    def test_flavors_extra_data_list(self):
        response = self._do_get('flavors/detail')
        subs = self._get_regexes()
        self._verify_response('flavors-extra-data-list-resp',
                              subs, response, 200)

    def test_flavors_extra_data_create(self):
        subs = {
            'flavor_id': 666,
            'flavor_name': 'flavortest'
        }
        response = self._do_post('flavors',
                                 'flavors-extra-data-post-req',
                                 subs)
        subs.update(self._get_regexes())
        self._verify_response('flavors-extra-data-post-resp',
                              subs, response, 200)
class FlavorsExtraDataXmlTest(FlavorsExtraDataJsonTest):
    """XML variant of the flavorextradata samples."""
    ctype = 'xml'
class FlavorRxtxJsonTest(ApiSampleTestBaseV2):
    """Samples for flavors extended with the rxtx_factor attribute."""
    extension_name = ('nova.api.openstack.compute.contrib.flavor_rxtx.'
                      'Flavor_rxtx')

    def _get_flags(self):
        f = super(FlavorRxtxJsonTest, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        # FlavorRxtx extension also needs Flavormanage to be loaded.
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.flavormanage.Flavormanage')
        return f

    def test_flavor_rxtx_get(self):
        flavor_id = 1
        response = self._do_get('flavors/%s' % flavor_id)
        subs = {
            'flavor_id': flavor_id,
            'flavor_name': 'm1.tiny'
        }
        subs.update(self._get_regexes())
        self._verify_response('flavor-rxtx-get-resp', subs, response, 200)

    def test_flavors_rxtx_list(self):
        response = self._do_get('flavors/detail')
        subs = self._get_regexes()
        self._verify_response('flavor-rxtx-list-resp', subs, response, 200)

    def test_flavors_rxtx_create(self):
        subs = {
            'flavor_id': 100,
            'flavor_name': 'flavortest'
        }
        response = self._do_post('flavors',
                                 'flavor-rxtx-post-req',
                                 subs)
        subs.update(self._get_regexes())
        self._verify_response('flavor-rxtx-post-resp', subs, response, 200)
class FlavorRxtxXmlTest(FlavorRxtxJsonTest):
    """XML variant of the flavor rxtx samples."""
    ctype = 'xml'
class FlavorSwapJsonTest(ApiSampleTestBaseV2):
    """Samples for flavors extended with the swap attribute."""
    extension_name = ('nova.api.openstack.compute.contrib.flavor_swap.'
                      'Flavor_swap')

    def _get_flags(self):
        f = super(FlavorSwapJsonTest, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        # FlavorSwap extension also needs Flavormanage to be loaded.
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.flavormanage.Flavormanage')
        return f

    def test_flavor_swap_get(self):
        flavor_id = 1
        response = self._do_get('flavors/%s' % flavor_id)
        subs = {
            'flavor_id': flavor_id,
            'flavor_name': 'm1.tiny'
        }
        subs.update(self._get_regexes())
        self._verify_response('flavor-swap-get-resp', subs, response, 200)

    def test_flavor_swap_list(self):
        response = self._do_get('flavors/detail')
        subs = self._get_regexes()
        self._verify_response('flavor-swap-list-resp', subs, response, 200)

    def test_flavor_swap_create(self):
        subs = {
            'flavor_id': 100,
            'flavor_name': 'flavortest'
        }
        response = self._do_post('flavors',
                                 'flavor-swap-post-req',
                                 subs)
        subs.update(self._get_regexes())
        self._verify_response('flavor-swap-post-resp', subs, response, 200)
class FlavorSwapXmlTest(FlavorSwapJsonTest):
    """Re-run the FlavorSwap samples with the XML content type."""
    ctype = 'xml'
class SecurityGroupsSampleJsonTest(ServersSampleBase):
    """API samples for the os-security-groups extension."""

    extension_name = "nova.api.openstack.compute.contrib" + \
                     ".security_groups.Security_groups"

    def _get_create_subs(self):
        # Template values shared by the create request/response samples.
        return {
                'group_name': 'test',
                "description": "description",
        }

    def _create_security_group(self):
        # POST a new security group built from the sample template.
        subs = self._get_create_subs()
        return self._do_post('os-security-groups',
                             'security-group-post-req', subs)

    def _add_group(self, uuid):
        # Attach the 'test' group to the server with the given uuid.
        subs = {
                'group_name': 'test'
        }
        return self._do_post('servers/%s/action' % uuid,
                             'security-group-add-post-req', subs)

    def test_security_group_create(self):
        response = self._create_security_group()
        subs = self._get_create_subs()
        self._verify_response('security-groups-create-resp', subs,
                              response, 200)

    def test_security_groups_list(self):
        # Get api sample of security groups get list request.
        response = self._do_get('os-security-groups')
        subs = self._get_regexes()
        self._verify_response('security-groups-list-get-resp',
                              subs, response, 200)

    def test_security_groups_get(self):
        # Get api sample of security groups get request.
        security_group_id = '1'
        response = self._do_get('os-security-groups/%s' % security_group_id)
        subs = self._get_regexes()
        self._verify_response('security-groups-get-resp', subs, response, 200)

    def test_security_groups_list_server(self):
        # Get api sample of security groups for a specific server.
        uuid = self._post_server()
        response = self._do_get('servers/%s/os-security-groups' % uuid)
        subs = self._get_regexes()
        self._verify_response('server-security-groups-list-resp',
                              subs, response, 200)

    def test_security_groups_add(self):
        # Adding a group to a server returns an empty 202.
        self._create_security_group()
        uuid = self._post_server()
        response = self._add_group(uuid)
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')

    def test_security_groups_remove(self):
        # Removing a previously added group also returns an empty 202.
        self._create_security_group()
        uuid = self._post_server()
        self._add_group(uuid)
        subs = {
                'group_name': 'test'
        }
        response = self._do_post('servers/%s/action' % uuid,
                                 'security-group-remove-post-req', subs)
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')
class SecurityGroupsSampleXmlTest(SecurityGroupsSampleJsonTest):
    """Re-run the SecurityGroups samples with the XML content type."""
    ctype = 'xml'
class SecurityGroupDefaultRulesSampleJsonTest(ServersSampleBase):
    """API samples for the os-security-group-default-rules extension."""

    extension_name = ('nova.api.openstack.compute.contrib'
                      '.security_group_default_rules'
                      '.Security_group_default_rules')

    def test_security_group_default_rules_create(self):
        """Create a default rule and verify the create sample."""
        response = self._do_post('os-security-group-default-rules',
                                 'security-group-default-rules-create-req',
                                 {})
        self._verify_response('security-group-default-rules-create-resp',
                              {}, response, 200)

    def test_security_group_default_rules_list(self):
        """Create a rule, then list all default rules."""
        self.test_security_group_default_rules_create()
        response = self._do_get('os-security-group-default-rules')
        self._verify_response('security-group-default-rules-list-resp',
                              {}, response, 200)

    def test_security_group_default_rules_show(self):
        """Create a rule, then show it by id (first rule is always id 1)."""
        self.test_security_group_default_rules_create()
        response = self._do_get('os-security-group-default-rules/1')
        self._verify_response('security-group-default-rules-show-resp',
                              {}, response, 200)
class SecurityGroupDefaultRulesSampleXmlTest(
        SecurityGroupDefaultRulesSampleJsonTest):
    """Re-run the SecurityGroupDefaultRules samples with XML content type."""
    ctype = 'xml'
class SchedulerHintsJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-scheduler-hints extension."""

    extension_name = ("nova.api.openstack.compute.contrib.scheduler_hints."
                      "Scheduler_hints")

    def test_scheduler_hints_post(self):
        """Boot a server with scheduler hints and verify the sample."""
        subs = {
            'image_id': fake.get_valid_image_id(),
            'image_near': str(uuid_lib.uuid4()),
        }
        response = self._do_post('servers', 'scheduler-hints-post-req', subs)
        regexes = self._get_regexes()
        self._verify_response('scheduler-hints-post-resp', regexes,
                              response, 202)
class SchedulerHintsXmlTest(SchedulerHintsJsonTest):
    """Re-run the SchedulerHints samples with the XML content type."""
    ctype = 'xml'
class ConsoleOutputSampleJsonTest(ServersSampleBase):
    """API samples for the os-console-output extension."""

    extension_name = ("nova.api.openstack.compute.contrib"
                      ".console_output.Console_output")

    def test_get_console_output(self):
        """Request the console log of a freshly booted server."""
        server_id = self._post_server()
        response = self._do_post('servers/%s/action' % server_id,
                                 'console-output-post-req',
                                 {'action': 'os-getConsoleOutput'})
        regexes = self._get_regexes()
        self._verify_response('console-output-post-resp', regexes,
                              response, 200)
class ConsoleOutputSampleXmlTest(ConsoleOutputSampleJsonTest):
    """Re-run the ConsoleOutput samples with the XML content type."""
    ctype = 'xml'
class ExtendedServerAttributesJsonTest(ServersSampleBase):
    """API samples for the OS-EXT-SRV-ATTR extended-server-attributes
    extension.
    """

    extension_name = "nova.api.openstack.compute.contrib" + \
                     ".extended_server_attributes" + \
                     ".Extended_server_attributes"

    def _attr_subs(self, uuid):
        """Build the substitution map shared by the show/detail tests.

        The values are regexes consumed by _verify_response; raw string
        literals keep '\\d' and '\\.' as literal backslash escapes for the
        matcher instead of (invalid) Python string escapes.
        """
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        subs['id'] = uuid
        subs['instance_name'] = r'instance-\d{8}'
        subs['hypervisor_hostname'] = r'[\w\.\-]+'
        return subs

    def test_show(self):
        """Show one server and verify the extended attributes sample."""
        uuid = self._post_server()
        response = self._do_get('servers/%s' % uuid)
        self._verify_response('server-get-resp', self._attr_subs(uuid),
                              response, 200)

    def test_detail(self):
        """List server details and verify the extended attributes sample."""
        uuid = self._post_server()
        response = self._do_get('servers/detail')
        self._verify_response('servers-detail-resp', self._attr_subs(uuid),
                              response, 200)
class ExtendedServerAttributesXmlTest(ExtendedServerAttributesJsonTest):
    """Re-run the ExtendedServerAttributes samples with XML content type."""
    ctype = 'xml'
class FloatingIpsJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-floating-ips extension.

    setUp seeds the database with a small pool of floating IPs so the
    list/create samples have data to match against; tearDown removes
    the same pool again.
    """

    extension_name = "nova.api.openstack.compute.contrib." \
        "floating_ips.Floating_ips"

    def setUp(self):
        super(FloatingIpsJsonTest, self).setUp()
        pool = CONF.default_floating_pool
        interface = CONF.public_interface
        # Three addresses in the default pool, none associated yet.
        self.ip_pool = [
            {
                'address': "10.10.10.1",
                'pool': pool,
                'interface': interface
                },
            {
                'address': "10.10.10.2",
                'pool': pool,
                'interface': interface
                },
            {
                'address': "10.10.10.3",
                'pool': pool,
                'interface': interface
                },
            ]
        self.compute.db.floating_ip_bulk_create(
            context.get_admin_context(), self.ip_pool)

    def tearDown(self):
        # Remove the pool seeded in setUp before the base teardown runs.
        self.compute.db.floating_ip_bulk_destroy(
            context.get_admin_context(), self.ip_pool)
        super(FloatingIpsJsonTest, self).tearDown()

    def test_floating_ips_list_empty(self):
        # No IPs have been allocated yet, so the list is empty.
        response = self._do_get('os-floating-ips')
        subs = self._get_regexes()
        self._verify_response('floating-ips-list-empty-resp',
                              subs, response, 200)

    def test_floating_ips_list(self):
        # Allocate two IPs (no explicit pool), then list them.
        self._do_post('os-floating-ips',
                      'floating-ips-create-nopool-req',
                      {})
        self._do_post('os-floating-ips',
                      'floating-ips-create-nopool-req',
                      {})
        response = self._do_get('os-floating-ips')
        subs = self._get_regexes()
        self._verify_response('floating-ips-list-resp',
                              subs, response, 200)

    def test_floating_ips_create_nopool(self):
        # Allocation without a pool falls back to the default pool.
        response = self._do_post('os-floating-ips',
                                 'floating-ips-create-nopool-req',
                                 {})
        subs = self._get_regexes()
        self._verify_response('floating-ips-create-resp',
                              subs, response, 200)

    def test_floating_ips_create(self):
        response = self._do_post('os-floating-ips',
                                 'floating-ips-create-req',
                                 {"pool": CONF.default_floating_pool})
        subs = self._get_regexes()
        self._verify_response('floating-ips-create-resp', subs, response, 200)

    def test_floating_ips_get(self):
        self.test_floating_ips_create()
        # NOTE(sdague): the first floating ip will always have 1 as an id,
        # but it would be better if we could get this from the create
        response = self._do_get('os-floating-ips/%d' % 1)
        subs = self._get_regexes()
        self._verify_response('floating-ips-create-resp', subs, response, 200)

    def test_floating_ips_delete(self):
        # Deleting an allocated IP returns an empty 202.
        self.test_floating_ips_create()
        response = self._do_delete('os-floating-ips/%d' % 1)
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")
class ExtendedFloatingIpsJsonTest(FloatingIpsJsonTest):
    """Re-run the FloatingIps samples with os-extended-floating-ips loaded."""
    extends_name = ("nova.api.openstack.compute.contrib."
                    "floating_ips.Floating_ips")
    extension_name = ("nova.api.openstack.compute.contrib."
                      "extended_floating_ips.Extended_floating_ips")
class FloatingIpsXmlTest(FloatingIpsJsonTest):
    """Re-run the FloatingIps samples with the XML content type."""
    ctype = 'xml'
class ExtendedFloatingIpsXmlTest(ExtendedFloatingIpsJsonTest):
    """Re-run the ExtendedFloatingIps samples with the XML content type."""
    ctype = 'xml'
class FloatingIpsBulkJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-floating-ips-bulk extension.

    setUp seeds the database with a small pool of floating IPs (one of
    them pinned to a host so the by-host filter has a match); tearDown
    removes the same pool again.
    """

    extension_name = "nova.api.openstack.compute.contrib." \
        "floating_ips_bulk.Floating_ips_bulk"

    def setUp(self):
        super(FloatingIpsBulkJsonTest, self).setUp()
        pool = CONF.default_floating_pool
        interface = CONF.public_interface
        self.ip_pool = [
            {
                'address': "10.10.10.1",
                'pool': pool,
                'interface': interface
                },
            {
                'address': "10.10.10.2",
                'pool': pool,
                'interface': interface
                },
            {
                'address': "10.10.10.3",
                'pool': pool,
                'interface': interface,
                'host': "testHost"
                },
            ]
        self.compute.db.floating_ip_bulk_create(
            context.get_admin_context(), self.ip_pool)

    def tearDown(self):
        # Remove the pool seeded in setUp before the base teardown runs.
        self.compute.db.floating_ip_bulk_destroy(
            context.get_admin_context(), self.ip_pool)
        super(FloatingIpsBulkJsonTest, self).tearDown()

    def test_floating_ips_bulk_list(self):
        response = self._do_get('os-floating-ips-bulk')
        subs = self._get_regexes()
        self._verify_response('floating-ips-bulk-list-resp',
                              subs, response, 200)

    def test_floating_ips_bulk_list_by_host(self):
        # Only the 10.10.10.3 entry is assigned to 'testHost'.
        response = self._do_get('os-floating-ips-bulk/testHost')
        subs = self._get_regexes()
        self._verify_response('floating-ips-bulk-list-by-host-resp',
                              subs, response, 200)

    def test_floating_ips_bulk_create(self):
        response = self._do_post('os-floating-ips-bulk',
                                 'floating-ips-bulk-create-req',
                                 {"ip_range": "192.168.1.0/24",
                                  "pool": CONF.default_floating_pool,
                                  "interface": CONF.public_interface})
        subs = self._get_regexes()
        self._verify_response('floating-ips-bulk-create-resp', subs,
                              response, 200)

    def test_floating_ips_bulk_delete(self):
        # Bulk delete is a PUT on the 'delete' pseudo-resource.
        response = self._do_put('os-floating-ips-bulk/delete',
                                'floating-ips-bulk-delete-req',
                                {"ip_range": "192.168.1.0/24"})
        subs = self._get_regexes()
        self._verify_response('floating-ips-bulk-delete-resp', subs,
                              response, 200)
class FloatingIpsBulkXmlTest(FloatingIpsBulkJsonTest):
    """Re-run the FloatingIpsBulk samples with the XML content type."""
    ctype = 'xml'
class KeyPairsSampleJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-keypairs extension."""

    extension_name = "nova.api.openstack.compute.contrib.keypairs.Keypairs"

    def generalize_subs(self, subs, vanilla_regexes):
        # Keypair names are random per run; match them with a regex when
        # comparing against the stored samples.
        subs['keypair_name'] = 'keypair-[0-9a-f-]+'
        return subs

    def test_keypairs_post(self, public_key=None):
        """Get api sample of key pairs post request."""
        # NOTE(review): the public_key parameter is currently unused here;
        # kept for signature compatibility with callers/subclasses.
        key_name = 'keypair-' + str(uuid_lib.uuid4())
        response = self._do_post('os-keypairs', 'keypairs-post-req',
                                 {'keypair_name': key_name})
        subs = self._get_regexes()
        subs['keypair_name'] = '(%s)' % key_name
        self._verify_response('keypairs-post-resp', subs, response, 200)
        # NOTE(maurosr): return the key_name is necessary cause the
        # verification returns the label of the last compared information in
        # the response, not necessarily the key name.
        return key_name

    def test_keypairs_import_key_post(self):
        # Get api sample of key pairs post to import user's key.
        key_name = 'keypair-' + str(uuid_lib.uuid4())
        subs = {
            'keypair_name': key_name,
            'public_key': "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAAAgQDx8nkQv/zgGg"
                          "B4rMYmIf+6A4l6Rr+o/6lHBQdW5aYd44bd8JttDCE/F/pNRr0l"
                          "RE+PiqSPO8nDPHw0010JeMH9gYgnnFlyY3/OcJ02RhIPyyxYpv"
                          "9FhY+2YiUkpwFOcLImyrxEsYXpD/0d3ac30bNH6Sw9JD9UZHYc"
                          "pSxsIbECHw== Generated-by-Nova"
        }
        response = self._do_post('os-keypairs', 'keypairs-import-post-req',
                                 subs)
        subs = self._get_regexes()
        subs['keypair_name'] = '(%s)' % key_name
        self._verify_response('keypairs-import-post-resp', subs, response, 200)

    def test_keypairs_list(self):
        # Get api sample of key pairs list request.
        key_name = self.test_keypairs_post()
        response = self._do_get('os-keypairs')
        subs = self._get_regexes()
        subs['keypair_name'] = '(%s)' % key_name
        self._verify_response('keypairs-list-resp', subs, response, 200)

    def test_keypairs_get(self):
        # Get api sample of key pairs get request.
        key_name = self.test_keypairs_post()
        response = self._do_get('os-keypairs/%s' % key_name)
        subs = self._get_regexes()
        subs['keypair_name'] = '(%s)' % key_name
        self._verify_response('keypairs-get-resp', subs, response, 200)
class KeyPairsSampleXmlTest(KeyPairsSampleJsonTest):
    """Re-run the KeyPairs samples with the XML content type."""
    ctype = 'xml'
class RescueJsonTest(ServersSampleBase):
    """API samples for the os-rescue extension."""

    extension_name = ("nova.api.openstack.compute.contrib"
                      ".rescue.Rescue")

    def _rescue(self, uuid):
        """Put the server into rescue mode and verify the response."""
        subs = {'password': 'MySecretPass'}
        response = self._do_post('servers/%s/action' % uuid,
                                 'server-rescue-req', subs)
        self._verify_response('server-rescue', subs, response, 200)

    def _unrescue(self, uuid):
        """Take the server out of rescue mode; expect a bare 202."""
        response = self._do_post('servers/%s/action' % uuid,
                                 'server-unrescue-req', {})
        self.assertEqual(202, response.status_code)

    def _check_server_status(self, uuid, status, template):
        """GET the server and verify *template* with the given status."""
        response = self._do_get('servers/%s' % uuid)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        subs['id'] = uuid
        subs['status'] = status
        self._verify_response(template, subs, response, 200)

    def test_server_rescue(self):
        uuid = self._post_server()
        self._rescue(uuid)
        # Do a server get to make sure that the 'RESCUE' state is set.
        self._check_server_status(uuid, 'RESCUE', 'server-get-resp-rescue')

    def test_server_unrescue(self):
        uuid = self._post_server()
        self._rescue(uuid)
        self._unrescue(uuid)
        # Do a server get to make sure that the 'ACTIVE' state is back.
        self._check_server_status(uuid, 'ACTIVE', 'server-get-resp-unrescue')
class RescueXmlTest(RescueJsonTest):
    """Re-run the Rescue samples with the XML content type."""
    ctype = 'xml'
class ExtendedRescueWithImageJsonTest(ServersSampleBase):
    """API samples for the os-extended-rescue-with-image extension."""

    extension_name = ("nova.api.openstack.compute.contrib"
                      ".extended_rescue_with_image.Extended_rescue_with_image")

    def _get_flags(self):
        f = super(ExtendedRescueWithImageJsonTest, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        # ExtendedRescueWithImage extension also needs Rescue to be loaded.
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.rescue.Rescue')
        return f

    def _rescue(self, uuid):
        # Rescue with an explicit rescue image, not only a password.
        req_subs = {
            'password': 'MySecretPass',
            'rescue_image_ref': fake.get_valid_image_id()
        }
        response = self._do_post('servers/%s/action' % uuid,
                                 'server-rescue-req', req_subs)
        self._verify_response('server-rescue', req_subs, response, 200)

    def test_server_rescue(self):
        uuid = self._post_server()
        self._rescue(uuid)
        # Do a server get to make sure that the 'RESCUE' state is set
        response = self._do_get('servers/%s' % uuid)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        subs['id'] = uuid
        subs['status'] = 'RESCUE'
        self._verify_response('server-get-resp-rescue', subs, response, 200)
class ExtendedRescueWithImageXmlTest(ExtendedRescueWithImageJsonTest):
    """Re-run the ExtendedRescueWithImage samples with XML content type."""
    ctype = 'xml'
class ShelveJsonTest(ServersSampleBase):
    """API samples for the os-shelve extension."""

    extension_name = "nova.api.openstack.compute.contrib.shelve.Shelve"

    def setUp(self):
        super(ShelveJsonTest, self).setUp()
        # Don't offload instance, so we can test the offload call.
        CONF.set_override('shelved_offload_time', -1)

    def _test_server_action(self, uuid, template, action):
        """POST *action* to the server; expect an empty 202 reply."""
        url = 'servers/%s/action' % uuid
        response = self._do_post(url, template, {'action': action})
        self.assertEqual(202, response.status_code)
        self.assertEqual("", response.content)

    def test_shelve(self):
        server_id = self._post_server()
        self._test_server_action(server_id, 'os-shelve', 'shelve')

    def test_shelve_offload(self):
        server_id = self._post_server()
        self._test_server_action(server_id, 'os-shelve', 'shelve')
        self._test_server_action(server_id, 'os-shelve-offload',
                                 'shelveOffload')

    def test_unshelve(self):
        server_id = self._post_server()
        self._test_server_action(server_id, 'os-shelve', 'shelve')
        self._test_server_action(server_id, 'os-unshelve', 'unshelve')
class ShelveXmlTest(ShelveJsonTest):
    """Re-run the Shelve samples with the XML content type."""
    ctype = 'xml'
class VirtualInterfacesJsonTest(ServersSampleBase):
    """API samples for the os-virtual-interfaces extension."""

    extension_name = ("nova.api.openstack.compute.contrib"
                      ".virtual_interfaces.Virtual_interfaces")

    def test_vifs_list(self):
        """List the virtual interfaces of a freshly booted server."""
        server_id = self._post_server()
        response = self._do_get('servers/%s/os-virtual-interfaces'
                                % server_id)
        subs = self._get_regexes()
        # MAC addresses are generated; match them with a regex.
        subs['mac_addr'] = '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
        self._verify_response('vifs-list-resp', subs, response, 200)
class VirtualInterfacesXmlTest(VirtualInterfacesJsonTest):
    """Re-run the VirtualInterfaces samples with the XML content type."""
    ctype = 'xml'
class CloudPipeSampleJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-cloudpipe extension.

    setUp stubs out the user-data generation and the network lookup so
    the tests do not need a real network or VPN instance.
    """

    extension_name = "nova.api.openstack.compute.contrib.cloudpipe.Cloudpipe"

    def setUp(self):
        super(CloudPipeSampleJsonTest, self).setUp()

        def get_user_data(self, project_id):
            """Stub method to generate user data for cloudpipe tests."""
            return "VVNFUiBEQVRB\n"

        def network_api_get(self, context, network_uuid):
            """Stub to get a valid network and its information."""
            return {'vpn_public_address': '127.0.0.1',
                    'vpn_public_port': 22}
        self.stubs.Set(pipelib.CloudPipe, 'get_encoded_zip', get_user_data)
        self.stubs.Set(network_api.API, "get",
                       network_api_get)

    def generalize_subs(self, subs, vanilla_regexes):
        # Project ids are random per run; match them with a regex when
        # comparing against the stored samples.
        subs['project_id'] = 'cloudpipe-[0-9a-f-]+'
        return subs

    def test_cloud_pipe_create(self):
        # Get api samples of cloud pipe extension creation.
        self.flags(vpn_image_id=fake.get_valid_image_id())
        project = {'project_id': 'cloudpipe-' + str(uuid_lib.uuid4())}
        response = self._do_post('os-cloudpipe', 'cloud-pipe-create-req',
                                 project)
        subs = self._get_regexes()
        subs.update(project)
        subs['image_id'] = CONF.vpn_image_id
        self._verify_response('cloud-pipe-create-resp', subs, response, 200)
        return project

    def test_cloud_pipe_list(self):
        # Get api samples of cloud pipe extension get request.
        project = self.test_cloud_pipe_create()
        response = self._do_get('os-cloudpipe')
        subs = self._get_regexes()
        subs.update(project)
        subs['image_id'] = CONF.vpn_image_id
        self._verify_response('cloud-pipe-get-resp', subs, response, 200)
class CloudPipeSampleXmlTest(CloudPipeSampleJsonTest):
    """Re-run the CloudPipe samples with the XML content type."""
    ctype = "xml"
class CloudPipeUpdateJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-cloudpipe-update extension."""

    extension_name = ("nova.api.openstack.compute.contrib"
                      ".cloudpipe_update.Cloudpipe_update")

    def _get_flags(self):
        flags = super(CloudPipeUpdateJsonTest, self)._get_flags()
        flags['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        # Cloudpipe_update also needs cloudpipe to be loaded
        flags['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.cloudpipe.Cloudpipe')
        return flags

    def test_cloud_pipe_update(self):
        """Reconfigure the project VPN endpoint; expect an empty 202."""
        body = {'vpn_ip': '192.168.1.1',
                'vpn_port': 2000}
        response = self._do_put('os-cloudpipe/configure-project',
                                'cloud-pipe-update-req', body)
        self.assertEqual(202, response.status_code)
        self.assertEqual("", response.content)
class CloudPipeUpdateXmlTest(CloudPipeUpdateJsonTest):
    """Re-run the CloudPipeUpdate samples with the XML content type."""
    ctype = "xml"
class AgentsJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-agents extension.

    setUp replaces the agent-build DB layer with in-memory fakes so the
    tests exercise only the API layer.
    """

    extension_name = "nova.api.openstack.compute.contrib.agents.Agents"

    def _get_flags(self):
        f = super(AgentsJsonTest, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        return f

    def setUp(self):
        super(AgentsJsonTest, self).setUp()

        # The single fake agent build every DB stub below works from.
        fake_agents_list = [{'url': 'http://example.com/path/to/resource',
                             'hypervisor': 'hypervisor',
                             'architecture': 'x86',
                             'os': 'os',
                             'version': '8.0',
                             'md5hash': 'add6bb58e139be103324d04d82d8f545',
                             'id': 1}]

        def fake_agent_build_create(context, values):
            # Always hand back id 1, matching fake_agents_list.
            values['id'] = 1
            agent_build_ref = models.AgentBuild()
            agent_build_ref.update(values)
            return agent_build_ref

        def fake_agent_build_get_all(context, hypervisor):
            # Filter the fake list by hypervisor, like the real DB call.
            agent_build_all = []
            for agent in fake_agents_list:
                if hypervisor and hypervisor != agent['hypervisor']:
                    continue
                agent_build_ref = models.AgentBuild()
                agent_build_ref.update(agent)
                agent_build_all.append(agent_build_ref)
            return agent_build_all

        def fake_agent_build_update(context, agent_build_id, values):
            pass

        def fake_agent_build_destroy(context, agent_update_id):
            pass

        self.stubs.Set(db, "agent_build_create",
                       fake_agent_build_create)
        self.stubs.Set(db, "agent_build_get_all",
                       fake_agent_build_get_all)
        self.stubs.Set(db, "agent_build_update",
                       fake_agent_build_update)
        self.stubs.Set(db, "agent_build_destroy",
                       fake_agent_build_destroy)

    def test_agent_create(self):
        # Creates a new agent build.
        project = {'url': 'http://example.com/path/to/resource',
                'hypervisor': 'hypervisor',
                'architecture': 'x86',
                'os': 'os',
                'version': '8.0',
                'md5hash': 'add6bb58e139be103324d04d82d8f545'
                }
        response = self._do_post('os-agents', 'agent-post-req',
                                 project)
        project['agent_id'] = 1
        self._verify_response('agent-post-resp', project, response, 200)
        return project

    def test_agent_list(self):
        # Return a list of all agent builds.
        response = self._do_get('os-agents')
        project = {'url': 'http://example.com/path/to/resource',
                'hypervisor': 'hypervisor',
                'architecture': 'x86',
                'os': 'os',
                'version': '8.0',
                'md5hash': 'add6bb58e139be103324d04d82d8f545',
                'agent_id': 1
                }
        self._verify_response('agents-get-resp', project, response, 200)

    def test_agent_update(self):
        # Update an existing agent build.
        agent_id = 1
        subs = {'version': '7.0',
                'url': 'http://example.com/path/to/resource',
                'md5hash': 'add6bb58e139be103324d04d82d8f545'}
        response = self._do_put('os-agents/%s' % agent_id,
                                'agent-update-put-req', subs)
        subs['agent_id'] = 1
        self._verify_response('agent-update-put-resp', subs, response, 200)

    def test_agent_delete(self):
        # Deletes an existing agent build.
        agent_id = 1
        response = self._do_delete('os-agents/%s' % agent_id)
        self.assertEqual(response.status_code, 200)
class AgentsXmlTest(AgentsJsonTest):
    """Re-run the Agents samples with the XML content type."""
    ctype = "xml"
class FixedIpJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-fixed-ips extension.

    setUp replaces the fixed-ip DB layer with in-memory fakes built
    around two hard-coded fixed IPs (192.168.1.1 and 192.168.1.2).
    """

    extension_name = "nova.api.openstack.compute.contrib.fixed_ips.Fixed_ips"

    def _get_flags(self):
        f = super(FixedIpJsonTest, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        return f

    def setUp(self):
        super(FixedIpJsonTest, self).setUp()

        instance = dict(test_utils.get_test_instance(),
                        hostname='openstack', host='host')
        # Two fake fixed IPs the DB stubs below serve and mutate.
        fake_fixed_ips = [{'id': 1,
                   'address': '192.168.1.1',
                   'network_id': 1,
                   'virtual_interface_id': 1,
                   'instance_uuid': '1',
                   'allocated': False,
                   'leased': False,
                   'reserved': False,
                   'created_at': None,
                   'deleted_at': None,
                   'updated_at': None,
                   'deleted': None,
                   'instance': instance,
                   'network': test_network.fake_network,
                   'host': None},
                  {'id': 2,
                   'address': '192.168.1.2',
                   'network_id': 1,
                   'virtual_interface_id': 2,
                   'instance_uuid': '2',
                   'allocated': False,
                   'leased': False,
                   'reserved': False,
                   'created_at': None,
                   'deleted_at': None,
                   'updated_at': None,
                   'deleted': None,
                   'instance': instance,
                   'network': test_network.fake_network,
                   'host': None},
                  ]

        def fake_fixed_ip_get_by_address(context, address,
                                         columns_to_join=None):
            for fixed_ip in fake_fixed_ips:
                if fixed_ip['address'] == address:
                    return fixed_ip
            raise exception.FixedIpNotFoundForAddress(address=address)

        def fake_fixed_ip_get_by_address_detailed(context, address):
            # Returns the (fixed_ip, network, host) triple the detailed
            # lookup is expected to provide.
            network = {'id': 1,
                       'cidr': "192.168.1.0/24"}
            host = {'host': "host",
                    'hostname': 'openstack'}
            for fixed_ip in fake_fixed_ips:
                if fixed_ip['address'] == address:
                    return (fixed_ip, network, host)
            raise exception.FixedIpNotFoundForAddress(address=address)

        def fake_fixed_ip_update(context, address, values):
            fixed_ip = fake_fixed_ip_get_by_address(context, address)
            if fixed_ip is None:
                raise exception.FixedIpNotFoundForAddress(address=address)
            else:
                for key in values:
                    fixed_ip[key] = values[key]

        self.stubs.Set(db, "fixed_ip_get_by_address",
                       fake_fixed_ip_get_by_address)
        self.stubs.Set(db, "fixed_ip_get_by_address_detailed",
                       fake_fixed_ip_get_by_address_detailed)
        self.stubs.Set(db, "fixed_ip_update", fake_fixed_ip_update)

    def test_fixed_ip_reserve(self):
        # Reserve a Fixed IP.
        project = {'reserve': None}
        response = self._do_post('os-fixed-ips/192.168.1.1/action',
                                 'fixedip-post-req',
                                 project)
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")

    def test_get_fixed_ip(self):
        # Return data about the given fixed ip.
        response = self._do_get('os-fixed-ips/192.168.1.1')
        project = {'cidr': '192.168.1.0/24',
                   'hostname': 'openstack',
                   'host': 'host',
                   'address': '192.168.1.1'}
        self._verify_response('fixedips-get-resp', project, response, 200)
class FixedIpXmlTest(FixedIpJsonTest):
    """Re-run the FixedIp samples with the XML content type."""
    ctype = "xml"
class AggregatesSampleJsonTest(ServersSampleBase):
    """API samples for the os-aggregates extension."""

    extension_name = "nova.api.openstack.compute.contrib" + \
                     ".aggregates.Aggregates"

    # Raw string literal: this value is a regex, so the backslash in
    # '\d' must survive as a literal backslash escape for the matcher.
    create_subs = {
        "aggregate_id": r'(?P<id>\d+)'
    }

    def _create_aggregate(self):
        """POST an aggregate built from the sample request template."""
        return self._do_post('os-aggregates', 'aggregate-post-req',
                             self.create_subs)

    def test_aggregate_create(self):
        response = self._create_aggregate()
        # Copy before updating: mutating self.create_subs in place would
        # leak the regex entries into the shared class attribute and
        # affect every later test in the same process.
        subs = dict(self.create_subs)
        subs.update(self._get_regexes())
        return self._verify_response('aggregate-post-resp',
                                     subs, response, 200)

    def test_list_aggregates(self):
        self._create_aggregate()
        response = self._do_get('os-aggregates')
        subs = self._get_regexes()
        self._verify_response('aggregates-list-get-resp', subs, response, 200)

    def test_aggregate_get(self):
        # The first aggregate created always has id 1.
        self._create_aggregate()
        response = self._do_get('os-aggregates/%s' % 1)
        subs = self._get_regexes()
        self._verify_response('aggregates-get-resp', subs, response, 200)

    def test_add_metadata(self):
        self._create_aggregate()
        response = self._do_post('os-aggregates/%s/action' % 1,
                                 'aggregate-metadata-post-req',
                                 {'action': 'set_metadata'})
        subs = self._get_regexes()
        self._verify_response('aggregates-metadata-post-resp', subs,
                              response, 200)

    def test_add_host(self):
        self._create_aggregate()
        subs = {
            "host_name": self.compute.host,
        }
        response = self._do_post('os-aggregates/%s/action' % 1,
                                 'aggregate-add-host-post-req', subs)
        subs.update(self._get_regexes())
        self._verify_response('aggregates-add-host-post-resp', subs,
                              response, 200)

    def test_remove_host(self):
        # Reuses test_add_host so there is a host to remove.
        self.test_add_host()
        subs = {
            "host_name": self.compute.host,
        }
        response = self._do_post('os-aggregates/1/action',
                                 'aggregate-remove-host-post-req', subs)
        subs.update(self._get_regexes())
        self._verify_response('aggregates-remove-host-post-resp',
                              subs, response, 200)

    def test_update_aggregate(self):
        self._create_aggregate()
        response = self._do_put('os-aggregates/%s' % 1,
                                'aggregate-update-post-req', {})
        subs = self._get_regexes()
        self._verify_response('aggregate-update-post-resp',
                              subs, response, 200)
class AggregatesSampleXmlTest(AggregatesSampleJsonTest):
    """Re-run the Aggregates samples with the XML content type."""
    ctype = 'xml'
class CertificatesSamplesJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-certificates extension."""

    extension_name = ("nova.api.openstack.compute.contrib.certificates."
                      "Certificates")

    def test_create_certificates(self):
        """Create a certificate and verify the create sample."""
        response = self._do_post('os-certificates',
                                 'certificate-create-req', {})
        regexes = self._get_regexes()
        self._verify_response('certificate-create-resp', regexes,
                              response, 200)

    def test_get_root_certificate(self):
        """Fetch the root certificate and verify the sample."""
        response = self._do_get('os-certificates/root')
        regexes = self._get_regexes()
        self._verify_response('certificate-get-root-resp', regexes,
                              response, 200)
class CertificatesSamplesXmlTest(CertificatesSamplesJsonTest):
    """Re-run the Certificates samples with the XML content type."""
    ctype = 'xml'
class UsedLimitsSamplesJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-used-limits extension."""

    extension_name = ("nova.api.openstack.compute.contrib.used_limits."
                      "Used_limits")

    def test_get_used_limits(self):
        """GET /limits and verify the used-limits sample."""
        response = self._do_get('limits')
        regexes = self._get_regexes()
        self._verify_response('usedlimits-get-resp', regexes, response, 200)
class UsedLimitsSamplesXmlTest(UsedLimitsSamplesJsonTest):
    """Re-run the UsedLimits samples with the XML content type."""
    ctype = "xml"
class UsedLimitsForAdminSamplesJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-used-limits-for-admin extension."""

    extends_name = ("nova.api.openstack.compute.contrib.used_limits."
                    "Used_limits")
    extension_name = (
        "nova.api.openstack.compute.contrib.used_limits_for_admin."
        "Used_limits_for_admin")

    def test_get_used_limits_for_admin(self):
        """GET /limits for a specific tenant and verify the sample."""
        response = self._do_get('limits?tenant_id=%s' % 'openstack')
        regexes = self._get_regexes()
        return self._verify_response('usedlimitsforadmin-get-resp', regexes,
                                     response, 200)
class UsedLimitsForAdminSamplesXmlTest(UsedLimitsForAdminSamplesJsonTest):
    """Re-run the UsedLimitsForAdmin samples with the XML content type."""
    ctype = "xml"
class MultipleCreateJsonTest(ServersSampleBase):
    """API samples for the os-multiple-create extension."""

    extension_name = ("nova.api.openstack.compute.contrib.multiple_create."
                      "Multiple_create")

    def _boot_multiple(self, template_base):
        """POST a multi-server create request and verify its sample pair."""
        subs = {
            'image_id': fake.get_valid_image_id(),
            'host': self._get_host(),
            'min_count': "2",
            'max_count': "3"
        }
        response = self._do_post('servers', '%s-post-req' % template_base,
                                 subs)
        subs.update(self._get_regexes())
        self._verify_response('%s-post-resp' % template_base, subs,
                              response, 202)

    def test_multiple_create(self):
        """Boot several servers at once, with a reservation id."""
        self._boot_multiple('multiple-create')

    def test_multiple_create_without_reservation_id(self):
        """Boot several servers at once, without a reservation id."""
        self._boot_multiple('multiple-create-no-resv')
class MultipleCreateXmlTest(MultipleCreateJsonTest):
    """Re-run the MultipleCreate samples with the XML content type."""
    ctype = 'xml'
class ServicesJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-services extension.

    setUp stubs out the services DB layer and freezes time so the
    up/down state computed from service heartbeats is deterministic.
    """

    extension_name = "nova.api.openstack.compute.contrib.services.Services"

    def setUp(self):
        super(ServicesJsonTest, self).setUp()
        self.stubs.Set(db, "service_get_all",
                       test_services.fake_db_api_service_get_all)
        self.stubs.Set(timeutils, "utcnow", test_services.fake_utcnow)
        self.stubs.Set(timeutils, "utcnow_ts", test_services.fake_utcnow_ts)
        self.stubs.Set(db, "service_get_by_args",
                       test_services.fake_service_get_by_host_binary)
        self.stubs.Set(db, "service_update",
                       test_services.fake_service_update)

    def tearDown(self):
        super(ServicesJsonTest, self).tearDown()
        timeutils.clear_time_override()

    def fake_load(self, service_name):
        # Pretend only the os-extended-services extension is loaded; used
        # by the tests that exercise the extended (detail/reason) views.
        return service_name == 'os-extended-services'

    def test_services_list(self):
        """Return a list of all agent builds."""
        response = self._do_get('os-services')
        subs = {'binary': 'nova-compute',
                'host': 'host1',
                'zone': 'nova',
                'status': 'disabled',
                'state': 'up'}
        subs.update(self._get_regexes())
        self._verify_response('services-list-get-resp', subs, response, 200)

    def test_service_enable(self):
        """Enable an existing agent build."""
        subs = {"host": "host1",
                'binary': 'nova-compute'}
        response = self._do_put('os-services/enable',
                                'service-enable-put-req', subs)
        subs = {"host": "host1",
                "binary": "nova-compute"}
        self._verify_response('service-enable-put-resp', subs, response, 200)

    def test_service_disable(self):
        """Disable an existing agent build."""
        subs = {"host": "host1",
                'binary': 'nova-compute'}
        response = self._do_put('os-services/disable',
                                'service-disable-put-req', subs)
        subs = {"host": "host1",
                "binary": "nova-compute"}
        self._verify_response('service-disable-put-resp', subs, response, 200)

    def test_service_detail(self):
        """Return a list of all running services with the disable reason
        information if that exists.
        """
        self.stubs.Set(extensions.ExtensionManager, "is_loaded",
                       self.fake_load)
        response = self._do_get('os-services')
        self.assertEqual(response.status_code, 200)
        subs = {'binary': 'nova-compute',
                'host': 'host1',
                'zone': 'nova',
                'status': 'disabled',
                'state': 'up'}
        subs.update(self._get_regexes())
        self._verify_response('services-get-resp',
                              subs, response, 200)

    def test_service_disable_log_reason(self):
        """Disable an existing service and log the reason."""
        self.stubs.Set(extensions.ExtensionManager, "is_loaded",
                       self.fake_load)
        subs = {"host": "host1",
                'binary': 'nova-compute',
                'disabled_reason': 'test2'}
        response = self._do_put('os-services/disable-log-reason',
                                'service-disable-log-put-req', subs)
        return self._verify_response('service-disable-log-put-resp',
                                     subs, response, 200)
class ServicesXmlTest(ServicesJsonTest):
    """Re-run the Services samples with the XML content type."""
    ctype = 'xml'
class ExtendedServicesJsonTest(ApiSampleTestBaseV2):
    """This extension is extending the functionalities of the
    Services extension so the functionalities introduced by this extension
    are tested in the ServicesJsonTest and ServicesXmlTest classes.
    """

    extension_name = ("nova.api.openstack.compute.contrib."
                      "extended_services.Extended_services")
class ExtendedServicesXmlTest(ExtendedServicesJsonTest):
    """This extension is tested in the ServicesXmlTest class."""
    ctype = 'xml'
@mock.patch.object(db, 'service_get_all',
                   side_effect=test_services.fake_db_api_service_get_all)
@mock.patch.object(db, 'service_get_by_args',
                   side_effect=test_services.fake_service_get_by_host_binary)
class ExtendedServicesDeleteJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-extended-services-delete extension.

    The class decorators above mock out the services DB layer, so every
    test method receives the mock objects via *mocks.
    """

    extends_name = ("nova.api.openstack.compute.contrib.services.Services")
    extension_name = ("nova.api.openstack.compute.contrib."
                      "extended_services_delete.Extended_services_delete")

    def setUp(self):
        super(ExtendedServicesDeleteJsonTest, self).setUp()
        # Freeze time so the computed up/down state is deterministic.
        timeutils.set_time_override(test_services.fake_utcnow())

    def tearDown(self):
        super(ExtendedServicesDeleteJsonTest, self).tearDown()
        timeutils.clear_time_override()

    def test_service_detail(self, *mocks):
        """Return a list of all running services with the disable reason
        information if that exists.
        """
        response = self._do_get('os-services')
        self.assertEqual(response.status_code, 200)
        subs = {'id': 1,
                'binary': 'nova-compute',
                'host': 'host1',
                'zone': 'nova',
                'status': 'disabled',
                'state': 'up'}
        subs.update(self._get_regexes())
        return self._verify_response('services-get-resp',
                                     subs, response, 200)

    def test_service_delete(self, *mocks):
        # Deleting a service returns an empty 204.
        response = self._do_delete('os-services/1')
        self.assertEqual(response.status_code, 204)
        self.assertEqual(response.content, "")
class ExtendedServicesDeleteXmlTest(ExtendedServicesDeleteJsonTest):
    """This extension is tested in the ExtendedServicesDeleteJsonTest class."""
    # Only the content type differs; all tests are inherited.
    ctype = 'xml'
class SimpleTenantUsageSampleJsonTest(ServersSampleBase):
    """API samples for the os-simple-tenant-usage extension (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib.simple_tenant_usage."
                      "Simple_tenant_usage")
    def setUp(self):
        """setUp method for simple tenant usage."""
        super(SimpleTenantUsageSampleJsonTest, self).setUp()
        # Boot a server at time `started`, then advance the clock one hour
        # so the usage query window [started, now] contains the instance.
        started = timeutils.utcnow()
        now = started + datetime.timedelta(hours=1)
        timeutils.set_time_override(started)
        self._post_server()
        timeutils.set_time_override(now)
        self.query = {
            'start': str(started),
            'end': str(now)
        }
    def tearDown(self):
        """tearDown method for simple tenant usage."""
        super(SimpleTenantUsageSampleJsonTest, self).tearDown()
        timeutils.clear_time_override()
    def test_get_tenants_usage(self):
        # Get api sample to get all tenants usage request.
        response = self._do_get('os-simple-tenant-usage?%s' % (
                                                 urllib.urlencode(self.query)))
        subs = self._get_regexes()
        self._verify_response('simple-tenant-usage-get', subs, response, 200)
    def test_get_tenant_usage_details(self):
        # Get api sample to get specific tenant usage request.
        tenant_id = 'openstack'
        response = self._do_get('os-simple-tenant-usage/%s?%s' % (tenant_id,
                                                 urllib.urlencode(self.query)))
        subs = self._get_regexes()
        self._verify_response('simple-tenant-usage-get-specific', subs,
                              response, 200)
class SimpleTenantUsageSampleXmlTest(SimpleTenantUsageSampleJsonTest):
    """Run the simple-tenant-usage sample tests with XML serialization."""
    ctype = "xml"
class ServerDiagnosticsSamplesJsonTest(ServersSampleBase):
    """API samples for the server diagnostics extension (JSON)."""

    extension_name = ("nova.api.openstack.compute.contrib.server_diagnostics."
                      "Server_diagnostics")

    def test_server_diagnostics_get(self):
        # Boot a server, then fetch its diagnostics and check the sample.
        server_id = self._post_server()
        resp = self._do_get('servers/%s/diagnostics' % server_id)
        self._verify_response('server-diagnostics-get-resp',
                              self._get_regexes(), resp, 200)
class ServerDiagnosticsSamplesXmlTest(ServerDiagnosticsSamplesJsonTest):
    """Run the server diagnostics sample tests with XML serialization."""
    ctype = "xml"
class AvailabilityZoneJsonTest(ServersSampleBase):
    """API samples for the availability_zone server-create extension (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib.availability_zone."
                      "Availability_zone")
    def test_create_availability_zone(self):
        # Boot a server into an explicit availability zone and verify the
        # zone is reflected in the create response sample.
        subs = {
            'image_id': fake.get_valid_image_id(),
            'host': self._get_host(),
            "availability_zone": "nova"
        }
        response = self._do_post('servers', 'availability-zone-post-req', subs)
        subs.update(self._get_regexes())
        self._verify_response('availability-zone-post-resp', subs,
                              response, 202)
class AvailabilityZoneXmlTest(AvailabilityZoneJsonTest):
    """Run the availability-zone sample tests with XML serialization."""
    ctype = "xml"
class AdminActionsSamplesJsonTest(ServersSampleBase):
    """API samples for the os-admin-actions server actions (JSON).

    Each test POSTs one admin action to servers/<uuid>/action and checks
    for the expected 202 status; actions return no response body.
    """
    extension_name = ("nova.api.openstack.compute.contrib.admin_actions."
                      "Admin_actions")
    def setUp(self):
        """setUp Method for AdminActions api samples extension

        This method creates the server that will be used in each tests
        """
        super(AdminActionsSamplesJsonTest, self).setUp()
        self.uuid = self._post_server()
    def test_post_pause(self):
        # Get api samples to pause server request.
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-pause', {})
        self.assertEqual(response.status_code, 202)
    def test_post_unpause(self):
        # Get api samples to unpause server request.
        # Pause first so the unpause action is valid for the server state.
        self.test_post_pause()
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-unpause', {})
        self.assertEqual(response.status_code, 202)
    def test_post_suspend(self):
        # Get api samples to suspend server request.
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-suspend', {})
        self.assertEqual(response.status_code, 202)
    def test_post_resume(self):
        # Get api samples to server resume request.
        # Suspend first so the resume action is valid for the server state.
        self.test_post_suspend()
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-resume', {})
        self.assertEqual(response.status_code, 202)
    @mock.patch('nova.conductor.manager.ComputeTaskManager._cold_migrate')
    def test_post_migrate(self, mock_cold_migrate):
        # Get api samples to migrate server request.
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-migrate', {})
        self.assertEqual(response.status_code, 202)
    def test_post_reset_network(self):
        # Get api samples to reset server network request.
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-reset-network', {})
        self.assertEqual(response.status_code, 202)
    def test_post_inject_network_info(self):
        # Get api samples to inject network info request.
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-inject-network-info', {})
        self.assertEqual(response.status_code, 202)
    def test_post_lock_server(self):
        # Get api samples to lock server request.
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-lock-server', {})
        self.assertEqual(response.status_code, 202)
    def test_post_unlock_server(self):
        # Get api samples to unlock server request.
        # Lock first so the unlock action is meaningful.
        self.test_post_lock_server()
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-unlock-server', {})
        self.assertEqual(response.status_code, 202)
    def test_post_backup_server(self):
        # Get api samples to backup server request.
        def image_details(self, context, **kwargs):
            """This stub is specifically used on the backup action."""
            # NOTE(maurosr): I've added this simple stub cause backup action
            # was trapped in infinite loop during fetch image phase since the
            # fake Image Service always returns the same set of images
            return []
        self.stubs.Set(fake._FakeImageService, 'detail', image_details)
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-backup-server', {})
        self.assertEqual(response.status_code, 202)
    def test_post_live_migrate_server(self):
        # Get api samples to server live migrate request.
        def fake_live_migrate(_self, context, instance, scheduler_hint,
                              block_migration, disk_over_commit):
            # Verify the request targets the booted server and this host.
            self.assertEqual(self.uuid, instance["uuid"])
            host = scheduler_hint["host"]
            self.assertEqual(self.compute.host, host)
        self.stubs.Set(conductor_manager.ComputeTaskManager,
                       '_live_migrate',
                       fake_live_migrate)
        def fake_get_compute(context, host):
            # Minimal compute-node record so the target host checks pass.
            service = dict(host=host,
                           binary='nova-compute',
                           topic='compute',
                           report_count=1,
                           updated_at='foo',
                           hypervisor_type='bar',
                           hypervisor_version=
                                utils.convert_version_to_int('1.0'),
                           disabled=False)
            return {'compute_node': [service]}
        self.stubs.Set(db, "service_get_by_compute_host", fake_get_compute)
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-live-migrate',
                                 {'hostname': self.compute.host})
        self.assertEqual(response.status_code, 202)
    def test_post_reset_state(self):
        # get api samples to server reset state request.
        response = self._do_post('servers/%s/action' % self.uuid,
                                 'admin-actions-reset-server-state', {})
        self.assertEqual(response.status_code, 202)
class AdminActionsSamplesXmlTest(AdminActionsSamplesJsonTest):
    """Run the admin-actions sample tests with XML serialization."""
    ctype = 'xml'
class ConsolesSampleJsonTests(ServersSampleBase):
    """API samples for the console-type server actions (JSON).

    Each test requests a console of a given protocol and matches the
    returned URL against a permissive URL regex.
    """
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".consoles.Consoles")
    def setUp(self):
        super(ConsolesSampleJsonTests, self).setUp()
        # Enable every console protocol so all actions are available.
        self.flags(vnc_enabled=True)
        self.flags(enabled=True, group='spice')
        self.flags(enabled=True, group='rdp')
        self.flags(enabled=True, group='serial_console')
    def test_get_vnc_console(self):
        uuid = self._post_server()
        response = self._do_post('servers/%s/action' % uuid,
                                 'get-vnc-console-post-req',
                                {'action': 'os-getVNCConsole'})
        subs = self._get_regexes()
        # Match any http(s) console URL returned by the fake driver.
        subs["url"] = \
            "((https?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
        self._verify_response('get-vnc-console-post-resp', subs, response, 200)
    def test_get_spice_console(self):
        uuid = self._post_server()
        response = self._do_post('servers/%s/action' % uuid,
                                 'get-spice-console-post-req',
                                {'action': 'os-getSPICEConsole'})
        subs = self._get_regexes()
        subs["url"] = \
            "((https?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
        self._verify_response('get-spice-console-post-resp', subs,
                              response, 200)
    def test_get_rdp_console(self):
        uuid = self._post_server()
        response = self._do_post('servers/%s/action' % uuid,
                                 'get-rdp-console-post-req',
                                {'action': 'os-getRDPConsole'})
        subs = self._get_regexes()
        subs["url"] = \
            "((https?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
        self._verify_response('get-rdp-console-post-resp', subs,
                              response, 200)
    def test_get_serial_console(self):
        uuid = self._post_server()
        response = self._do_post('servers/%s/action' % uuid,
                                 'get-serial-console-post-req',
                                {'action': 'os-getSerialConsole'})
        subs = self._get_regexes()
        # Serial consoles are exposed over websockets (ws:// scheme).
        subs["url"] = \
            "((ws?):((//)|(\\\\))+([\w\d:#@%/;$()~_?\+-=\\\.&](#!)?)*)"
        self._verify_response('get-serial-console-post-resp', subs,
                              response, 200)
class ConsolesSampleXmlTests(ConsolesSampleJsonTests):
    """Run the console sample tests with XML serialization."""
    ctype = 'xml'
class ConsoleAuthTokensSampleJsonTests(ServersSampleBase):
    """API samples for the os-console-auth-tokens extension (JSON)."""
    extends_name = ("nova.api.openstack.compute.contrib.consoles.Consoles")
    extension_name = ("nova.api.openstack.compute.contrib.console_auth_tokens."
                      "Console_auth_tokens")
    def _get_console_url(self, data):
        # JSON responses carry the URL under console/url.
        return jsonutils.loads(data)["console"]["url"]
    def _get_console_token(self, uuid):
        # Request an RDP console and extract the auth token from its URL
        # query string (token=<value>).
        response = self._do_post('servers/%s/action' % uuid,
                                 'get-rdp-console-post-req',
                                {'action': 'os-getRDPConsole'})
        url = self._get_console_url(response.content)
        return re.match('.+?token=([^&]+)', url).groups()[0]
    def test_get_console_connect_info(self):
        self.flags(enabled=True, group='rdp')
        uuid = self._post_server()
        token = self._get_console_token(uuid)
        response = self._do_get('os-console-auth-tokens/%s' % token)
        subs = self._get_regexes()
        subs["uuid"] = uuid
        subs["host"] = r"[\w\.\-]+"
        subs["port"] = "[0-9]+"
        subs["internal_access_path"] = ".*"
        self._verify_response('get-console-connect-info-get-resp', subs,
                              response, 200)
class ConsoleAuthTokensSampleXmlTests(ConsoleAuthTokensSampleJsonTests):
    """XML variant: parses the console URL out of the XML response body."""
    ctype = 'xml'
    def _get_console_url(self, data):
        # XML responses carry the URL in a <url> element.
        return etree.fromstring(data).find('url').text
class DeferredDeleteSampleJsonTests(ServersSampleBase):
    """API samples for the deferred_delete (restore/forceDelete) extension."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".deferred_delete.Deferred_delete")
    def setUp(self):
        super(DeferredDeleteSampleJsonTests, self).setUp()
        # A non-zero reclaim interval makes DELETE soft-delete the server,
        # so restore/forceDelete are valid follow-up actions.
        self.flags(reclaim_instance_interval=1)
    def test_restore(self):
        uuid = self._post_server()
        response = self._do_delete('servers/%s' % uuid)
        response = self._do_post('servers/%s/action' % uuid,
                                 'restore-post-req', {})
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')
    def test_force_delete(self):
        uuid = self._post_server()
        response = self._do_delete('servers/%s' % uuid)
        response = self._do_post('servers/%s/action' % uuid,
                                 'force-delete-post-req', {})
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')
class DeferredDeleteSampleXmlTests(DeferredDeleteSampleJsonTests):
    """Run the deferred-delete sample tests with XML serialization."""
    ctype = 'xml'
class QuotasSampleJsonTests(ApiSampleTestBaseV2):
    """API samples for the os-quota-sets extension (JSON)."""
    extension_name = "nova.api.openstack.compute.contrib.quotas.Quotas"
    def test_show_quotas(self):
        # Get api sample to show quotas.
        response = self._do_get('os-quota-sets/fake_tenant')
        self._verify_response('quotas-show-get-resp', {}, response, 200)
    def test_show_quotas_defaults(self):
        # Get api sample to show quotas defaults.
        response = self._do_get('os-quota-sets/fake_tenant/defaults')
        self._verify_response('quotas-show-defaults-get-resp',
                              {}, response, 200)
    def test_update_quotas(self):
        # Get api sample to update quotas.
        response = self._do_put('os-quota-sets/fake_tenant',
                                'quotas-update-post-req',
                                {})
        self._verify_response('quotas-update-post-resp', {}, response, 200)
class QuotasSampleXmlTests(QuotasSampleJsonTests):
    """Run the quota sample tests with XML serialization."""
    ctype = "xml"
class ExtendedQuotasSampleJsonTests(ApiSampleTestBaseV2):
    """API samples for the extended_quotas extension (quota delete/update)."""
    extends_name = "nova.api.openstack.compute.contrib.quotas.Quotas"
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".extended_quotas.Extended_quotas")
    def test_delete_quotas(self):
        # Get api sample to delete quota.
        response = self._do_delete('os-quota-sets/fake_tenant')
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')
    def test_update_quotas(self):
        # Get api sample to update quotas.
        response = self._do_put('os-quota-sets/fake_tenant',
                                'quotas-update-post-req',
                                {})
        return self._verify_response('quotas-update-post-resp', {},
                                     response, 200)
class ExtendedQuotasSampleXmlTests(ExtendedQuotasSampleJsonTests):
    """Run the extended-quotas sample tests with XML serialization."""
    ctype = "xml"
class UserQuotasSampleJsonTests(ApiSampleTestBaseV2):
    """API samples for per-user quota operations (user_quotas extension)."""
    extends_name = "nova.api.openstack.compute.contrib.quotas.Quotas"
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".user_quotas.User_quotas")
    def fake_load(self, *args):
        # Stub ExtensionManager.is_loaded so dependent checks pass.
        return True
    def test_show_quotas_for_user(self):
        # Get api sample to show quotas for user.
        response = self._do_get('os-quota-sets/fake_tenant?user_id=1')
        self._verify_response('user-quotas-show-get-resp', {}, response, 200)
    def test_delete_quotas_for_user(self):
        # Get api sample to delete quota for user.
        self.stubs.Set(extensions.ExtensionManager, "is_loaded",
                       self.fake_load)
        response = self._do_delete('os-quota-sets/fake_tenant?user_id=1')
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')
    def test_update_quotas_for_user(self):
        # Get api sample to update quotas for user.
        response = self._do_put('os-quota-sets/fake_tenant?user_id=1',
                                'user-quotas-update-post-req',
                                {})
        return self._verify_response('user-quotas-update-post-resp', {},
                                     response, 200)
class UserQuotasSampleXmlTests(UserQuotasSampleJsonTests):
    """Run the user-quotas sample tests with XML serialization."""
    ctype = "xml"
class ExtendedIpsSampleJsonTests(ServersSampleBase):
    """API samples for the extended_ips server attributes (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".extended_ips.Extended_ips")
    def test_show(self):
        uuid = self._post_server()
        response = self._do_get('servers/%s' % uuid)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        subs['id'] = uuid
        subs['hypervisor_hostname'] = r'[\w\.\-]+'
        self._verify_response('server-get-resp', subs, response, 200)
    def test_detail(self):
        uuid = self._post_server()
        response = self._do_get('servers/detail')
        subs = self._get_regexes()
        subs['id'] = uuid
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('servers-detail-resp', subs, response, 200)
class ExtendedIpsSampleXmlTests(ExtendedIpsSampleJsonTests):
    """Run the extended-IPs sample tests with XML serialization."""
    ctype = 'xml'
class ExtendedIpsMacSampleJsonTests(ServersSampleBase):
    """API samples for the extended_ips_mac server attributes (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".extended_ips_mac.Extended_ips_mac")
    def test_show(self):
        uuid = self._post_server()
        response = self._do_get('servers/%s' % uuid)
        self.assertEqual(response.status_code, 200)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        subs['id'] = uuid
        subs['hypervisor_hostname'] = r'[\w\.\-]+'
        # Match any lower-case colon-separated MAC address.
        subs['mac_addr'] = '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
        self._verify_response('server-get-resp', subs, response, 200)
    def test_detail(self):
        uuid = self._post_server()
        response = self._do_get('servers/detail')
        self.assertEqual(response.status_code, 200)
        subs = self._get_regexes()
        subs['id'] = uuid
        subs['hostid'] = '[a-f0-9]+'
        subs['mac_addr'] = '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
        self._verify_response('servers-detail-resp', subs, response, 200)
class ExtendedIpsMacSampleXmlTests(ExtendedIpsMacSampleJsonTests):
    """Run the extended-IPs-MAC sample tests with XML serialization."""
    ctype = 'xml'
class ExtendedStatusSampleJsonTests(ServersSampleBase):
    """API samples for the extended_status server attributes (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".extended_status.Extended_status")
    def test_show(self):
        uuid = self._post_server()
        response = self._do_get('servers/%s' % uuid)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('server-get-resp', subs, response, 200)
    def test_detail(self):
        uuid = self._post_server()
        response = self._do_get('servers/detail')
        subs = self._get_regexes()
        subs['id'] = uuid
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('servers-detail-resp', subs, response, 200)
class ExtendedStatusSampleXmlTests(ExtendedStatusSampleJsonTests):
    """Run the extended-status sample tests with XML serialization."""
    ctype = 'xml'
class ExtendedVolumesSampleJsonTests(ServersSampleBase):
    """API samples for the extended_volumes server attributes (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".extended_volumes.Extended_volumes")
    def test_show(self):
        uuid = self._post_server()
        # Stub BDM lookup so the server appears to have attached volumes.
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fakes.stub_bdm_get_all_by_instance)
        response = self._do_get('servers/%s' % uuid)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('server-get-resp', subs, response, 200)
    def test_detail(self):
        uuid = self._post_server()
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fakes.stub_bdm_get_all_by_instance)
        response = self._do_get('servers/detail')
        subs = self._get_regexes()
        subs['id'] = uuid
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('servers-detail-resp', subs, response, 200)
class ExtendedVolumesSampleXmlTests(ExtendedVolumesSampleJsonTests):
    """Run the extended-volumes sample tests with XML serialization."""
    ctype = 'xml'
class ServerUsageSampleJsonTests(ServersSampleBase):
    """API samples for the server_usage attributes (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".server_usage.Server_usage")
    def test_show(self):
        uuid = self._post_server()
        response = self._do_get('servers/%s' % uuid)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        return self._verify_response('server-get-resp', subs, response, 200)
    def test_detail(self):
        self._post_server()
        response = self._do_get('servers/detail')
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        return self._verify_response('servers-detail-resp', subs,
                                     response, 200)
class ServerUsageSampleXmlTests(ServerUsageSampleJsonTests):
    """Run the server-usage sample tests with XML serialization."""
    ctype = 'xml'
class ExtendedVIFNetSampleJsonTests(ServersSampleBase):
    """API samples for the extended_virtual_interfaces_net extension."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".extended_virtual_interfaces_net.Extended_virtual_interfaces_net")
    def _get_flags(self):
        f = super(ExtendedVIFNetSampleJsonTests, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        # extended_virtual_interfaces_net_update also
        # needs virtual_interfaces to be loaded
        f['osapi_compute_extension'].append(
            ('nova.api.openstack.compute.contrib'
             '.virtual_interfaces.Virtual_interfaces'))
        return f
    def test_vifs_list(self):
        uuid = self._post_server()
        response = self._do_get('servers/%s/os-virtual-interfaces' % uuid)
        self.assertEqual(response.status_code, 200)
        subs = self._get_regexes()
        subs['mac_addr'] = '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
        self._verify_response('vifs-list-resp', subs, response, 200)
class ExtendedVIFNetSampleXmlTests(ExtendedVIFNetSampleJsonTests):
    """Run the extended-VIF-net sample tests with XML serialization.

    NOTE(review): previously this subclassed ExtendedIpsSampleJsonTests,
    which merely re-ran the extended-IPs tests and never exercised the
    Extended_virtual_interfaces_net extension over XML. Inheriting from
    ExtendedVIFNetSampleJsonTests matches the JSON/XML pairing used by
    every other sample-test class in this file.
    """
    ctype = 'xml'
class FlavorManageSampleJsonTests(ApiSampleTestBaseV2):
    """API samples for the flavormanage (create/delete flavor) extension."""
    extension_name = ("nova.api.openstack.compute.contrib.flavormanage."
                      "Flavormanage")
    def _create_flavor(self):
        """Create a flavor."""
        subs = {
            'flavor_id': 10,
            'flavor_name': "test_flavor"
        }
        response = self._do_post("flavors",
                                 "flavor-create-post-req",
                                 subs)
        subs.update(self._get_regexes())
        self._verify_response("flavor-create-post-resp", subs, response, 200)
    def test_create_flavor(self):
        # Get api sample to create a flavor.
        self._create_flavor()
    def test_delete_flavor(self):
        # Get api sample to delete a flavor.
        # Create it first so the DELETE has something to remove.
        self._create_flavor()
        response = self._do_delete("flavors/10")
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')
class FlavorManageSampleXmlTests(FlavorManageSampleJsonTests):
    """Run the flavor-manage sample tests with XML serialization."""
    ctype = "xml"
class ServerPasswordSampleJsonTests(ServersSampleBase):
    """API samples for the os-server-password extension (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib.server_password."
                      "Server_password")
    def test_get_password(self):
        # Mock password since there is no api to set it
        def fake_ext_password(*args, **kwargs):
            return ("xlozO3wLCBRWAa2yDjCCVx8vwNPypxnypmRYDa/zErlQ+EzPe1S/"
                    "Gz6nfmC52mOlOSCRuUOmG7kqqgejPof6M7bOezS387zjq4LSvvwp"
                    "28zUknzy4YzfFGhnHAdai3TxUJ26pfQCYrq8UTzmKF2Bq8ioSEtV"
                    "VzM0A96pDh8W2i7BOz6MdoiVyiev/I1K2LsuipfxSJR7Wdke4zNX"
                    "JjHHP2RfYsVbZ/k9ANu+Nz4iIH8/7Cacud/pphH7EjrY6a4RZNrj"
                    "QskrhKYed0YERpotyjYk1eDtRe72GrSiXteqCM4biaQ5w3ruS+Ac"
                    "X//PXk3uJ5kC7d67fPXaVz4WaQRYMg==")
        self.stubs.Set(password, "extract_password", fake_ext_password)
        uuid = self._post_server()
        response = self._do_get('servers/%s/os-server-password' % uuid)
        subs = self._get_regexes()
        # Escape '+' so the literal base64 value is regex-safe.
        subs['encrypted_password'] = fake_ext_password().replace('+', '\\+')
        self._verify_response('get-password-resp', subs, response, 200)
    def test_reset_password(self):
        uuid = self._post_server()
        response = self._do_delete('servers/%s/os-server-password' % uuid)
        self.assertEqual(response.status_code, 204)
class ServerPasswordSampleXmlTests(ServerPasswordSampleJsonTests):
    """Run the server-password sample tests with XML serialization."""
    ctype = "xml"
class DiskConfigJsonTest(ServersSampleBase):
    """API samples for the disk_config extension across server/image APIs."""
    extension_name = ("nova.api.openstack.compute.contrib.disk_config."
                      "Disk_config")
    def test_list_servers_detail(self):
        uuid = self._post_server()
        response = self._do_get('servers/detail')
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        subs['id'] = uuid
        self._verify_response('list-servers-detail-get', subs, response, 200)
    def test_get_server(self):
        uuid = self._post_server()
        response = self._do_get('servers/%s' % uuid)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('server-get-resp', subs, response, 200)
    def test_update_server(self):
        uuid = self._post_server()
        response = self._do_put('servers/%s' % uuid,
                                'server-update-put-req', {})
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('server-update-put-resp', subs, response, 200)
    def test_resize_server(self):
        # Allow resizing onto the single test host.
        self.flags(allow_resize_to_same_host=True)
        uuid = self._post_server()
        response = self._do_post('servers/%s/action' % uuid,
                                 'server-resize-post-req', {})
        self.assertEqual(response.status_code, 202)
        # NOTE(tmello): Resize does not return response body
        # Bug #1085213.
        self.assertEqual(response.content, "")
    def test_rebuild_server(self):
        uuid = self._post_server()
        subs = {
            'image_id': fake.get_valid_image_id(),
            'host': self._get_host(),
        }
        response = self._do_post('servers/%s/action' % uuid,
                                 'server-action-rebuild-req', subs)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('server-action-rebuild-resp',
                              subs, response, 202)
    def test_get_image(self):
        image_id = fake.get_valid_image_id()
        response = self._do_get('images/%s' % image_id)
        subs = self._get_regexes()
        subs['image_id'] = image_id
        self._verify_response('image-get-resp', subs, response, 200)
    def test_list_images(self):
        response = self._do_get('images/detail')
        subs = self._get_regexes()
        self._verify_response('image-list-resp', subs, response, 200)
class DiskConfigXmlTest(DiskConfigJsonTest):
    """Run the disk-config sample tests with XML serialization."""
    ctype = 'xml'
class OsNetworksJsonTests(ApiSampleTestBaseV2):
    """API samples for the os-tenant-networks extension (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib.os_tenant_networks"
                      ".Os_tenant_networks")
    def setUp(self):
        super(OsNetworksJsonTests, self).setUp()
        CONF.set_override("enable_network_quota", True)
        # No-op all quota bookkeeping so the tests exercise only the API.
        def fake(*args, **kwargs):
            pass
        self.stubs.Set(nova.quota.QUOTAS, "reserve", fake)
        self.stubs.Set(nova.quota.QUOTAS, "commit", fake)
        self.stubs.Set(nova.quota.QUOTAS, "rollback", fake)
        self.stubs.Set(nova.quota.QuotaEngine, "reserve", fake)
        self.stubs.Set(nova.quota.QuotaEngine, "commit", fake)
        self.stubs.Set(nova.quota.QuotaEngine, "rollback", fake)
    def test_list_networks(self):
        response = self._do_get('os-tenant-networks')
        subs = self._get_regexes()
        self._verify_response('networks-list-res', subs, response, 200)
    def test_create_network(self):
        response = self._do_post('os-tenant-networks', "networks-post-req", {})
        subs = self._get_regexes()
        self._verify_response('networks-post-res', subs, response, 200)
    def test_delete_network(self):
        # Create a network, then delete it by the id echoed in the response.
        response = self._do_post('os-tenant-networks', "networks-post-req", {})
        net = jsonutils.loads(response.content)
        response = self._do_delete('os-tenant-networks/%s' %
                                                net["network"]["id"])
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")
class OsNetworksXmlTests(OsNetworksJsonTests):
    """XML variant: re-implements delete to parse the id from XML."""
    ctype = 'xml'
    def test_delete_network(self):
        response = self._do_post('os-tenant-networks', "networks-post-req", {})
        # The network id comes back in an <id> element rather than JSON.
        net = etree.fromstring(response.content)
        network_id = net.find('id').text
        response = self._do_delete('os-tenant-networks/%s' % network_id)
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")
class NetworksJsonTests(ApiSampleTestBaseV2):
    """API samples for the os-networks admin extension (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".os_networks.Os_networks")
    def setUp(self):
        super(NetworksJsonTests, self).setUp()
        # Replace the network API with an in-memory fake so no real
        # network service is needed.
        fake_network_api = test_networks.FakeNetworkAPI()
        self.stubs.Set(network_api.API, "get_all",
                       fake_network_api.get_all)
        self.stubs.Set(network_api.API, "get",
                       fake_network_api.get)
        self.stubs.Set(network_api.API, "associate",
                       fake_network_api.associate)
        self.stubs.Set(network_api.API, "delete",
                       fake_network_api.delete)
        self.stubs.Set(network_api.API, "create",
                       fake_network_api.create)
        self.stubs.Set(network_api.API, "add_network_to_project",
                       fake_network_api.add_network_to_project)
    def test_network_list(self):
        response = self._do_get('os-networks')
        subs = self._get_regexes()
        self._verify_response('networks-list-resp', subs, response, 200)
    def test_network_disassociate(self):
        uuid = test_networks.FAKE_NETWORKS[0]['uuid']
        response = self._do_post('os-networks/%s/action' % uuid,
                                 'networks-disassociate-req', {})
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")
    def test_network_show(self):
        uuid = test_networks.FAKE_NETWORKS[0]['uuid']
        response = self._do_get('os-networks/%s' % uuid)
        subs = self._get_regexes()
        self._verify_response('network-show-resp', subs, response, 200)
    def test_network_create(self):
        response = self._do_post("os-networks",
                                 'network-create-req', {})
        subs = self._get_regexes()
        self._verify_response('network-create-resp', subs, response, 200)
    def test_network_add(self):
        response = self._do_post("os-networks/add",
                                 'network-add-req', {})
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")
    def test_network_delete(self):
        response = self._do_delete('os-networks/always_delete')
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")
class NetworksXmlTests(NetworksJsonTests):
    """Run the os-networks sample tests with XML serialization."""
    ctype = 'xml'
class ExtendedNetworksJsonTests(ApiSampleTestBaseV2):
    """API samples for the extended_networks attributes (JSON)."""
    extends_name = ("nova.api.openstack.compute.contrib."
                    "os_networks.Os_networks")
    extension_name = ("nova.api.openstack.compute.contrib."
                      "extended_networks.Extended_networks")
    def setUp(self):
        super(ExtendedNetworksJsonTests, self).setUp()
        # Same in-memory fake network API as NetworksJsonTests.
        fake_network_api = test_networks.FakeNetworkAPI()
        self.stubs.Set(network_api.API, "get_all",
                       fake_network_api.get_all)
        self.stubs.Set(network_api.API, "get",
                       fake_network_api.get)
        self.stubs.Set(network_api.API, "associate",
                       fake_network_api.associate)
        self.stubs.Set(network_api.API, "delete",
                       fake_network_api.delete)
        self.stubs.Set(network_api.API, "create",
                       fake_network_api.create)
        self.stubs.Set(network_api.API, "add_network_to_project",
                       fake_network_api.add_network_to_project)
    def test_network_list(self):
        response = self._do_get('os-networks')
        subs = self._get_regexes()
        self._verify_response('networks-list-resp', subs, response, 200)
    def test_network_show(self):
        uuid = test_networks.FAKE_NETWORKS[0]['uuid']
        response = self._do_get('os-networks/%s' % uuid)
        subs = self._get_regexes()
        self._verify_response('network-show-resp', subs, response, 200)
    def test_network_create(self):
        response = self._do_post("os-networks",
                                 'network-create-req', {})
        subs = self._get_regexes()
        self._verify_response('network-create-resp', subs, response, 200)
class ExtendedNetworksXmlTests(ExtendedNetworksJsonTests):
    """Run the extended-networks sample tests with XML serialization."""
    ctype = 'xml'
class NetworksAssociateJsonTests(ApiSampleTestBaseV2):
    """API samples for the networks_associate actions (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".networks_associate.Networks_associate")
    # Sentinel default used to detect which keyword was supplied.
    _sentinel = object()
    def _get_flags(self):
        f = super(NetworksAssociateJsonTests, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        # Networks_associate requires Networks to be update
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.os_networks.Os_networks')
        return f
    def setUp(self):
        super(NetworksAssociateJsonTests, self).setUp()
        # Accept any (dis)associate call regardless of host/project args.
        def fake_associate(self, context, network_id,
                           host=NetworksAssociateJsonTests._sentinel,
                           project=NetworksAssociateJsonTests._sentinel):
            return True
        self.stubs.Set(network_api.API, "associate", fake_associate)
    def test_disassociate(self):
        response = self._do_post('os-networks/1/action',
                                 'network-disassociate-req',
                                 {})
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")
    def test_disassociate_host(self):
        response = self._do_post('os-networks/1/action',
                                 'network-disassociate-host-req',
                                 {})
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")
    def test_disassociate_project(self):
        response = self._do_post('os-networks/1/action',
                                 'network-disassociate-project-req',
                                 {})
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")
    def test_associate_host(self):
        response = self._do_post('os-networks/1/action',
                                 'network-associate-host-req',
                                 {"host": "testHost"})
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")
class NetworksAssociateXmlTests(NetworksAssociateJsonTests):
    """Run the networks-associate sample tests with XML serialization."""
    ctype = 'xml'
class FlavorDisabledSampleJsonTests(ApiSampleTestBaseV2):
    """API samples for the flavor_disabled flavor attribute (JSON)."""

    extension_name = ("nova.api.openstack.compute.contrib.flavor_disabled."
                      "Flavor_disabled")

    def test_show_flavor(self):
        # Get api sample to show flavor_disabled attr. of a flavor.
        flavor_id = 1
        resp = self._do_get('flavors/%s' % flavor_id)
        template_subs = self._get_regexes()
        template_subs['flavor_id'] = flavor_id
        self._verify_response('flavor-show-get-resp', template_subs,
                              resp, 200)

    def test_detail_flavor(self):
        # Get api sample to show details of a flavor.
        resp = self._do_get('flavors/detail')
        self._verify_response('flavor-detail-get-resp', self._get_regexes(),
                              resp, 200)
class FlavorDisabledSampleXmlTests(FlavorDisabledSampleJsonTests):
    """Run the flavor-disabled sample tests with XML serialization."""
    ctype = "xml"
class QuotaClassesSampleJsonTests(ApiSampleTestBaseV2):
    """API samples for the os-quota-class-sets extension (JSON)."""
    extension_name = ("nova.api.openstack.compute.contrib.quota_classes."
                      "Quota_classes")
    # Quota-class identifier used by both tests.
    set_id = 'test_class'
    def test_show_quota_classes(self):
        # Get api sample to show quota classes.
        response = self._do_get('os-quota-class-sets/%s' % self.set_id)
        subs = {'set_id': self.set_id}
        self._verify_response('quota-classes-show-get-resp', subs,
                              response, 200)
    def test_update_quota_classes(self):
        # Get api sample to update quota classes.
        response = self._do_put('os-quota-class-sets/%s' % self.set_id,
                                'quota-classes-update-post-req',
                                {})
        self._verify_response('quota-classes-update-post-resp',
                              {}, response, 200)
class QuotaClassesSampleXmlTests(QuotaClassesSampleJsonTests):
    """Run the quota-classes sample tests with XML serialization."""
    ctype = "xml"
class CellsSampleJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-cells extension (JSON)."""
    extension_name = "nova.api.openstack.compute.contrib.cells.Cells"
    def setUp(self):
        # db_check_interval < 0 makes cells manager always hit the DB
        self.flags(enable=True, db_check_interval=-1, group='cells')
        super(CellsSampleJsonTest, self).setUp()
        self._stub_cells()
    def _stub_cells(self, num_cells=5):
        """Replace cell lookups with `num_cells` in-memory fake cells."""
        self.cells = []
        self.cells_next_id = 1
        def _fake_cell_get_all(context):
            return self.cells
        def _fake_cell_get(inst, context, cell_name):
            for cell in self.cells:
                if cell['name'] == cell_name:
                    return cell
            raise exception.CellNotFound(cell_name=cell_name)
        for x in xrange(num_cells):
            cell = models.Cell()
            our_id = self.cells_next_id
            self.cells_next_id += 1
            # Alternate parent/child cells (even ids are parents).
            cell.update({'id': our_id,
                         'name': 'cell%s' % our_id,
                         'transport_url': 'rabbit://username%s@/' % our_id,
                         'is_parent': our_id % 2 == 0})
            self.cells.append(cell)
        self.stubs.Set(db, 'cell_get_all', _fake_cell_get_all)
        self.stubs.Set(cells_rpcapi.CellsAPI, 'cell_get', _fake_cell_get)
    def test_cells_empty_list(self):
        # Override this
        self._stub_cells(num_cells=0)
        response = self._do_get('os-cells')
        subs = self._get_regexes()
        self._verify_response('cells-list-empty-resp', subs, response, 200)
    def test_cells_list(self):
        response = self._do_get('os-cells')
        subs = self._get_regexes()
        self._verify_response('cells-list-resp', subs, response, 200)
    def test_cells_get(self):
        response = self._do_get('os-cells/cell3')
        subs = self._get_regexes()
        self._verify_response('cells-get-resp', subs, response, 200)
class CellsSampleXmlTest(CellsSampleJsonTest):
    # Re-run the cells sample tests against the XML content type.
    ctype = 'xml'
class CellsCapacitySampleJsonTest(ApiSampleTestBaseV2):
    """Samples for the cell_capacities extension on top of os-cells."""
    extends_name = ("nova.api.openstack.compute.contrib.cells.Cells")
    extension_name = ("nova.api.openstack.compute.contrib."
                      "cell_capacities.Cell_capacities")
    def setUp(self):
        self.flags(enable=True, db_check_interval=-1, group='cells')
        super(CellsCapacitySampleJsonTest, self).setUp()
        # (navneetk/kaushikc) : Mock cell capacity to avoid the capacity
        # being calculated from the compute nodes in the environment
        self._mock_cell_capacity()
    def test_get_cell_capacity(self):
        state_manager = state.CellStateManager()
        my_state = state_manager.get_my_state()
        response = self._do_get('os-cells/%s/capacities' %
                my_state.name)
        subs = self._get_regexes()
        return self._verify_response('cells-capacities-resp',
                                        subs, response, 200)
    def test_get_all_cells_capacity(self):
        response = self._do_get('os-cells/capacities')
        subs = self._get_regexes()
        return self._verify_response('cells-capacities-resp',
                                        subs, response, 200)
    def _mock_cell_capacity(self):
        # mox record/replay: exactly one get_our_capacities() call is
        # expected, returning the canned ram/disk figures below.
        self.mox.StubOutWithMock(self.cells.manager.state_manager,
                                 'get_our_capacities')
        response = {"ram_free":
                        {"units_by_mb": {"8192": 0, "512": 13,
                                         "4096": 1, "2048": 3, "16384": 0},
                         "total_mb": 7680},
                    "disk_free":
                        {"units_by_mb": {"81920": 11, "20480": 46,
                                         "40960": 23, "163840": 5, "0": 0},
                         "total_mb": 1052672}
        }
        self.cells.manager.state_manager.get_our_capacities(). \
                AndReturn(response)
        self.mox.ReplayAll()
class CellsCapacitySampleXmlTest(CellsCapacitySampleJsonTest):
    # Re-run the cell capacity sample tests against the XML content type.
    ctype = 'xml'
class BareMetalNodesJsonTest(ApiSampleTestBaseV2, bm_db_base.BMDBTestCase):
    """Samples for the os-baremetal-nodes extension (CRUD + interfaces)."""
    extension_name = ('nova.api.openstack.compute.contrib.baremetal_nodes.'
                      'Baremetal_nodes')
    def _get_subs(self):
        # Subclasses (e.g. ext_status) extend the substitution dict.
        subs = {}
        return subs
    def _create_node(self):
        # POST a node; _verify_response returns the captured node id via
        # the (?P<id>...) group in the response template.
        response = self._do_post("os-baremetal-nodes",
                                 "baremetal-node-create-req",
                                 {})
        subs = self._get_subs()
        subs.update({'node_id': '(?P<id>\d+)'})
        return self._verify_response("baremetal-node-create-resp", subs,
                                     response, 200)
    def _create_node_with_address(self):
        address = '12:34:56:78:90:ab'
        req_subs = {'address': address}
        response = self._do_post("os-baremetal-nodes",
                                 "baremetal-node-create-with-address-req",
                                 req_subs)
        subs = self._get_subs()
        subs.update({'node_id': '(?P<id>\d+)',
                     'interface_id': '\d+',
                     'address': address,
                    })
        self._verify_response("baremetal-node-create-with-address-resp",
                              subs, response, 200)
    def test_create_node(self):
        self._create_node()
    def test_create_node_with_address(self):
        self._create_node_with_address()
    def test_list_nodes(self):
        node_id = self._create_node()
        interface_id = self._add_interface(node_id)
        response = self._do_get('os-baremetal-nodes')
        subs = self._get_subs()
        subs.update({'node_id': node_id,
                     'interface_id': interface_id,
                     'address': 'aa:aa:aa:aa:aa:aa',
                    })
        self._verify_response('baremetal-node-list-resp', subs,
                              response, 200)
    def test_show_node(self):
        node_id = self._create_node()
        interface_id = self._add_interface(node_id)
        response = self._do_get('os-baremetal-nodes/%s' % node_id)
        subs = self._get_subs()
        subs.update({'node_id': node_id,
                     'interface_id': interface_id,
                     'address': 'aa:aa:aa:aa:aa:aa',
                    })
        self._verify_response('baremetal-node-show-resp', subs, response, 200)
    def test_delete_node(self):
        node_id = self._create_node()
        response = self._do_delete("os-baremetal-nodes/%s" % node_id)
        self.assertEqual(response.status_code, 202)
    def _add_interface(self, node_id):
        # Attach a NIC via the node action API; returns the interface id
        # captured from the response template.
        response = self._do_post("os-baremetal-nodes/%s/action" % node_id,
                                 "baremetal-node-add-interface-req",
                                 {'address': 'aa:aa:aa:aa:aa:aa'})
        subs = {'interface_id': r'(?P<id>\d+)'}
        return self._verify_response("baremetal-node-add-interface-resp", subs,
                                     response, 200)
    def test_add_interface(self):
        node_id = self._create_node()
        self._add_interface(node_id)
    def test_remove_interface(self):
        node_id = self._create_node()
        self._add_interface(node_id)
        response = self._do_post("os-baremetal-nodes/%s/action" % node_id,
                                 "baremetal-node-remove-interface-req",
                                 {'address': 'aa:aa:aa:aa:aa:aa'})
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, "")
class BareMetalNodesXmlTest(BareMetalNodesJsonTest):
    # Re-run the baremetal-node sample tests against the XML content type.
    ctype = 'xml'
class BareMetalExtStatusJsonTest(BareMetalNodesJsonTest):
    """Re-run the baremetal-node samples with the ext_status extension."""
    extension_name = ('nova.api.openstack.compute.contrib.'
                      'baremetal_ext_status.Baremetal_ext_status')
    def _get_flags(self):
        f = super(BareMetalExtStatusJsonTest, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        # BareMetalExtStatus extension also needs BareMetalNodes to be loaded.
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.baremetal_nodes.'
            'Baremetal_nodes')
        return f
    def _get_subs(self):
        # ext_status responses include a node uuid field.
        vanilla_regexes = self._get_regexes()
        subs = {'node_uuid': vanilla_regexes['uuid']}
        return subs
class BareMetalExtStatusXmlTest(BareMetalExtStatusJsonTest):
    # Re-run the ext_status sample tests against the XML content type.
    ctype = 'xml'
class BlockDeviceMappingV2BootJsonTest(ServersSampleBase):
    """Boot-from-volume sample using the v2 block device mapping format."""
    extension_name = ('nova.api.openstack.compute.contrib.'
                      'block_device_mapping_v2_boot.'
                      'Block_device_mapping_v2_boot')
    def _get_flags(self):
        f = super(BlockDeviceMappingV2BootJsonTest, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        # We need the volumes extension as well
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.volumes.Volumes')
        return f
    def test_servers_post_with_bdm_v2(self):
        # Stub out cinder so the volume in the bdm always "exists" and is
        # attachable; ServersSampleBase does the POST + verification.
        self.stubs.Set(cinder.API, 'get', fakes.stub_volume_get)
        self.stubs.Set(cinder.API, 'check_attach',
                       fakes.stub_volume_check_attach)
        return self._post_server()
class BlockDeviceMappingV2BootXmlTest(BlockDeviceMappingV2BootJsonTest):
    # Re-run the bdm-v2 boot sample test against the XML content type.
    ctype = 'xml'
class FloatingIPPoolsSampleJsonTests(ApiSampleTestBaseV2):
    """API samples for the os-floating-ip-pools extension."""
    extension_name = ("nova.api.openstack.compute.contrib.floating_ip_pools."
                      "Floating_ip_pools")
    def test_list_floatingippools(self):
        """List the stubbed pools and match the list-response sample."""
        pools = ["pool1", "pool2"]
        def _fake_pools(self, context):
            # Stub the network API so no real pools are required.
            return pools
        self.stubs.Set(network_api.API, "get_floating_ip_pools",
                       _fake_pools)
        response = self._do_get('os-floating-ip-pools')
        subs = {'pool1': pools[0], 'pool2': pools[1]}
        self._verify_response('floatingippools-list-resp', subs, response, 200)
class FloatingIPPoolsSampleXmlTests(FloatingIPPoolsSampleJsonTests):
    # Re-run the floating-ip-pools sample tests against XML.
    ctype = 'xml'
class MultinicSampleJsonTest(ServersSampleBase):
    """Samples for the multinic extension (add/remove fixed IPs)."""
    extension_name = "nova.api.openstack.compute.contrib.multinic.Multinic"
    def _disable_instance_dns_manager(self):
        # NOTE(markmc): it looks like multinic and instance_dns_manager are
        # incompatible. See:
        #   https://bugs.launchpad.net/nova/+bug/1213251
        self.flags(
            instance_dns_manager='nova.network.noop_dns_driver.NoopDNSDriver')
    def setUp(self):
        self._disable_instance_dns_manager()
        super(MultinicSampleJsonTest, self).setUp()
        # Boot one server that the fixed-ip actions operate on.
        self.uuid = self._post_server()
    def _add_fixed_ip(self):
        subs = {"networkId": 1}
        response = self._do_post('servers/%s/action' % (self.uuid),
                                 'multinic-add-fixed-ip-req', subs)
        self.assertEqual(response.status_code, 202)
    def test_add_fixed_ip(self):
        self._add_fixed_ip()
    def test_remove_fixed_ip(self):
        self._add_fixed_ip()
        subs = {"ip": "10.0.0.4"}
        response = self._do_post('servers/%s/action' % (self.uuid),
                                 'multinic-remove-fixed-ip-req', subs)
        self.assertEqual(response.status_code, 202)
class MultinicSampleXmlTest(MultinicSampleJsonTest):
    # Re-run the multinic sample tests against the XML content type.
    ctype = "xml"
class InstanceUsageAuditLogJsonTest(ApiSampleTestBaseV2):
    """Samples for the os-instance_usage_audit_log extension."""
    extension_name = ("nova.api.openstack.compute.contrib."
                      "instance_usage_audit_log.Instance_usage_audit_log")
    def test_show_instance_usage_audit_log(self):
        # The before-timestamp path segment must be URL-quoted.
        response = self._do_get('os-instance_usage_audit_log/%s' %
                                urllib.quote('2012-07-05 10:00:00'))
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('inst-usage-audit-log-show-get-resp',
                              subs, response, 200)
    def test_index_instance_usage_audit_log(self):
        response = self._do_get('os-instance_usage_audit_log')
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        self._verify_response('inst-usage-audit-log-index-get-resp',
                              subs, response, 200)
class InstanceUsageAuditLogXmlTest(InstanceUsageAuditLogJsonTest):
    # Re-run the usage-audit-log sample tests against XML.
    ctype = "xml"
class FlavorExtraSpecsSampleJsonTests(ApiSampleTestBaseV2):
    """Samples for flavor extra-specs CRUD (flavors/1/os-extra_specs)."""
    extension_name = ("nova.api.openstack.compute.contrib.flavorextraspecs."
                      "Flavorextraspecs")
    def _flavor_extra_specs_create(self):
        # Seed flavor 1 with key1/key2 extra specs used by the other tests.
        subs = {'value1': 'value1',
                'value2': 'value2'
        }
        response = self._do_post('flavors/1/os-extra_specs',
                                 'flavor-extra-specs-create-req', subs)
        self._verify_response('flavor-extra-specs-create-resp',
                              subs, response, 200)
    def test_flavor_extra_specs_get(self):
        subs = {'value1': 'value1'}
        self._flavor_extra_specs_create()
        response = self._do_get('flavors/1/os-extra_specs/key1')
        self._verify_response('flavor-extra-specs-get-resp',
                              subs, response, 200)
    def test_flavor_extra_specs_list(self):
        subs = {'value1': 'value1',
                'value2': 'value2'
        }
        self._flavor_extra_specs_create()
        response = self._do_get('flavors/1/os-extra_specs')
        self._verify_response('flavor-extra-specs-list-resp',
                              subs, response, 200)
    def test_flavor_extra_specs_create(self):
        self._flavor_extra_specs_create()
    def test_flavor_extra_specs_update(self):
        subs = {'value1': 'new_value1'}
        self._flavor_extra_specs_create()
        response = self._do_put('flavors/1/os-extra_specs/key1',
                                'flavor-extra-specs-update-req', subs)
        self._verify_response('flavor-extra-specs-update-resp',
                              subs, response, 200)
    def test_flavor_extra_specs_delete(self):
        self._flavor_extra_specs_create()
        response = self._do_delete('flavors/1/os-extra_specs/key1')
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.content, '')
class FlavorExtraSpecsSampleXmlTests(FlavorExtraSpecsSampleJsonTests):
    # Re-run the extra-specs sample tests against the XML content type.
    ctype = 'xml'
class FpingSampleJsonTests(ServersSampleBase):
    """Samples for the os-fping extension (server reachability)."""
    extension_name = ("nova.api.openstack.compute.contrib.fping.Fping")
    def setUp(self):
        super(FpingSampleJsonTests, self).setUp()
        def fake_check_fping(self):
            # Skip the check that the fping binary is installed.
            pass
        self.stubs.Set(utils, "execute", test_fping.execute)
        self.stubs.Set(fping.FpingController, "check_fping",
                       fake_check_fping)
    def test_get_fping(self):
        self._post_server()
        response = self._do_get('os-fping')
        subs = self._get_regexes()
        self._verify_response('fping-get-resp', subs, response, 200)
    def test_get_fping_details(self):
        uuid = self._post_server()
        response = self._do_get('os-fping/%s' % (uuid))
        subs = self._get_regexes()
        self._verify_response('fping-get-details-resp', subs, response, 200)
class FpingSampleXmlTests(FpingSampleJsonTests):
    # Re-run the fping sample tests against the XML content type.
    ctype = 'xml'
class ExtendedAvailabilityZoneJsonTests(ServersSampleBase):
    """Server views with the extended availability-zone attribute."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".extended_availability_zone"
                      ".Extended_availability_zone")
    def test_show(self):
        """Single-server GET matches the extended sample."""
        server_id = self._post_server()
        resp = self._do_get('servers/%s' % server_id)
        subs = dict(self._get_regexes(), hostid='[a-f0-9]+')
        self._verify_response('server-get-resp', subs, resp, 200)
    def test_detail(self):
        """servers/detail listing matches the extended sample."""
        self._post_server()
        resp = self._do_get('servers/detail')
        subs = dict(self._get_regexes(), hostid='[a-f0-9]+')
        self._verify_response('servers-detail-resp', subs, resp, 200)
class ExtendedAvailabilityZoneXmlTests(ExtendedAvailabilityZoneJsonTests):
    # Re-run the extended-AZ sample tests against the XML content type.
    ctype = 'xml'
class EvacuateJsonTest(ServersSampleBase):
    """Sample for the evacuate server action."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".evacuate.Evacuate")
    def test_server_evacuate(self):
        uuid = self._post_server()
        req_subs = {
            'host': 'testHost',
            "adminPass": "MySecretPass",
            "onSharedStorage": 'False'
        }
        def fake_service_is_up(self, service):
            """Simulate validation of instance host is down."""
            return False
        def fake_service_get_by_compute_host(self, context, host):
            """Simulate that given host is a valid host."""
            return {
                    'host_name': host,
                    'service': 'compute',
                    'zone': 'nova'
                    }
        def fake_rebuild_instance(self, ctxt, instance, new_pass,
                                  injected_files, image_ref, orig_image_ref,
                                  orig_sys_metadata, bdms, recreate=False,
                                  on_shared_storage=False, host=None,
                                  preserve_ephemeral=False, kwargs=None):
            # Stub the compute RPC so no real rebuild happens; the returned
            # adminPass ends up in the evacuate response.
            return {
                    'adminPass': new_pass
                    }
        self.stubs.Set(service_group_api.API, 'service_is_up',
                       fake_service_is_up)
        self.stubs.Set(compute_api.HostAPI, 'service_get_by_compute_host',
                       fake_service_get_by_compute_host)
        self.stubs.Set(compute_rpcapi.ComputeAPI, 'rebuild_instance',
                       fake_rebuild_instance)
        response = self._do_post('servers/%s/action' % uuid,
                                 'server-evacuate-req', req_subs)
        subs = self._get_regexes()
        self._verify_response('server-evacuate-resp', subs, response, 200)
class EvacuateXmlTest(EvacuateJsonTest):
    # Re-run the evacuate sample test against the XML content type.
    ctype = 'xml'
class EvacuateFindHostSampleJsonTest(ServersSampleBase):
    """Evacuate without a target host (scheduler picks the destination)."""
    extends_name = ("nova.api.openstack.compute.contrib"
                    ".evacuate.Evacuate")
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".extended_evacuate_find_host.Extended_evacuate_find_host")
    # NOTE: decorators apply bottom-up, so the mock arguments arrive in
    # reverse order: rebuild, service_get, check_instance.
    @mock.patch('nova.compute.manager.ComputeManager._check_instance_exists')
    @mock.patch('nova.compute.api.HostAPI.service_get_by_compute_host')
    @mock.patch('nova.conductor.manager.ComputeTaskManager.rebuild_instance')
    def test_server_evacuate(self, rebuild_mock, service_get_mock,
                             check_instance_mock):
        self.uuid = self._post_server()
        req_subs = {
            "adminPass": "MySecretPass",
            "onSharedStorage": 'False'
        }
        check_instance_mock.return_value = False
        def fake_service_get_by_compute_host(self, context, host):
            return {
                    'host_name': host,
                    'service': 'compute',
                    'zone': 'nova'
                    }
        service_get_mock.side_effect = fake_service_get_by_compute_host
        with mock.patch.object(service_group_api.API, 'service_is_up',
                               return_value=False):
            response = self._do_post('servers/%s/action' % self.uuid,
                                     'server-evacuate-find-host-req', req_subs)
        subs = self._get_regexes()
        self._verify_response('server-evacuate-find-host-resp', subs,
                              response, 200)
        # host=None is the point of this extension: the conductor (not the
        # caller) chooses the rebuild destination.
        rebuild_mock.assert_called_once_with(mock.ANY, instance=mock.ANY,
                orig_image_ref=mock.ANY, image_ref=mock.ANY,
                injected_files=mock.ANY, new_pass="MySecretPass",
                orig_sys_metadata=mock.ANY, bdms=mock.ANY, recreate=mock.ANY,
                on_shared_storage=False, preserve_ephemeral=mock.ANY,
                host=None)
class EvacuateFindHostSampleXmlTests(EvacuateFindHostSampleJsonTest):
    # Re-run the evacuate-find-host sample test against XML.
    ctype = "xml"
class FloatingIpDNSJsonTest(ApiSampleTestBaseV2):
    """Samples for the os-floating-ip-dns extension (domains + entries)."""
    extension_name = ("nova.api.openstack.compute.contrib.floating_ip_dns."
                      "Floating_ip_dns")
    # Fixture values shared by all the domain/entry tests below.
    domain = 'domain1.example.org'
    name = 'instance1'
    scope = 'public'
    project = 'project1'
    dns_type = 'A'
    ip = '192.168.1.1'
    def _create_or_update(self):
        # PUT a DNS domain.
        subs = {'domain': self.domain,
                'project': self.project,
                'scope': self.scope}
        response = self._do_put('os-floating-ip-dns/%s' % self.domain,
                                'floating-ip-dns-create-or-update-req', subs)
        self._verify_response('floating-ip-dns-create-or-update-resp', subs,
                              response, 200)
    def _create_or_update_entry(self):
        # PUT a DNS entry (name -> ip) inside the fixture domain.
        subs = {'ip': self.ip, 'dns_type': self.dns_type}
        response = self._do_put('os-floating-ip-dns/%s/entries/%s'
                                % (self.domain, self.name),
                                'floating-ip-dns-create-or-update-entry-req',
                                subs)
        subs.update({'name': self.name, 'domain': self.domain})
        self._verify_response('floating-ip-dns-create-or-update-entry-resp',
                              subs, response, 200)
    def test_floating_ip_dns_list(self):
        self._create_or_update()
        response = self._do_get('os-floating-ip-dns')
        subs = {'domain': self.domain,
                'project': self.project,
                'scope': self.scope}
        self._verify_response('floating-ip-dns-list-resp', subs,
                              response, 200)
    def test_floating_ip_dns_create_or_update(self):
        self._create_or_update()
    def test_floating_ip_dns_delete(self):
        self._create_or_update()
        response = self._do_delete('os-floating-ip-dns/%s' % self.domain)
        self.assertEqual(response.status_code, 202)
    def test_floating_ip_dns_create_or_update_entry(self):
        self._create_or_update_entry()
    def test_floating_ip_dns_entry_get(self):
        self._create_or_update_entry()
        response = self._do_get('os-floating-ip-dns/%s/entries/%s'
                                % (self.domain, self.name))
        subs = {'domain': self.domain,
                'ip': self.ip,
                'name': self.name}
        self._verify_response('floating-ip-dns-entry-get-resp', subs,
                              response, 200)
    def test_floating_ip_dns_entry_delete(self):
        self._create_or_update_entry()
        response = self._do_delete('os-floating-ip-dns/%s/entries/%s'
                                   % (self.domain, self.name))
        self.assertEqual(response.status_code, 202)
    def test_floating_ip_dns_entry_list(self):
        # Listing entries is keyed by IP rather than by entry name.
        self._create_or_update_entry()
        response = self._do_get('os-floating-ip-dns/%s/entries/%s'
                                % (self.domain, self.ip))
        subs = {'domain': self.domain,
                'ip': self.ip,
                'name': self.name}
        self._verify_response('floating-ip-dns-entry-list-resp', subs,
                              response, 200)
class FloatingIpDNSXmlTest(FloatingIpDNSJsonTest):
    # Re-run the floating-ip-dns sample tests against XML.
    ctype = 'xml'
class InstanceActionsSampleJsonTest(ApiSampleTestBaseV2):
    """Samples for os-instance-actions, backed by canned fake actions."""
    extension_name = ('nova.api.openstack.compute.contrib.instance_actions.'
                      'Instance_actions')
    def setUp(self):
        super(InstanceActionsSampleJsonTest, self).setUp()
        self.actions = fake_server_actions.FAKE_ACTIONS
        self.events = fake_server_actions.FAKE_EVENTS
        self.instance = test_utils.get_test_instance()
        # deepcopy everywhere so a test cannot mutate the shared fixtures.
        def fake_server_action_get_by_request_id(context, uuid, request_id):
            return copy.deepcopy(self.actions[uuid][request_id])
        def fake_server_actions_get(context, uuid):
            return [copy.deepcopy(value) for value in
                    self.actions[uuid].itervalues()]
        def fake_server_action_events_get(context, action_id):
            return copy.deepcopy(self.events[action_id])
        def fake_instance_get_by_uuid(context, instance_id):
            return self.instance
        def fake_get(self, context, instance_uuid, expected_attrs=None,
                     want_objects=True):
            return {'uuid': instance_uuid}
        self.stubs.Set(db, 'action_get_by_request_id',
                       fake_server_action_get_by_request_id)
        self.stubs.Set(db, 'actions_get', fake_server_actions_get)
        self.stubs.Set(db, 'action_events_get',
                       fake_server_action_events_get)
        self.stubs.Set(db, 'instance_get_by_uuid', fake_instance_get_by_uuid)
        self.stubs.Set(compute_api.API, 'get', fake_get)
    def test_instance_action_get(self):
        fake_uuid = fake_server_actions.FAKE_UUID
        fake_request_id = fake_server_actions.FAKE_REQUEST_ID1
        fake_action = self.actions[fake_uuid][fake_request_id]
        response = self._do_get('servers/%s/os-instance-actions/%s' %
                                (fake_uuid, fake_request_id))
        subs = self._get_regexes()
        subs['action'] = '(reboot)|(resize)'
        subs['instance_uuid'] = fake_uuid
        subs['integer_id'] = '[0-9]+'
        subs['request_id'] = fake_action['request_id']
        subs['start_time'] = fake_action['start_time']
        subs['result'] = '(Success)|(Error)'
        subs['event'] = '(schedule)|(compute_create)'
        self._verify_response('instance-action-get-resp', subs, response, 200)
    def test_instance_actions_list(self):
        fake_uuid = fake_server_actions.FAKE_UUID
        response = self._do_get('servers/%s/os-instance-actions' % (fake_uuid))
        subs = self._get_regexes()
        subs['action'] = '(reboot)|(resize)'
        subs['integer_id'] = '[0-9]+'
        subs['request_id'] = ('req-[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}'
                              '-[0-9a-f]{4}-[0-9a-f]{12}')
        self._verify_response('instance-actions-list-resp', subs,
                              response, 200)
class InstanceActionsSampleXmlTest(InstanceActionsSampleJsonTest):
    # Re-run the instance-actions sample tests against XML.
    ctype = 'xml'
class ImageSizeSampleJsonTests(ApiSampleTestBaseV2):
    """Image views with the image_size extension loaded."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".image_size.Image_size")
    def test_show(self):
        """GET a single image's details and validate the sample."""
        image_id = fake.get_valid_image_id()
        response = self._do_get('images/%s' % image_id)
        template_subs = dict(self._get_regexes(), image_id=image_id)
        self._verify_response('image-get-resp', template_subs, response, 200)
    def test_detail(self):
        """GET the detailed image listing and validate the sample."""
        response = self._do_get('images/detail')
        template_subs = self._get_regexes()
        self._verify_response('images-details-get-resp', template_subs,
                              response, 200)
class ImageSizeSampleXmlTests(ImageSizeSampleJsonTests):
    # Re-run the image-size sample tests against the XML content type.
    ctype = 'xml'
class ConfigDriveSampleJsonTest(ServersSampleBase):
    """Server views with the config_drive attribute extension."""
    extension_name = ("nova.api.openstack.compute.contrib.config_drive."
                      "Config_drive")
    def setUp(self):
        super(ConfigDriveSampleJsonTest, self).setUp()
        fakes.stub_out_networking(self.stubs)
        fakes.stub_out_rate_limiting(self.stubs)
        fake.stub_out_image_service(self.stubs)
    def test_config_drive_show(self):
        uuid = self._post_server()
        response = self._do_get('servers/%s' % uuid)
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        # config drive can be a string for True or empty value for False
        subs['cdrive'] = '.*'
        self._verify_response('server-config-drive-get-resp', subs,
                              response, 200)
    def test_config_drive_detail(self):
        self._post_server()
        response = self._do_get('servers/detail')
        subs = self._get_regexes()
        subs['hostid'] = '[a-f0-9]+'
        # config drive can be a string for True or empty value for False
        subs['cdrive'] = '.*'
        self._verify_response('servers-config-drive-details-resp',
                              subs, response, 200)
class ConfigDriveSampleXmlTest(ConfigDriveSampleJsonTest):
    # Re-run the config-drive sample tests against the XML content type.
    ctype = 'xml'
class FlavorAccessSampleJsonTests(ApiSampleTestBaseV2):
    """Samples for the flavor_access extension (private-flavor tenants)."""
    extension_name = ("nova.api.openstack.compute.contrib.flavor_access."
                      "Flavor_access")
    def _get_flags(self):
        f = super(FlavorAccessSampleJsonTests, self)._get_flags()
        f['osapi_compute_extension'] = CONF.osapi_compute_extension[:]
        # FlavorAccess extension also needs Flavormanage to be loaded.
        f['osapi_compute_extension'].append(
            'nova.api.openstack.compute.contrib.flavormanage.Flavormanage')
        return f
    def _add_tenant(self):
        # Grant 'fake_tenant' access to (private) flavor 10.
        subs = {
            'tenant_id': 'fake_tenant',
            'flavor_id': 10
        }
        response = self._do_post('flavors/10/action',
                                 'flavor-access-add-tenant-req',
                                 subs)
        self._verify_response('flavor-access-add-tenant-resp',
                              subs, response, 200)
    def _create_flavor(self):
        # Create flavor 10 that the access tests operate on.
        subs = {
            'flavor_id': 10,
            'flavor_name': 'test_flavor'
        }
        response = self._do_post("flavors",
                                 "flavor-access-create-req",
                                 subs)
        subs.update(self._get_regexes())
        self._verify_response("flavor-access-create-resp", subs, response, 200)
    def test_flavor_access_create(self):
        self._create_flavor()
    def test_flavor_access_detail(self):
        response = self._do_get('flavors/detail')
        subs = self._get_regexes()
        self._verify_response('flavor-access-detail-resp', subs, response, 200)
    def test_flavor_access_list(self):
        self._create_flavor()
        self._add_tenant()
        flavor_id = 10
        response = self._do_get('flavors/%s/os-flavor-access' % flavor_id)
        subs = {
            'flavor_id': flavor_id,
            'tenant_id': 'fake_tenant',
        }
        self._verify_response('flavor-access-list-resp', subs, response, 200)
    def test_flavor_access_show(self):
        flavor_id = 1
        response = self._do_get('flavors/%s' % flavor_id)
        subs = {
            'flavor_id': flavor_id
        }
        subs.update(self._get_regexes())
        self._verify_response('flavor-access-show-resp', subs, response, 200)
    def test_flavor_access_add_tenant(self):
        self._create_flavor()
        self._add_tenant()
    def test_flavor_access_remove_tenant(self):
        self._create_flavor()
        self._add_tenant()
        subs = {
            'tenant_id': 'fake_tenant',
        }
        response = self._do_post('flavors/10/action',
                                 "flavor-access-remove-tenant-req",
                                 subs)
        # After removal only the API's own project retains access.
        exp_subs = {
            "tenant_id": self.api.project_id,
            "flavor_id": "10"
        }
        self._verify_response('flavor-access-remove-tenant-resp',
                              exp_subs, response, 200)
class FlavorAccessSampleXmlTests(FlavorAccessSampleJsonTests):
    # Re-run the flavor-access sample tests against the XML content type.
    ctype = 'xml'
class HypervisorsSampleJsonTests(ApiSampleTestBaseV2):
    """Samples for the os-hypervisors extension."""
    extension_name = ("nova.api.openstack.compute.contrib.hypervisors."
                      "Hypervisors")
    def setUp(self):
        super(HypervisorsSampleJsonTests, self).setUp()
        # Report every service as up so hypervisor state is deterministic.
        mock.patch("nova.servicegroup.API.service_is_up",
                   return_value=True).start()
        self.addCleanup(mock.patch.stopall)
    def test_hypervisors_list(self):
        response = self._do_get('os-hypervisors')
        self._verify_response('hypervisors-list-resp', {}, response, 200)
    def test_hypervisors_search(self):
        response = self._do_get('os-hypervisors/fake/search')
        self._verify_response('hypervisors-search-resp', {}, response, 200)
    def test_hypervisors_servers(self):
        response = self._do_get('os-hypervisors/fake/servers')
        self._verify_response('hypervisors-servers-resp', {}, response, 200)
    def test_hypervisors_show(self):
        hypervisor_id = 1
        subs = {
            'hypervisor_id': hypervisor_id
        }
        response = self._do_get('os-hypervisors/%s' % hypervisor_id)
        subs.update(self._get_regexes())
        self._verify_response('hypervisors-show-resp', subs, response, 200)
    def test_hypervisors_statistics(self):
        response = self._do_get('os-hypervisors/statistics')
        self._verify_response('hypervisors-statistics-resp', {}, response, 200)
    def test_hypervisors_uptime(self):
        def fake_get_host_uptime(self, context, hyp):
            # Canned uptime string matching the sample template.
            return (" 08:32:11 up 93 days, 18:25, 12 users,  load average:"
                    " 0.20, 0.12, 0.14")
        self.stubs.Set(compute_api.HostAPI,
                       'get_host_uptime', fake_get_host_uptime)
        hypervisor_id = 1
        response = self._do_get('os-hypervisors/%s/uptime' % hypervisor_id)
        subs = {
            'hypervisor_id': hypervisor_id,
        }
        self._verify_response('hypervisors-uptime-resp', subs, response, 200)
class HypervisorsSampleXmlTests(HypervisorsSampleJsonTests):
    # Re-run the hypervisors sample tests against the XML content type.
    ctype = "xml"
class ExtendedHypervisorsJsonTest(ApiSampleTestBaseV2):
    """Hypervisor show view enriched by the extended_hypervisors extension."""
    extends_name = ("nova.api.openstack.compute.contrib."
                    "hypervisors.Hypervisors")
    extension_name = ("nova.api.openstack.compute.contrib."
                      "extended_hypervisors.Extended_hypervisors")
    def test_hypervisors_show_with_ip(self):
        """Show hypervisor 1 and validate the host-ip enriched sample."""
        hyp_id = 1
        response = self._do_get('os-hypervisors/%s' % hyp_id)
        subs = dict({'hypervisor_id': hyp_id}, **self._get_regexes())
        self._verify_response('hypervisors-show-with-ip-resp',
                              subs, response, 200)
class ExtendedHypervisorsXmlTest(ExtendedHypervisorsJsonTest):
    # Re-run the extended-hypervisors sample test against XML.
    ctype = "xml"
class HypervisorStatusJsonTest(ApiSampleTestBaseV2):
    """Hypervisor show view enriched by the hypervisor_status extension."""
    extends_name = ("nova.api.openstack.compute.contrib."
                    "hypervisors.Hypervisors")
    extension_name = ("nova.api.openstack.compute.contrib."
                      "hypervisor_status.Hypervisor_status")
    def test_hypervisors_show_with_status(self):
        """Show hypervisor 1 and validate the status-enriched sample."""
        hyp_id = 1
        response = self._do_get('os-hypervisors/%s' % hyp_id)
        subs = dict({'hypervisor_id': hyp_id}, **self._get_regexes())
        self._verify_response('hypervisors-show-with-status-resp',
                              subs, response, 200)
class HypervisorStatusXmlTest(HypervisorStatusJsonTest):
    # Re-run the hypervisor-status sample test against XML.
    ctype = 'xml'
@mock.patch("nova.servicegroup.API.service_is_up", return_value=True)
class HypervisorsCellsSampleJsonTests(ApiSampleTestBaseV2):
    """os-hypervisors samples routed through the cells API layer."""
    extension_name = ("nova.api.openstack.compute.contrib.hypervisors."
                      "Hypervisors")
    def setUp(self):
        self.flags(enable=True, cell_type='api', group='cells')
        super(HypervisorsCellsSampleJsonTests, self).setUp()
    def test_hypervisor_uptime(self, mocks):
        # `mocks` is injected by the class-level service_is_up patch.
        fake_hypervisor = {'service': {'host': 'fake-mini',
                                       'disabled': False,
                                       'disabled_reason': None},
                           'id': 1, 'hypervisor_hostname': 'fake-mini'}
        def fake_get_host_uptime(self, context, hyp):
            return (" 08:32:11 up 93 days, 18:25, 12 users,  load average:"
                    " 0.20, 0.12, 0.14")
        def fake_compute_node_get(self, context, hyp):
            return fake_hypervisor
        self.stubs.Set(cells_api.HostAPI, 'compute_node_get',
                       fake_compute_node_get)
        self.stubs.Set(cells_api.HostAPI,
                       'get_host_uptime', fake_get_host_uptime)
        hypervisor_id = fake_hypervisor['id']
        response = self._do_get('os-hypervisors/%s/uptime' % hypervisor_id)
        subs = {'hypervisor_id': hypervisor_id}
        self._verify_response('hypervisors-uptime-resp', subs, response, 200)
class HypervisorsCellsSampleXmlTests(HypervisorsCellsSampleJsonTests):
    # Re-run the cells hypervisor sample tests against XML.
    ctype = "xml"
class AttachInterfacesSampleJsonTest(ServersSampleBase):
    """Samples for os-interface (list/show/attach/detach ports).

    All neutron/network calls are stubbed with canned port data so the
    tests never touch a real network service.
    """
    extension_name = ('nova.api.openstack.compute.contrib.attach_interfaces.'
                      'Attach_interfaces')
    def setUp(self):
        super(AttachInterfacesSampleJsonTest, self).setUp()
        def fake_list_ports(self, *args, **kwargs):
            uuid = kwargs.get('device_id', None)
            if not uuid:
                raise exception.InstanceNotFound(instance_id=None)
            port_data = {
                "id": "ce531f90-199f-48c0-816c-13e38010b442",
                "network_id": "3cb9bc59-5699-4588-a4b1-b87f96708bc6",
                "admin_state_up": True,
                "status": "ACTIVE",
                "mac_address": "fa:16:3e:4c:2c:30",
                "fixed_ips": [
                    {
                        "ip_address": "192.168.1.3",
                        "subnet_id": "f8a6e8f8-c2ec-497c-9f23-da9616de54ef"
                    }
                ],
                "device_id": uuid,
                }
            ports = {'ports': [port_data]}
            return ports
        def fake_show_port(self, context, port_id=None):
            if not port_id:
                raise exception.PortNotFound(port_id=None)
            port_data = {
                "id": port_id,
                "network_id": "3cb9bc59-5699-4588-a4b1-b87f96708bc6",
                "admin_state_up": True,
                "status": "ACTIVE",
                "mac_address": "fa:16:3e:4c:2c:30",
                "fixed_ips": [
                    {
                        "ip_address": "192.168.1.3",
                        "subnet_id": "f8a6e8f8-c2ec-497c-9f23-da9616de54ef"
                    }
                ],
                "device_id": 'bece68a3-2f8b-4e66-9092-244493d6aba7',
                }
            port = {'port': port_data}
            return port
        def fake_attach_interface(self, context, instance,
                                  network_id, port_id,
                                  requested_ip='192.168.1.3'):
            # Fabricate the VIF the compute API would return on attach.
            if not network_id:
                network_id = "fake_net_uuid"
            if not port_id:
                port_id = "fake_port_uuid"
            vif = fake_network_cache_model.new_vif()
            vif['id'] = port_id
            vif['network']['id'] = network_id
            vif['network']['subnets'][0]['ips'][0] = requested_ip
            return vif
        def fake_detach_interface(self, context, instance, port_id):
            pass
        self.stubs.Set(network_api.API, 'list_ports', fake_list_ports)
        self.stubs.Set(network_api.API, 'show_port', fake_show_port)
        self.stubs.Set(compute_api.API, 'attach_interface',
                       fake_attach_interface)
        self.stubs.Set(compute_api.API, 'detach_interface',
                       fake_detach_interface)
        self.flags(auth_strategy=None, group='neutron')
        self.flags(url='http://anyhost/', group='neutron')
        self.flags(url_timeout=30, group='neutron')
    def generalize_subs(self, subs, vanilla_regexes):
        # Loosen the concrete fixture values to regexes when (re)generating
        # the sample templates.
        subs['subnet_id'] = vanilla_regexes['uuid']
        subs['net_id'] = vanilla_regexes['uuid']
        subs['port_id'] = vanilla_regexes['uuid']
        subs['mac_addr'] = '(?:[a-f0-9]{2}:){5}[a-f0-9]{2}'
        subs['ip_address'] = vanilla_regexes['ip']
        return subs
    def test_list_interfaces(self):
        instance_uuid = self._post_server()
        response = self._do_get('servers/%s/os-interface' % instance_uuid)
        subs = {
                'ip_address': '192.168.1.3',
                'subnet_id': 'f8a6e8f8-c2ec-497c-9f23-da9616de54ef',
                'mac_addr': 'fa:16:3e:4c:2c:30',
                'net_id': '3cb9bc59-5699-4588-a4b1-b87f96708bc6',
                'port_id': 'ce531f90-199f-48c0-816c-13e38010b442',
                'port_state': 'ACTIVE'
                }
        self._verify_response('attach-interfaces-list-resp', subs,
                              response, 200)
    def _stub_show_for_instance(self, instance_uuid, port_id):
        # Rebind show_port so the canned port's device_id matches the
        # server created by the test.
        show_port = network_api.API().show_port(None, port_id)
        show_port['port']['device_id'] = instance_uuid
        self.stubs.Set(network_api.API, 'show_port', lambda *a, **k: show_port)
    def test_show_interfaces(self):
        instance_uuid = self._post_server()
        port_id = 'ce531f90-199f-48c0-816c-13e38010b442'
        self._stub_show_for_instance(instance_uuid, port_id)
        response = self._do_get('servers/%s/os-interface/%s' %
                                (instance_uuid, port_id))
        subs = {
                'ip_address': '192.168.1.3',
                'subnet_id': 'f8a6e8f8-c2ec-497c-9f23-da9616de54ef',
                'mac_addr': 'fa:16:3e:4c:2c:30',
                'net_id': '3cb9bc59-5699-4588-a4b1-b87f96708bc6',
                'port_id': port_id,
                'port_state': 'ACTIVE'
                }
        self._verify_response('attach-interfaces-show-resp', subs,
                              response, 200)
    def test_create_interfaces(self, instance_uuid=None):
        if instance_uuid is None:
            instance_uuid = self._post_server()
        subs = {
                'net_id': '3cb9bc59-5699-4588-a4b1-b87f96708bc6',
                'port_id': 'ce531f90-199f-48c0-816c-13e38010b442',
                'subnet_id': 'f8a6e8f8-c2ec-497c-9f23-da9616de54ef',
                'ip_address': '192.168.1.3',
                'port_state': 'ACTIVE',
                'mac_addr': 'fa:16:3e:4c:2c:30',
                }
        self._stub_show_for_instance(instance_uuid, subs['port_id'])
        response = self._do_post('servers/%s/os-interface' % instance_uuid,
                                 'attach-interfaces-create-req', subs)
        subs.update(self._get_regexes())
        self._verify_response('attach-interfaces-create-resp', subs,
                              response, 200)
    def test_delete_interfaces(self):
        instance_uuid = self._post_server()
        port_id = 'ce531f90-199f-48c0-816c-13e38010b442'
        response = self._do_delete('servers/%s/os-interface/%s' %
                                   (instance_uuid, port_id))
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')
class AttachInterfacesSampleXmlTest(AttachInterfacesSampleJsonTest):
    # Re-run the attach-interfaces sample tests against XML.
    ctype = 'xml'
class SnapshotsSampleJsonTests(ApiSampleTestBaseV2):
    """Samples for os-snapshots, with all cinder calls stubbed."""
    extension_name = "nova.api.openstack.compute.contrib.volumes.Volumes"
    # Shared request substitutions for snapshot creation.
    create_subs = {
            'snapshot_name': 'snap-001',
            'description': 'Daily backup',
            'volume_id': '521752a6-acf6-4b2d-bc7a-119f9148cd8c'
        }
    def setUp(self):
        super(SnapshotsSampleJsonTests, self).setUp()
        self.stubs.Set(cinder.API, "get_all_snapshots",
                       fakes.stub_snapshot_get_all)
        self.stubs.Set(cinder.API, "get_snapshot", fakes.stub_snapshot_get)
    def _create_snapshot(self):
        self.stubs.Set(cinder.API, "create_snapshot",
                       fakes.stub_snapshot_create)
        response = self._do_post("os-snapshots",
                                 "snapshot-create-req",
                                 self.create_subs)
        return response
    def test_snapshots_create(self):
        response = self._create_snapshot()
        self.create_subs.update(self._get_regexes())
        self._verify_response("snapshot-create-resp",
                              self.create_subs, response, 200)
    def test_snapshots_delete(self):
        self.stubs.Set(cinder.API, "delete_snapshot",
                       fakes.stub_snapshot_delete)
        self._create_snapshot()
        response = self._do_delete('os-snapshots/100')
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')
    def test_snapshots_detail(self):
        response = self._do_get('os-snapshots/detail')
        subs = self._get_regexes()
        self._verify_response('snapshots-detail-resp', subs, response, 200)
    def test_snapshots_list(self):
        response = self._do_get('os-snapshots')
        subs = self._get_regexes()
        self._verify_response('snapshots-list-resp', subs, response, 200)
    def test_snapshots_show(self):
        response = self._do_get('os-snapshots/100')
        subs = {
            'snapshot_name': 'Default name',
            'description': 'Default description'
        }
        subs.update(self._get_regexes())
        self._verify_response('snapshots-show-resp', subs, response, 200)
class SnapshotsSampleXmlTests(SnapshotsSampleJsonTests):
    # Re-run the snapshot samples with XML serialization.
    ctype = "xml"
class AssistedVolumeSnapshotsJsonTest(ApiSampleTestBaseV2):
    """Assisted volume snapshots."""
    extension_name = ("nova.api.openstack.compute.contrib."
                      "assisted_volume_snapshots.Assisted_volume_snapshots")
    def _create_assisted_snapshot(self, subs):
        """POST an assisted-snapshot-create request and return the response."""
        self.stubs.Set(compute_api.API, 'volume_snapshot_create',
                       fakes.stub_compute_volume_snapshot_create)
        response = self._do_post("os-assisted-volume-snapshots",
                                 "snapshot-create-assisted-req",
                                 subs)
        return response
    def test_snapshots_create_assisted(self):
        subs = {
            'snapshot_name': 'snap-001',
            'description': 'Daily backup',
            'volume_id': '521752a6-acf6-4b2d-bc7a-119f9148cd8c',
            'snapshot_id': '421752a6-acf6-4b2d-bc7a-119f9148cd8c',
            'type': 'qcow2',
            'new_file': 'new_file_name'
        }
        subs.update(self._get_regexes())
        response = self._create_assisted_snapshot(subs)
        self._verify_response("snapshot-create-assisted-resp",
                              subs, response, 200)
    def test_snapshots_delete_assisted(self):
        self.stubs.Set(compute_api.API, 'volume_snapshot_delete',
                       fakes.stub_compute_volume_snapshot_delete)
        snapshot_id = '100'
        # delete_info is passed as a JSON-encoded query parameter.
        response = self._do_delete(
                'os-assisted-volume-snapshots/%s?delete_info='
                '{"volume_id":"521752a6-acf6-4b2d-bc7a-119f9148cd8c"}'
                % snapshot_id)
        self.assertEqual(response.status_code, 204)
        self.assertEqual(response.content, '')
class AssistedVolumeSnapshotsXmlTest(AssistedVolumeSnapshotsJsonTest):
    # Re-run the assisted-snapshot samples with XML serialization.
    ctype = "xml"
class VolumeAttachmentsSampleBase(ServersSampleBase):
    """Shared stubbing helpers for the volume-attachment sample tests."""
    def _stub_db_bdms_get_all_by_instance(self, server_id):
        """Stub the DB layer to report two volume BDMs for *server_id*."""
        def fake_bdms_get_all_by_instance(context, instance_uuid,
                                          use_slave=False):
            bdms = [
                fake_block_device.FakeDbBlockDeviceDict(
                {'id': 1, 'volume_id': 'a26887c6-c47b-4654-abb5-dfadf7d3f803',
                'instance_uuid': server_id, 'source_type': 'volume',
                'destination_type': 'volume', 'device_name': '/dev/sdd'}),
                fake_block_device.FakeDbBlockDeviceDict(
                {'id': 2, 'volume_id': 'a26887c6-c47b-4654-abb5-dfadf7d3f804',
                'instance_uuid': server_id, 'source_type': 'volume',
                'destination_type': 'volume', 'device_name': '/dev/sdc'})
            ]
            return bdms
        self.stubs.Set(db, 'block_device_mapping_get_all_by_instance',
                       fake_bdms_get_all_by_instance)
    def _stub_compute_api_get(self):
        """Stub compute_api.API.get to return a minimal fake instance."""
        def fake_compute_api_get(self, context, instance_id,
                                 want_objects=False):
            # Honour the want_objects flag the API layer passes through.
            if want_objects:
                return fake_instance.fake_instance_obj(
                        context, **{'uuid': instance_id})
            else:
                return {'uuid': instance_id}
        self.stubs.Set(compute_api.API, 'get', fake_compute_api_get)
class VolumeAttachmentsSampleJsonTest(VolumeAttachmentsSampleBase):
    """API samples for the os-volume_attachments sub-resource."""
    extension_name = ("nova.api.openstack.compute.contrib.volumes.Volumes")
    def test_attach_volume_to_server(self):
        """Attach a volume and verify the sample response."""
        # Stub out every external interaction (Cinder, compute manager,
        # objects layer) so the API path runs standalone.
        self.stubs.Set(cinder.API, 'get', fakes.stub_volume_get)
        self.stubs.Set(cinder.API, 'check_attach', lambda *a, **k: None)
        self.stubs.Set(cinder.API, 'reserve_volume', lambda *a, **k: None)
        device_name = '/dev/vdd'
        bdm = objects.BlockDeviceMapping()
        bdm['device_name'] = device_name
        self.stubs.Set(compute_manager.ComputeManager,
                       "reserve_block_device_name",
                       lambda *a, **k: bdm)
        self.stubs.Set(compute_manager.ComputeManager,
                       'attach_volume',
                       lambda *a, **k: None)
        self.stubs.Set(objects.BlockDeviceMapping, 'get_by_volume_id',
                       classmethod(lambda *a, **k: None))
        volume = fakes.stub_volume_get(None, context.get_admin_context(),
                                       'a26887c6-c47b-4654-abb5-dfadf7d3f803')
        subs = {
            'volume_id': volume['id'],
            'device': device_name
        }
        server_id = self._post_server()
        response = self._do_post('servers/%s/os-volume_attachments'
                                 % server_id,
                                 'attach-volume-to-server-req', subs)
        subs.update(self._get_regexes())
        self._verify_response('attach-volume-to-server-resp', subs,
                              response, 200)
    def test_list_volume_attachments(self):
        server_id = self._post_server()
        self._stub_db_bdms_get_all_by_instance(server_id)
        response = self._do_get('servers/%s/os-volume_attachments'
                                % server_id)
        subs = self._get_regexes()
        self._verify_response('list-volume-attachments-resp', subs,
                              response, 200)
    def test_volume_attachment_detail(self):
        server_id = self._post_server()
        attach_id = "a26887c6-c47b-4654-abb5-dfadf7d3f803"
        self._stub_db_bdms_get_all_by_instance(server_id)
        self._stub_compute_api_get()
        response = self._do_get('servers/%s/os-volume_attachments/%s'
                                % (server_id, attach_id))
        subs = self._get_regexes()
        self._verify_response('volume-attachment-detail-resp', subs,
                              response, 200)
    def test_volume_attachment_delete(self):
        server_id = self._post_server()
        attach_id = "a26887c6-c47b-4654-abb5-dfadf7d3f803"
        self._stub_db_bdms_get_all_by_instance(server_id)
        self._stub_compute_api_get()
        self.stubs.Set(cinder.API, 'get', fakes.stub_volume_get)
        self.stubs.Set(compute_api.API, 'detach_volume', lambda *a, **k: None)
        response = self._do_delete('servers/%s/os-volume_attachments/%s'
                                   % (server_id, attach_id))
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')
class VolumeAttachmentsSampleXmlTest(VolumeAttachmentsSampleJsonTest):
    # Re-run the volume-attachment samples with XML serialization.
    ctype = 'xml'
class VolumeAttachUpdateSampleJsonTest(VolumeAttachmentsSampleBase):
    """API sample for updating (swapping) a server's volume attachment."""
    extends_name = ("nova.api.openstack.compute.contrib.volumes.Volumes")
    extension_name = ("nova.api.openstack.compute.contrib."
                      "volume_attachment_update.Volume_attachment_update")
    def test_volume_attachment_update(self):
        """PUT a new volume id onto an attachment; expect 202, empty body."""
        subs = {
            'volume_id': 'a26887c6-c47b-4654-abb5-dfadf7d3f805',
            'device': '/dev/sdd'
        }
        server_id = self._post_server()
        attach_id = 'a26887c6-c47b-4654-abb5-dfadf7d3f803'
        self._stub_db_bdms_get_all_by_instance(server_id)
        self._stub_compute_api_get()
        # Stub the Cinder lookup and the compute swap so the request can be
        # served without real services.  (The Cinder 'get' stub was
        # previously installed twice; once is enough.)
        self.stubs.Set(cinder.API, 'get', fakes.stub_volume_get)
        self.stubs.Set(compute_api.API, 'swap_volume', lambda *a, **k: None)
        response = self._do_put('servers/%s/os-volume_attachments/%s'
                                % (server_id, attach_id),
                                'update-volume-req',
                                subs)
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')
class VolumeAttachUpdateSampleXmlTest(VolumeAttachUpdateSampleJsonTest):
    # Re-run the attachment-update sample with XML serialization.
    ctype = 'xml'
class VolumesSampleJsonTest(ServersSampleBase):
    """API samples for the os-volumes extension (list/show/create/delete)."""
    extension_name = ("nova.api.openstack.compute.contrib.volumes.Volumes")

    def _get_volume_id(self):
        # Single well-known id shared by all the stubs below.
        return 'a26887c6-c47b-4654-abb5-dfadf7d3f803'

    def _stub_volume(self, volume_id, displayname="Volume Name",
                     displaydesc="Volume Description", size=100):
        """Build a fake Cinder volume dict for *volume_id*."""
        # Parameter renamed from ``id`` to avoid shadowing the builtin;
        # all in-class callers pass it positionally.
        volume = {
                'id': volume_id,
                'size': size,
                'availability_zone': 'zone1:host1',
                'instance_uuid': '3912f2b4-c5ba-4aec-9165-872876fe202e',
                'mountpoint': '/',
                'status': 'in-use',
                'attach_status': 'attached',
                'name': 'vol name',
                'display_name': displayname,
                'display_description': displaydesc,
                'created_at': datetime.datetime(2008, 12, 1, 11, 1, 55),
                'snapshot_id': None,
                'volume_type_id': 'fakevoltype',
                'volume_metadata': [],
                'volume_type': {'name': 'Backup'}
        }
        return volume

    def _stub_volume_get(self, context, volume_id):
        return self._stub_volume(volume_id)

    def _stub_volume_delete(self, context, *args, **param):
        pass

    def _stub_volume_get_all(self, context, search_opts=None):
        volume_id = self._get_volume_id()
        return [self._stub_volume(volume_id)]

    def _stub_volume_create(self, context, size, name, description, snapshot,
                            **param):
        volume_id = self._get_volume_id()
        return self._stub_volume(volume_id)

    def setUp(self):
        """Route every Cinder call made by the API to the stubs above."""
        super(VolumesSampleJsonTest, self).setUp()
        fakes.stub_out_networking(self.stubs)
        fakes.stub_out_rate_limiting(self.stubs)
        self.stubs.Set(cinder.API, "delete", self._stub_volume_delete)
        self.stubs.Set(cinder.API, "get", self._stub_volume_get)
        self.stubs.Set(cinder.API, "get_all", self._stub_volume_get_all)

    def _post_volume(self):
        """Create a volume via the API and verify the sample response."""
        subs_req = {
                'volume_name': "Volume Name",
                'volume_desc': "Volume Description",
        }
        self.stubs.Set(cinder.API, "create", self._stub_volume_create)
        response = self._do_post('os-volumes', 'os-volumes-post-req',
                                 subs_req)
        subs = self._get_regexes()
        subs.update(subs_req)
        self._verify_response('os-volumes-post-resp', subs, response, 200)

    def test_volumes_show(self):
        subs = {
                'volume_name': "Volume Name",
                'volume_desc': "Volume Description",
        }
        vol_id = self._get_volume_id()
        response = self._do_get('os-volumes/%s' % vol_id)
        subs.update(self._get_regexes())
        self._verify_response('os-volumes-get-resp', subs, response, 200)

    def test_volumes_index(self):
        subs = {
                'volume_name': "Volume Name",
                'volume_desc': "Volume Description",
        }
        response = self._do_get('os-volumes')
        subs.update(self._get_regexes())
        self._verify_response('os-volumes-index-resp', subs, response, 200)

    def test_volumes_detail(self):
        # For now, index and detail are the same.
        # See the volumes api
        subs = {
                'volume_name': "Volume Name",
                'volume_desc': "Volume Description",
        }
        response = self._do_get('os-volumes/detail')
        subs.update(self._get_regexes())
        self._verify_response('os-volumes-detail-resp', subs, response, 200)

    def test_volumes_create(self):
        self._post_volume()

    def test_volumes_delete(self):
        self._post_volume()
        vol_id = self._get_volume_id()
        response = self._do_delete('os-volumes/%s' % vol_id)
        self.assertEqual(response.status_code, 202)
        self.assertEqual(response.content, '')
class VolumesSampleXmlTest(VolumesSampleJsonTest):
    # Re-run the os-volumes samples with XML serialization.
    ctype = 'xml'
class MigrationsSamplesJsonTest(ApiSampleTestBaseV2):
    """API samples for the os-migrations extension."""
    extension_name = ("nova.api.openstack.compute.contrib.migrations."
                      "Migrations")
    def _stub_migrations(self, context, filters):
        """Return two canned migration records regardless of filters."""
        fake_migrations = [
            {
                'id': 1234,
                'source_node': 'node1',
                'dest_node': 'node2',
                'source_compute': 'compute1',
                'dest_compute': 'compute2',
                'dest_host': '1.2.3.4',
                'status': 'Done',
                'instance_uuid': 'instance_id_123',
                'old_instance_type_id': 1,
                'new_instance_type_id': 2,
                'created_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
                'updated_at': datetime.datetime(2012, 10, 29, 13, 42, 2),
                'deleted_at': None,
                'deleted': False
            },
            {
                'id': 5678,
                'source_node': 'node10',
                'dest_node': 'node20',
                'source_compute': 'compute10',
                'dest_compute': 'compute20',
                'dest_host': '5.6.7.8',
                'status': 'Done',
                'instance_uuid': 'instance_id_456',
                'old_instance_type_id': 5,
                'new_instance_type_id': 6,
                'created_at': datetime.datetime(2013, 10, 22, 13, 42, 2),
                'updated_at': datetime.datetime(2013, 10, 22, 13, 42, 2),
                'deleted_at': None,
                'deleted': False
            }
        ]
        return fake_migrations
    def setUp(self):
        super(MigrationsSamplesJsonTest, self).setUp()
        self.stubs.Set(compute_api.API, 'get_migrations',
                       self._stub_migrations)
    def test_get_migrations(self):
        response = self._do_get('os-migrations')
        subs = self._get_regexes()
        # NOTE(review): the explicit status check duplicates the status code
        # passed to _verify_response below -- likely redundant, kept as-is.
        self.assertEqual(response.status_code, 200)
        self._verify_response('migrations-get', subs, response, 200)
class MigrationsSamplesXmlTest(MigrationsSamplesJsonTest):
    # Re-run the migrations samples with XML serialization.
    ctype = 'xml'
class PreserveEphemeralOnRebuildJsonTest(ServersSampleBase):
    """API samples for rebuild with the preserve_ephemeral flag."""
    extension_name = ('nova.api.openstack.compute.contrib.'
                      'preserve_ephemeral_rebuild.'
                      'Preserve_ephemeral_rebuild')
    def _test_server_action(self, uuid, action,
                            subs=None, resp_tpl=None, code=202):
        """POST a server action; verify a sample response or just the code."""
        subs = subs or {}
        subs.update({'action': action})
        response = self._do_post('servers/%s/action' % uuid,
                                 'server-action-%s' % action.lower(),
                                 subs)
        if resp_tpl:
            subs.update(self._get_regexes())
            self._verify_response(resp_tpl, subs, response, code)
        else:
            # Actions without a response template return an empty body.
            self.assertEqual(response.status_code, code)
            self.assertEqual(response.content, "")
    def test_rebuild_server_preserve_ephemeral_false(self):
        uuid = self._post_server()
        image = self.api.get_images()[0]['id']
        subs = {'host': self._get_host(),
                'uuid': image,
                'name': 'foobar',
                'pass': 'seekr3t',
                'ip': '1.2.3.4',
                'ip6': 'fe80::100',
                'hostid': '[a-f0-9]+',
                'preserve_ephemeral': 'false'}
        self._test_server_action(uuid, 'rebuild', subs,
                                 'server-action-rebuild-resp')
    def test_rebuild_server_preserve_ephemeral_true(self):
        image = self.api.get_images()[0]['id']
        subs = {'host': self._get_host(),
                'uuid': image,
                'name': 'new-server-test',
                'pass': 'seekr3t',
                'ip': '1.2.3.4',
                'ip6': 'fe80::100',
                'hostid': '[a-f0-9]+',
                'preserve_ephemeral': 'true'}
        # Intercept the rebuild call to assert the flag reached compute.
        def fake_rebuild(self_, context, instance, image_href, admin_password,
                         **kwargs):
            self.assertTrue(kwargs['preserve_ephemeral'])
        self.stubs.Set(compute_api.API, 'rebuild', fake_rebuild)
        instance_uuid = self._post_server()
        response = self._do_post('servers/%s/action' % instance_uuid,
                                 'server-action-rebuild', subs)
        self.assertEqual(response.status_code, 202)
class PreserveEphemeralOnRebuildXmlTest(PreserveEphemeralOnRebuildJsonTest):
    # Re-run the preserve-ephemeral rebuild samples with XML serialization.
    ctype = 'xml'
class ServerExternalEventsJsonTest(ServersSampleBase):
    """API sample for posting an external event to a server."""
    extension_name = ('nova.api.openstack.compute.contrib.'
                      'server_external_events.Server_external_events')
    def test_create_event(self):
        instance_uuid = self._post_server()
        subs = {
            'uuid': instance_uuid,
            'name': 'network-changed',
            'status': 'completed',
            'tag': 'foo',
        }
        response = self._do_post('os-server-external-events',
                                 'event-create-req',
                                 subs)
        subs.update(self._get_regexes())
        self._verify_response('event-create-resp', subs, response, 200)
class ServerExternalEventsXmlTest(ServerExternalEventsJsonTest):
    # Re-run the external-events sample with XML serialization.
    ctype = 'xml'
class ServerGroupsSampleJsonTest(ServersSampleBase):
    """API samples for the os-server-groups extension."""
    extension_name = ("nova.api.openstack.compute.contrib"
                      ".server_groups.Server_groups")
    def _get_create_subs(self):
        # Substitutions shared by the create request/response templates.
        return {'name': 'test'}
    def _post_server_group(self):
        """Verify the response status code and returns the UUID of the
        newly created server group.
        """
        subs = self._get_create_subs()
        response = self._do_post('os-server-groups',
                                 'server-groups-post-req', subs)
        subs = self._get_regexes()
        subs['name'] = 'test'
        return self._verify_response('server-groups-post-resp',
                                     subs, response, 200)
    def _create_server_group(self):
        """Create a server group without verifying; return the response."""
        subs = self._get_create_subs()
        return self._do_post('os-server-groups',
                             'server-groups-post-req', subs)
    def test_server_groups_post(self):
        return self._post_server_group()
    def test_server_groups_list(self):
        subs = self._get_create_subs()
        uuid = self._post_server_group()
        response = self._do_get('os-server-groups')
        subs.update(self._get_regexes())
        subs['id'] = uuid
        self._verify_response('server-groups-list-resp',
                              subs, response, 200)
    def test_server_groups_get(self):
        # Get api sample of server groups get request.
        subs = {'name': 'test'}
        uuid = self._post_server_group()
        subs['id'] = uuid
        response = self._do_get('os-server-groups/%s' % uuid)
        self._verify_response('server-groups-get-resp', subs, response, 200)
    def test_server_groups_delete(self):
        uuid = self._post_server_group()
        response = self._do_delete('os-server-groups/%s' % uuid)
        self.assertEqual(response.status_code, 204)
class ServerGroupsSampleXmlTest(ServerGroupsSampleJsonTest):
    # Re-run the server-groups samples with XML serialization.
    ctype = 'xml'
class ServerGroupQuotas_LimitsSampleJsonTest(LimitsSampleJsonTest):
    # Limits samples with the server-group-quotas extension loaded.
    extension_name = ("nova.api.openstack.compute.contrib."
                      "server_group_quotas.Server_group_quotas")
class ServerGroupQuotas_LimitsSampleXmlTest(LimitsSampleXmlTest):
    # XML variant of the server-group-quotas limits samples.
    extension_name = ("nova.api.openstack.compute.contrib."
                      "server_group_quotas.Server_group_quotas")
class ServerGroupQuotas_UsedLimitsSamplesJsonTest(UsedLimitsSamplesJsonTest):
    # Used-limits samples with server-group-quotas extending used_limits.
    extension_name = ("nova.api.openstack.compute.contrib."
                      "server_group_quotas.Server_group_quotas")
    extends_name = ("nova.api.openstack.compute.contrib.used_limits."
                    "Used_limits")
class ServerGroupQuotas_UsedLimitsSamplesXmlTest(UsedLimitsSamplesXmlTest):
    # XML variant of the server-group-quotas used-limits samples.
    extension_name = ("nova.api.openstack.compute.contrib."
                      "server_group_quotas.Server_group_quotas")
    extends_name = ("nova.api.openstack.compute.contrib.used_limits."
                    "Used_limits")
class ServerGroupQuotas_QuotasSampleJsonTests(QuotasSampleJsonTests):
    # Quotas samples with server-group-quotas extending the quotas extension.
    extension_name = ("nova.api.openstack.compute.contrib."
                      "server_group_quotas.Server_group_quotas")
    extends_name = "nova.api.openstack.compute.contrib.quotas.Quotas"
class ServerGroupQuotas_QuotasSampleXmlTests(QuotasSampleXmlTests):
    # XML variant of the server-group-quotas quotas samples.
    extension_name = ("nova.api.openstack.compute.contrib."
                      "server_group_quotas.Server_group_quotas")
    extends_name = "nova.api.openstack.compute.contrib.quotas.Quotas"
class ServerGroupQuotasQuota_ClassesSampleJsonTests(
        QuotaClassesSampleJsonTests):
    # Quota-classes samples with server-group-quotas extending quota_classes.
    extension_name = ("nova.api.openstack.compute.contrib."
                      "server_group_quotas.Server_group_quotas")
    extends_name = ("nova.api.openstack.compute.contrib.quota_classes."
                    "Quota_classes")
class ServerGroupQuotas_QuotaClassesSampleXmlTests(
        QuotaClassesSampleXmlTests):
    # XML variant of the server-group-quotas quota-classes samples.
    extension_name = ("nova.api.openstack.compute.contrib."
                      "server_group_quotas.Server_group_quotas")
    extends_name = ("nova.api.openstack.compute.contrib.quota_classes."
                    "Quota_classes")
|
berrange/nova
|
nova/tests/integrated/test_api_samples.py
|
Python
|
apache-2.0
| 173,795
|
"""Test code for upsampling"""
import numpy as np
import tvm
import topi
import topi.testing
import math
def verify_upsampling(batch, in_channel, in_height, in_width, scale, layout='NCHW', method="NEAREST_NEIGHBOR"):
    """Check topi.nn.upsampling against the pure-python reference.

    Builds an upsampling op for the given layout/method, computes the
    expected result with topi.testing, and compares on every enabled target.
    """
    if layout == 'NCHW':
        A = tvm.placeholder((batch, in_channel, in_height, in_width), name='A')
        dtype = A.dtype
        out_shape = (batch, in_channel, in_height*scale, in_width*scale)
        a_np = np.random.uniform(size=(batch, in_channel, in_height, in_width)).astype(dtype)
    elif layout == 'NHWC':
        A = tvm.placeholder((batch, in_height, in_width, in_channel), name='A')
        dtype = A.dtype
        out_shape = (batch, in_height*scale, in_width*scale, in_channel)
        a_np = np.random.uniform(size=(batch, in_height, in_width, in_channel)).astype(dtype)
    else:
        raise NotImplementedError(
            'Layout not supported {} '.format(layout))
    B = topi.nn.upsampling(A, scale, layout=layout, method=method)
    # Reference result computed in numpy.
    if method == "BILINEAR":
        out_size = (in_height*scale, in_width*scale)
        b_np = topi.testing.bilinear_resize_python(a_np, out_size, layout)
    else:
        b_np = topi.testing.upsampling_python(a_np, scale, layout)
    def check_device(device):
        # Build and run on one target; silently skip disabled targets.
        ctx = tvm.context(device, 0)
        if not ctx.exist:
            print("Skip because %s is not enabled" % device)
            return
        print("Running on target: %s" % device)
        with tvm.target.create(device):
            s = topi.generic.schedule_injective(B)
        a = tvm.nd.array(a_np, ctx)
        b = tvm.nd.array(np.zeros(out_shape, dtype=dtype), ctx)
        f = tvm.build(s, [A, B], device)
        f(a, b)
        np.testing.assert_allclose(b.asnumpy(), b_np, rtol=1e-5, atol=1e-5)
    for device in ['llvm', 'cuda', 'vulkan', 'nvptx']:
        check_device(device)
def test_upsampling():
    """Exercise both methods in both layouts over a few shapes."""
    # NEAREST_NEIGHBOR - NCHW
    verify_upsampling(8, 16, 32, 32, 2)
    verify_upsampling(12, 32, 64, 64, 3)
    # NEAREST_NEIGHBOR - NHWC
    verify_upsampling(8, 16, 32, 32, 2, layout="NHWC")
    verify_upsampling(12, 32, 64, 64, 3, layout="NHWC")
    # BILINEAR - NCHW
    verify_upsampling(2, 2, 32, 32, 2, method="BILINEAR")
    verify_upsampling(2, 2, 32, 32, 3, method="BILINEAR")
    # BILINEAR - NHWC
    verify_upsampling(2, 2, 32, 32, 2, layout="NHWC", method="BILINEAR")
    verify_upsampling(2, 2, 32, 32, 3, layout="NHWC", method="BILINEAR")
# Allow running this test module directly as a script.
if __name__ == "__main__":
    test_upsampling()
|
mlperf/training_results_v0.6
|
Fujitsu/benchmarks/resnet/implementations/mxnet/3rdparty/tvm/topi/tests/python/test_topi_upsampling.py
|
Python
|
apache-2.0
| 2,484
|
##########################################################################
#
# Copyright (c) 2012, Image Engine Design Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
import IECoreGL
IECoreGL.init( False )
class FontLoaderTest( unittest.TestCase ) :
	"""Checks FontLoader caching behaviour: repeated loads share one Font,
	and clear() discards the cache."""

	def test( self ) :

		fl = IECoreGL.FontLoader( IECore.SearchPath( "./test/IECore/data/fonts" ) )

		# loading a font yields a Font instance
		# (deprecated failUnless/failIf replaced with assertTrue/assertFalse)
		f = fl.load( "Vera.ttf" )
		self.assertTrue( isinstance( f, IECoreGL.Font ) )

		# a second load is served from the cache, so it is the same object
		f2 = fl.load( "Vera.ttf" )
		self.assertTrue( f.isSame( f2 ) )

		# clearing the cache forces a fresh load
		fl.clear()
		f3 = fl.load( "Vera.ttf" )
		self.assertFalse( f3.isSame( f2 ) )
# Allow running this test module directly as a script.
if __name__ == "__main__":
    unittest.main()
|
appleseedhq/cortex
|
test/IECoreGL/FontLoaderTest.py
|
Python
|
bsd-3-clause
| 2,272
|
# coding: utf-8
import json
from .tapioca import TapiocaInstantiator
from .exceptions import (
ResponseProcessException, ClientError, ServerError)
def generate_wrapper_from_adapter(adapter_class):
    """Return a client factory (TapiocaInstantiator) bound to *adapter_class*."""
    return TapiocaInstantiator(adapter_class)
class TapiocaAdapter(object):
    """Base class for API adapters.

    Subclasses set ``api_root`` and implement the serialization and
    iteration hooks (the ``NotImplementedError`` methods below), usually
    via a mixin such as ``JSONAdapterMixin``.
    """
    def get_api_root(self, api_params):
        # Subclasses must define the ``api_root`` attribute.
        return self.api_root
    def fill_resource_template_url(self, template, params):
        """Expand ``{placeholders}`` in a URL template from *params*."""
        return template.format(**params)
    def get_request_kwargs(self, api_params, *args, **kwargs):
        """Serialize the outgoing ``data`` kwarg before the HTTP call."""
        kwargs.update({
            'data': self.format_data_to_request(kwargs.get('data')),
        })
        return kwargs
    def process_response(self, response):
        """Deserialize *response*; raise on 4xx/5xx status codes.

        5xx raises before deserialization (no usable body expected);
        4xx raises after, attaching the decoded error payload.
        """
        if str(response.status_code).startswith('5'):
            raise ResponseProcessException(ServerError, None)
        data = self.response_to_native(response)
        if str(response.status_code).startswith('4'):
            raise ResponseProcessException(ClientError, data)
        return data
    def format_data_to_request(self, data):
        # Hook: serialize request payload.
        raise NotImplementedError()
    def response_to_native(self, response):
        # Hook: deserialize response body.
        raise NotImplementedError()
    def get_iterator_list(self, response_data):
        # Hook: extract the list of items for pagination.
        raise NotImplementedError()
    def get_iterator_next_request_kwargs(self, iterator_request_kwargs,
                                         response_data, response):
        # Hook: build kwargs for fetching the next page.
        raise NotImplementedError()
class FormAdapterMixin(object):
    """Adapter mixin for APIs exchanging form-encoded data."""
    def format_data_to_request(self, data):
        # Pass through untouched; the HTTP layer form-encodes dicts.
        return data
    def response_to_native(self, response):
        # Expose the raw body under a single 'text' key.
        return {'text': response.text}
class JSONAdapterMixin(object):
    """Adapter mixin for APIs that speak JSON."""

    def get_request_kwargs(self, api_params, *args, **kwargs):
        # Build on the base adapter's kwargs, then force a JSON content type.
        request_kwargs = super(JSONAdapterMixin, self).get_request_kwargs(
            api_params, *args, **kwargs)
        headers = request_kwargs.setdefault('headers', {})
        headers['Content-Type'] = 'application/json'
        return request_kwargs

    def format_data_to_request(self, data):
        # Serialize the payload; a falsy payload produces no body (None).
        return json.dumps(data) if data else None

    def response_to_native(self, response):
        # An all-whitespace/empty body deserializes to nothing (None).
        return response.json() if response.content.strip() else None
|
vu3jej/tapioca-wrapper
|
tapioca/adapters.py
|
Python
|
mit
| 2,191
|
# Copyright 2013-2020 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from __future__ import print_function
import sys
import itertools
import spack.cmd
import spack.environment as ev
import spack.error
import spack.package
import spack.cmd.common.arguments as arguments
import spack.repo
import spack.store
from spack.database import InstallStatuses
from llnl.util import tty
from llnl.util.tty.colify import colify
description = "remove installed packages"
section = "build"
level = "short"
error_message = """You can either:
a) use a more specific spec, or
b) specify the spec by its hash (e.g. `spack uninstall /hash`), or
c) use `spack uninstall --all` to uninstall ALL matching specs.
"""
# Arguments for display_specs when we find ambiguity
display_args = {
'long': True,
'show_flags': False,
'variants': False,
'indent': 4,
}
def setup_parser(subparser):
    """Attach `spack uninstall` options and epilog to *subparser*."""
    epilog_msg = ("Specs to be uninstalled are specified using the spec syntax"
                  " (`spack help --spec`) and can be identified by their "
                  "hashes. To remove packages that are needed only at build "
                  "time and were not explicitly installed see `spack gc -h`."
                  "\n\nWhen using the --all option ALL packages matching the "
                  "supplied specs will be uninstalled. For instance, "
                  "`spack uninstall --all libelf` uninstalls all the versions "
                  "of `libelf` currently present in Spack's store. If no spec "
                  "is supplied, all installed packages will be uninstalled. "
                  "If used in an environment, all packages in the environment "
                  "will be uninstalled.")
    subparser.epilog = epilog_msg
    subparser.add_argument(
        '-f', '--force', action='store_true', dest='force',
        help="remove regardless of whether other packages or environments "
             "depend on this one")
    # Shared flags: -R/--dependents, -y/--yes-to-all, and positional specs.
    arguments.add_common_arguments(
        subparser, ['recurse_dependents', 'yes_to_all', 'installed_specs'])
    subparser.add_argument(
        '-a', '--all', action='store_true', dest='all',
        help="remove ALL installed packages that match each supplied spec"
    )
def find_matching_specs(env, specs, allow_multiple_matches=False, force=False):
    """Returns a list of specs matching the not necessarily
    concretized specs given from cli

    Args:
        env (Environment): active environment, or ``None`` if there is not one
        specs (list): list of specs to be matched against installed packages
        allow_multiple_matches (bool): if True multiple matches are admitted
        force (bool): accepted for call compatibility; currently unused in
            this function -- NOTE(review): verify whether it can be dropped

    Return:
        list of specs
    """
    # constrain uninstall resolution to current environment if one is active
    hashes = env.all_hashes() if env else None
    # List of specs that match expressions given via command line
    specs_from_cli = []
    has_errors = False
    for spec in specs:
        install_query = [InstallStatuses.INSTALLED, InstallStatuses.DEPRECATED]
        matching = spack.store.db.query_local(spec, hashes=hashes,
                                              installed=install_query)
        # For each spec provided, make sure it refers to only one package.
        # Fail and ask user to be unambiguous if it doesn't
        if not allow_multiple_matches and len(matching) > 1:
            tty.error('{0} matches multiple packages:'.format(spec))
            print()
            spack.cmd.display_specs(matching, **display_args)
            print()
            has_errors = True
        # No installed package matches the query
        if len(matching) == 0 and spec is not any:
            # `spec is any` is the sentinel for "uninstall everything" (-a).
            if env:
                pkg_type = "packages in environment '%s'" % env.name
            else:
                pkg_type = 'installed packages'
            tty.die('{0} does not match any {1}.'.format(spec, pkg_type))
        specs_from_cli.extend(matching)
    if has_errors:
        tty.die(error_message)
    return specs_from_cli
def installed_dependents(specs, env):
    """Map each spec to a list of its installed dependents.

    Args:
        specs (list): list of Specs
        env (Environment): the active environment, or None

    Returns:
        (tuple of dicts): two mappings: one from specs to their dependent
            environments in the active environment (or global scope if
            there is no environment), and one from specs to their
            dependents in *inactive* environments (empty if there is no
            environment
    """
    active_dpts = {}
    inactive_dpts = {}
    env_hashes = set(env.all_hashes()) if env else set()
    all_specs_in_db = spack.store.db.query()
    for spec in specs:
        # `spec in x` is Spec containment: x depends on (or is) spec.
        installed = [x for x in all_specs_in_db if spec in x]
        # separate installed dependents into dpts in this environment and
        # dpts that are outside this environment
        for dpt in installed:
            if dpt not in specs:
                if not env or dpt.dag_hash() in env_hashes:
                    active_dpts.setdefault(spec, set()).add(dpt)
                else:
                    inactive_dpts.setdefault(spec, set()).add(dpt)
    return active_dpts, inactive_dpts
def dependent_environments(specs):
    """Map each spec to environments that depend on it.

    Args:
        specs (list): list of Specs

    Returns:
        (dict): mapping from spec to lists of dependent Environments
    """
    dependents = {}
    # Scan every known environment once, checking each spec against the
    # set of hashes that environment contains.
    for environment in ev.all_environments():
        env_hashes = set(environment.all_hashes())
        for spec in specs:
            if spec.dag_hash() in env_hashes:
                dependents.setdefault(spec, []).append(environment)
    return dependents
def inactive_dependent_environments(spec_envs):
    """Strip the active environment from a dependent map.

    Take the output of ``dependent_environment()`` and remove the active
    environment from all mappings. Remove any specs in the map that now
    have no dependent environments. Return the result.

    Args:
        (dict): mapping from spec to lists of dependent Environments

    Returns:
        (dict): mapping from spec to lists of *inactive* dependent Environments
    """
    result = {}
    for spec, envs in spec_envs.items():
        stale_envs = [e for e in envs if not e.active]
        # Drop specs whose only dependents were active environments.
        if stale_envs:
            result[spec] = stale_envs
    return result
def _remove_from_env(spec, env):
    """Remove a spec from an environment if it is a root."""
    try:
        # try removing the spec from the current active
        # environment. this will fail if the spec is not a root
        env.remove(spec, force=True)
    except ev.SpackEnvironmentError:
        pass  # ignore non-root specs
def do_uninstall(env, specs, force):
    """Uninstalls all the specs in a list.

    Args:
        env (Environment): active environment, or ``None`` if there is not one
        specs (list): list of specs to be uninstalled
        force (bool): force uninstallation (boolean)
    """
    packages = []
    for item in specs:
        try:
            # should work if package is known to spack
            packages.append(item.package)
        except spack.repo.UnknownEntityError:
            # The package.py file has gone away -- but still
            # want to uninstall.
            spack.package.Package.uninstall_by_spec(item, force=True)

    # A package is ready to be uninstalled when nothing else references it,
    # unless we are requested to force uninstall it.
    # (Plain ``def``s instead of assigned lambdas -- PEP 8 E731.)
    if force:
        def is_ready(dag_hash):
            return True
    else:
        def is_ready(dag_hash):
            return not spack.store.db.query_by_spec_hash(dag_hash)[1].ref_count

    # Repeatedly peel off and uninstall the packages that have no remaining
    # dependents, until everything is removed or no progress can be made.
    while packages:
        ready = [x for x in packages if is_ready(x.spec.dag_hash())]
        if not ready:
            msg = 'unexpected error [cannot proceed uninstalling specs with' \
                  ' remaining dependents {0}]'
            msg = msg.format(', '.join(x.name for x in packages))
            raise spack.error.SpackError(msg)
        packages = [x for x in packages if x not in ready]
        for item in ready:
            item.do_uninstall(force=force)
def get_uninstall_list(args, specs, env):
    """Resolve CLI specs into what to uninstall and what to merely remove.

    Returns a pair ``(uninstall_list, removes)``: specs to fully uninstall,
    and specs to only remove from the active environment because other
    environments or packages outside it still need them.
    """
    # Gets the list of installed specs that match the ones give via cli
    # args.all takes care of the case where '-a' is given in the cli
    uninstall_list = find_matching_specs(env, specs, args.all, args.force)
    # Takes care of '-R'
    active_dpts, inactive_dpts = installed_dependents(uninstall_list, env)
    # if we are in the global scope, we complain if you try to remove a
    # spec that's in an environment.  If we're in an environment, we'll
    # just *remove* it from the environment, so we ignore this
    # error when *in* an environment
    spec_envs = dependent_environments(uninstall_list)
    spec_envs = inactive_dependent_environments(spec_envs)
    # Process spec_dependents and update uninstall_list
    has_error = not args.force and (
        (active_dpts and not args.dependents)  # dependents in the current env
        or (not env and spec_envs)  # there are environments that need specs
    )
    # say why each problem spec is needed
    if has_error:
        specs = set(active_dpts)
        if not env:
            specs.update(set(spec_envs))  # environments depend on this
        for i, spec in enumerate(sorted(specs)):
            # space out blocks of reasons
            if i > 0:
                print()
            spec_format = '{name}{@version}{%compiler}{/hash:7}'
            tty.info("Will not uninstall %s" % spec.cformat(spec_format),
                     format='*r')
            dependents = active_dpts.get(spec)
            if dependents:
                print('The following packages depend on it:')
                spack.cmd.display_specs(dependents, **display_args)
            if not env:
                envs = spec_envs.get(spec)
                if envs:
                    print('It is used by the following environments:')
                    colify([e.name for e in envs], indent=4)
        msgs = []
        if active_dpts:
            msgs.append(
                'use `spack uninstall --dependents` to remove dependents too')
        if spec_envs:
            msgs.append('use `spack env remove` to remove from environments')
        print()
        tty.die('There are still dependents.', *msgs)
    elif args.dependents:
        # -R/--dependents: pull every active dependent into the list too.
        for spec, lst in active_dpts.items():
            uninstall_list.extend(lst)
        uninstall_list = list(set(uninstall_list))
    # only force-remove (don't completely uninstall) specs that still
    # have external dependent envs or pkgs
    removes = set(inactive_dpts)
    if env:
        removes.update(spec_envs)
    # remove anything in removes from the uninstall list
    uninstall_list = set(uninstall_list) - removes
    return uninstall_list, removes
def uninstall_specs(args, specs):
    """Resolve ``specs`` and uninstall/remove them.

    Specs on the remove list are only dropped from the active environment
    (not uninstalled from the store); specs on the uninstall list are
    removed from the environment first and then actually uninstalled.

    Args:
        args: parsed command-line arguments (``yes_to_all``, ``force``, ...)
        specs: abstract specs requested by the user
    """
    env = ev.get_env(args, 'uninstall')
    uninstall_list, remove_list = get_uninstall_list(args, specs, env)
    anything_to_do = set(uninstall_list).union(set(remove_list))

    if not anything_to_do:
        # BUGFIX: grammar of the user-facing message ("no package" ->
        # "no packages").
        tty.warn('There are no packages to uninstall.')
        return

    if not args.yes_to_all:
        confirm_removal(anything_to_do)

    if env:
        # Remove all the specs that are supposed to be uninstalled or just
        # removed.
        with env.write_transaction():
            for spec in itertools.chain(remove_list, uninstall_list):
                _remove_from_env(spec, env)
            env.write()

    # Uninstall everything on the list
    do_uninstall(env, uninstall_list, args.force)
def confirm_removal(specs):
    """Show the specs scheduled for removal and ask the user to confirm.

    Exits the process (status 0) if the user declines.

    Args:
        specs (list): specs to be removed
    """
    tty.msg('The following packages will be uninstalled:\n')
    spack.cmd.display_specs(specs, **display_args)
    print('')
    proceed = tty.get_yes_or_no('Do you want to proceed?', default=False)
    if proceed:
        return
    tty.msg('Aborting uninstallation')
    sys.exit(0)
def uninstall(parser, args):
    """Entry point for ``spack uninstall``."""
    if not (args.specs or args.all):
        tty.die('uninstall requires at least one package argument.',
                ' Use `spack uninstall --all` to uninstall ALL packages.')

    if args.specs:
        specs = spack.cmd.parse_specs(args.specs)
    else:
        # [any] here handles the --all case by forcing all specs to be
        # returned by the matching code.
        specs = [any]
    uninstall_specs(args, specs)
|
rspavel/spack
|
lib/spack/spack/cmd/uninstall.py
|
Python
|
lgpl-2.1
| 12,598
|
# -*- coding: utf-8 -*-
"""
***************************************************************************
SelectByAttribute.py
---------------------
Date : May 2010
Copyright : (C) 2010 by Michael Minn
Email : pyqgis at michaelminn dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Michael Minn'
__date__ = 'May 2010'
__copyright__ = '(C) 2010, Michael Minn'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
from qgis.PyQt.QtCore import QVariant
from qgis.core import (QgsExpression,
QgsProcessingException,
QgsProcessingParameterVectorLayer,
QgsProcessingParameterField,
QgsProcessingParameterEnum,
QgsProcessingParameterString,
QgsProcessingOutputVectorLayer)
from processing.algs.qgis.QgisAlgorithm import QgisAlgorithm
class SelectByAttribute(QgisAlgorithm):
    """Select features of a vector layer by comparing one attribute against
    a constant value with a chosen operator.
    """

    INPUT = 'INPUT'
    FIELD = 'FIELD'
    OPERATOR = 'OPERATOR'
    VALUE = 'VALUE'
    OUTPUT = 'OUTPUT'

    # Canonical (non-localized) operator names; indexes must stay aligned
    # with the localized list built in initAlgorithm().
    OPERATORS = ['=',
                 '!=',
                 '>',
                 '>=',
                 '<',
                 '<=',
                 'begins with',
                 'contains',
                 'is null',
                 'is not null',
                 'does not contain'
                 ]
    # Operators that are only meaningful on string fields.
    STRING_OPERATORS = ['begins with',
                        'contains',
                        'does not contain']

    def tags(self):
        return self.tr('select,attribute,value,contains,null,field').split(',')

    def group(self):
        return self.tr('Vector selection')

    def groupId(self):
        return 'vectorselection'

    def __init__(self):
        super().__init__()

    def initAlgorithm(self, config=None):
        # Localized operator labels, index-aligned with OPERATORS above.
        self.i18n_operators = ['=',
                               '!=',
                               '>',
                               '>=',
                               '<',
                               '<=',
                               self.tr('begins with'),
                               self.tr('contains'),
                               self.tr('is null'),
                               self.tr('is not null'),
                               self.tr('does not contain')
                               ]
        self.addParameter(QgsProcessingParameterVectorLayer(self.INPUT, self.tr('Input layer')))
        self.addParameter(QgsProcessingParameterField(self.FIELD,
                                                      self.tr('Selection attribute'), parentLayerParameterName=self.INPUT))
        self.addParameter(QgsProcessingParameterEnum(self.OPERATOR,
                                                     self.tr('Operator'), self.i18n_operators))
        self.addParameter(QgsProcessingParameterString(self.VALUE, self.tr('Value')))
        self.addOutput(QgsProcessingOutputVectorLayer(self.OUTPUT, self.tr('Selected (attribute)')))

    def name(self):
        return 'selectbyattribute'

    def displayName(self):
        return self.tr('Select by attribute')

    def processAlgorithm(self, parameters, context, feedback):
        """Build a QGIS expression from field/operator/value and select the
        matching features on the input layer.

        Raises:
            QgsProcessingException: if a string-only operator is applied to
                a non-string field, or the built expression fails to parse.
        """
        layer = self.parameterAsVectorLayer(parameters, self.INPUT, context)
        fieldName = self.parameterAsString(parameters, self.FIELD, context)
        operator = self.OPERATORS[self.parameterAsEnum(parameters, self.OPERATOR, context)]
        value = self.parameterAsString(parameters, self.VALUE, context)

        fields = layer.fields()
        idx = layer.fields().lookupField(fieldName)
        fieldType = fields[idx].type()

        if fieldType != QVariant.String and operator in self.STRING_OPERATORS:
            op = ''.join(['"%s", ' % o for o in self.STRING_OPERATORS])
            raise QgsProcessingException(
                self.tr('Operators {0} can be used only with string fields.').format(op))

        field_ref = QgsExpression.quotedColumnRef(fieldName)
        quoted_val = QgsExpression.quotedValue(value)
        # BUGFIX: the LIKE patterns used to embed the raw value, so a value
        # containing a single quote produced an unparsable expression.
        # Double up quotes the same way QgsExpression.quotedValue does.
        like_val = value.replace("'", "''")

        if operator == 'is null':
            expression_string = '{} IS NULL'.format(field_ref)
        elif operator == 'is not null':
            expression_string = '{} IS NOT NULL'.format(field_ref)
        elif operator == 'begins with':
            expression_string = """%s LIKE '%s%%'""" % (field_ref, like_val)
        elif operator == 'contains':
            expression_string = """%s LIKE '%%%s%%'""" % (field_ref, like_val)
        elif operator == 'does not contain':
            expression_string = """%s NOT LIKE '%%%s%%'""" % (field_ref, like_val)
        else:
            expression_string = '{} {} {}'.format(field_ref, operator, quoted_val)

        expression = QgsExpression(expression_string)
        if expression.hasParserError():
            raise QgsProcessingException(expression.parserErrorString())
        layer.selectByExpression(expression_string)

        return {self.OUTPUT: parameters[self.INPUT]}
|
spaceof7/QGIS
|
python/plugins/processing/algs/qgis/SelectByAttribute.py
|
Python
|
gpl-2.0
| 5,585
|
from django.test import TestCase
from ..middleware.frontendcontext import FrontendContextMiddleware
class MockRequest(object):
    """Bare-bones stand-in for a request object.

    The middleware under test only sets an attribute on the request, so an
    empty object is sufficient.
    """
    pass
class FrontendContextMiddlewareTests(TestCase):
    def test_middleware_frontend_context_dict(self):
        """Middleware sets frontend_context dict on request"""
        middleware = FrontendContextMiddleware()
        fake_request = MockRequest()
        middleware.process_request(fake_request)
        self.assertEqual(fake_request.frontend_context, {})
|
1905410/Misago
|
misago/core/tests/test_frontendcontext_middleware.py
|
Python
|
gpl-2.0
| 454
|
# Copyright (c) 2021 by Rocky Bernstein
"""
Python PyPy 3.7 decompiler scanner.
Does some additional massaging of xdis-disassembled instructions to
make things easier for decompilation.
"""
import decompyle3.scanners.scanner37 as scan
# bytecode verification, verify(), uses JUMP_OPS from here
from xdis.opcodes import opcode_37pypy as opc # is this right?
JUMP_OPs = opc.JUMP_OPS
# We base this off of 3.7
class ScannerPyPy37(scan.Scanner37):
    """Scanner for PyPy 3.7 bytecode, reusing the CPython 3.7 scanner."""

    def __init__(self, show_asm):
        # Initialization is the same as CPython 3.7 except for the
        # PyPy-specific opcode table and flags set below.
        scan.Scanner37.__init__(self, show_asm, is_pypy=True)
        self.opc = opc
        self.version = (3, 7)
        self.is_pypy = True
|
rocky/python-uncompyle6
|
uncompyle6/scanners/pypy37.py
|
Python
|
gpl-3.0
| 731
|
from django.shortcuts import render
from django.views.generic import TemplateView
from comments.models import Comments
from comments.forms import CommentsForm
class CommentsView(TemplateView):
    """Render and process the comments form."""

    template_name = 'comments.html'

    def get(self, request):
        """Display an empty comment form."""
        context = {'form': CommentsForm()}
        return render(request, self.template_name, context)

    def post(self, request):
        """Validate the submitted form and store the comment if valid."""
        form = CommentsForm(data=request.POST)
        if form.is_valid():
            data = form.cleaned_data
            Comments.objects.create(
                name=data['name'],
                comment=data['comment']
            )
        # Re-render the same template with the bound form (shows
        # validation errors when invalid).
        return render(request, self.template_name, {'form': form})
|
oy-np/django-contact-form
|
django_contact_form/comments/views.py
|
Python
|
mit
| 866
|
#!/usr/bin/env python
# coding: utf8
# ____ _____
# ________ _________ ____ / __ \/ ___/
# / ___/ _ \/ ___/ __ \/ __ \/ / / /\__ \
# / / / __/ /__/ /_/ / / / / /_/ /___/ /
# /_/ \___/\___/\____/_/ /_/\____//____/
#
# ======================================================================
#
# project: ReconOS
# author: Enno Lübbers, University of Paderborn
# description: API for parsing and manipulationg mhs files.
#
# ======================================================================
import string
import sys
# return a binary representation of a number
# x: number
# n: number of binary digits
def ntob(x, n):
    """Return the binary representation of ``x`` as an ``n``-digit string.

    Only the lowest ``n`` bits of ``x`` are used, most significant bit
    first — matching the behavior of the original bit-shifting loop
    (values wider than ``n`` bits are truncated, negatives use their
    two's-complement low bits).
    """
    if n <= 0:
        # The original loop produced an empty string for n == 0.
        return ""
    return format(x & ((1 << n) - 1), '0{}b'.format(n))
class MHSLine:
    """
    This class represents a single line of a mhs file
    fields: self.type   : the first word on the line (eg. PARAMETER, PORT,...)
            self.content: list containing key/value pairs
    """

    def __init__(self, line, line_num=0):
        """Parse one line of the form 'TYPE KEY1 = VAL1, KEY2 = VAL2, ...'.

        Raises:
            ValueError: if a comma-separated chunk is not a single
                'key = value' assignment.
        """
        words = line.split()
        self.type = words[0]
        self.line_num = line_num
        self.content = []
        # The remainder of the line is a comma-separated list of
        # 'key = value' assignments.
        for chunk in " ".join(words[1:]).split(","):
            # BUGFIX: was map() (an iterator under Python 3, so len()
            # failed) followed by `raise "<string>"`, which is illegal in
            # Python 3; use a list comprehension and a real exception.
            pair = [part.strip() for part in chunk.split("=")]
            if len(pair) != 2:
                raise ValueError("parse error at line %i" % line_num)
            self.content.append((pair[0], pair[1]))

    def __str__(self):
        first_key, first_value = self.content[0]
        result = self.type + " " + first_key + " = " + str(first_value)
        for key, value in self.content[1:]:
            result += ", " + key + " = " + value
        return result
class MHSPCore:
    """Represents one pcore instance inside an MHS file.

    Attributes:
        ip_name:       name of the IP core.
        instance_name: instance name, taken from the INSTANCE parameter.
        content:       list of line objects belonging to this instance.
    """

    def __init__(self, ip_name):
        self.ip_name = ip_name
        self.content = []

    def addLine(self, line):
        """Add a parsed line; the INSTANCE parameter is stored separately."""
        is_instance = (line.type == "PARAMETER"
                       and line.content[0][0] == "INSTANCE")
        if is_instance:
            self.instance_name = line.content[0][1]
        else:
            self.content.append(line)

    def getValue(self, key):
        """Return the value stored under ``key``, or None if absent."""
        wanted = key.lower()
        for line in self.content:
            # MHS files are case insensitive
            if line.content[0][0].lower() == wanted:
                return line.content[0][1]
        return None

    def setValue(self, key, value):
        """Overwrite the value of every line whose key matches exactly.

        NOTE(review): unlike getValue() this comparison is case-sensitive;
        preserved as-is from the original behavior.
        """
        for line in self.content:
            existing_key = line.content[0][0]
            if existing_key == key:
                line.content[0] = (existing_key, value)

    def addEntry(self, name, key, value):
        """Append a brand-new '<name> <key> = <value>' line."""
        self.addLine(MHSLine(name + " " + key + " = " + str(value)))

    def __str__(self):
        parts = ["BEGIN " + self.ip_name]
        parts.append("\tPARAMETER INSTANCE = " + self.instance_name)
        for entry in self.content:
            parts.append("\t" + str(entry))
        parts.append("END")
        return "\n".join(parts) + "\n"
class MHS:
    """
    This class represents a mhs file.
    fields: self.pcores   : list of MHSPCore objects
            self.toplevel : list of MHSLine objects
    """

    def __init__(self, filename=None):
        self.pcores = []
        self.toplevel = [MHSLine("PARAMETER VERSION = 2.1.0", 0)]
        if filename:
            self.parse(filename)

    def isComment(self, line_trimmed):
        """Return True if the (already stripped) line is a comment."""
        return line_trimmed[0] == '#'

    def addPCore(self, pcore):
        """Append a pcore object to this file."""
        self.pcores.append(pcore)

    def parse(self, filename):
        """Parse an MHS file, replacing any previously loaded content.

        Raises:
            ValueError: if the file ends inside a BEGIN/END block.
        """
        STATE_TOPLEVEL = 0
        STATE_PCORE = 1

        state = STATE_TOPLEVEL
        line_count = 0
        self.pcores = []
        self.toplevel = []
        pcore = None

        # BUGFIX: the file handle is now closed via a context manager
        # (previously it leaked), and errors raise ValueError — raising a
        # plain string is illegal in Python 3.
        with open(filename, "r") as fin:
            for line in fin:
                line_count += 1
                line = line.strip()
                if not line:
                    continue
                if self.isComment(line):
                    continue

                words = line.split()
                name = words[0]
                rest = " ".join(words[1:])

                if state == STATE_TOPLEVEL:
                    if name == "BEGIN":
                        state = STATE_PCORE
                        pcore = MHSPCore(rest)
                    else:
                        self.toplevel.append(MHSLine(line, line_count))
                else:
                    if name == "END":
                        state = STATE_TOPLEVEL
                        self.pcores.append(pcore)
                    else:
                        pcore.addLine(MHSLine(line, line_count))

        if state == STATE_PCORE:
            raise ValueError("unexpected end of file: '%s' at line %i"
                             % (filename, line_count))

    def __str__(self):
        result = ""
        for k in self.toplevel:
            result += str(k) + "\n"
        for pcore in self.pcores:
            result += "\n" + str(pcore)
        return result

    def getPCores(self, ip_name):
        """Return every pcore whose IP name matches ``ip_name``."""
        return [pcore for pcore in self.pcores if pcore.ip_name == ip_name]

    def getPCore(self, instance_name):
        """Return the pcore with the given instance name, or None."""
        for pcore in self.pcores:
            if pcore.instance_name == instance_name:
                return pcore
        return None

    def delPCore(self, instance_name):
        """Remove the pcore with the given instance name.

        Raises:
            ValueError: if no pcore with that instance name exists.
        """
        # BUGFIX: this used to call self.getPcore (wrong capitalization),
        # which raised AttributeError on every call.
        pcore = self.getPCore(instance_name)
        if pcore is None:
            raise ValueError("no pcore with instance name '%s'"
                             % instance_name)
        self.pcores.remove(pcore)
|
Daverball/reconos
|
tools/python/mhstools.py
|
Python
|
gpl-2.0
| 7,320
|
import string
class Solution(object):
    def isPalindrome(self, s):
        """
        :type s: str
        :rtype: bool
        """
        # Keep only the alphanumeric characters, lower-cased, then compare
        # the sequence with its reverse.
        cleaned = [ch.lower() for ch in s if ch.isalnum()]
        return cleaned == cleaned[::-1]
# Quick smoke test; expected output: True.
S = Solution()
# BUGFIX: use print() as a function — the old `print expr` statement form
# is a SyntaxError under Python 3, while print(single_arg) behaves
# identically on Python 2 and 3.
print(S.isPalindrome("A man, a plan, a canal: Panama"))
|
Jspsun/LEETCodePractice
|
Python/ValidPalindrome.py
|
Python
|
mit
| 533
|
# Copyright 2017-2021 TensorHub, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import
from __future__ import division
import click
from guild import click_util
from . import runs_stop
@click.command(name="stop")
@runs_stop.runs_stop_params
@click.pass_context
@click_util.use_args
@click_util.render_doc
def stop(ctx, args):
"""{{ runs_stop.stop_runs }}"""
from . import runs_impl
runs_impl.stop_runs(args, ctx)
|
guildai/guild
|
guild/commands/stop.py
|
Python
|
apache-2.0
| 965
|
#
# Copyright 2017-2019 Red Hat, Inc.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
#
# Refer to the README and COPYING files for full details of the license
#
from __future__ import absolute_import
from __future__ import division
import os
import tempfile
from unittest import mock
from monkeypatch import MonkeyPatch
from testlib import VdsmTestCase as TestCaseBase
from testlib import make_config
from testlib import namedTemporaryDir
from testlib import permutations, expandPermutations
from virt import vmfakelib as fake
from vdsm import hugepages
from vdsm import osinfo
from vdsm.common import cpuarch
from vdsm.common import supervdsm
from vdsm.supervdsm_api import virt
from vdsm.virt import vm
_STATE = {
'resv_hugepages': 1234,
'free_hugepages': 1234,
'nr_overcommit_hugepages': 1234,
'surplus_hugepages': 1234,
'nr_hugepages': 1234,
'nr_hugepages_mempolicy': 1234,
'vm.free_hugepages': 1234
}
_VM_HUGEPAGES_METADATA = '''
<ovirt-vm:vm>
<ovirt-vm:custom>
<ovirt-vm:hugepages>{hugepages}</ovirt-vm:hugepages>
</ovirt-vm:custom>
</ovirt-vm:vm>
'''
@expandPermutations
class TestHugepages(TestCaseBase):
    """Tests for the low-level helpers in vdsm.hugepages.

    _size_from_dir is patched to the identity function so that plain
    temporary files/directories can stand in for sysfs entries.
    """

    @permutations([
        # file contents, requested count, expected return
        [b'1024', 1024, 1024],
        [b'1024', -1024, -1024],
        [b'1024', -512, -512],
        [b'1024', 0, 0],
    ])
    @MonkeyPatch(hugepages, '_size_from_dir', lambda x: x)
    @MonkeyPatch(hugepages, 'state', lambda: {2048: _STATE})
    @MonkeyPatch(supervdsm, 'getProxy', lambda: virt)
    def test_alloc(self, default, count, expected):
        """_alloc() adjusts the count stored at `path` by `count`."""
        with tempfile.NamedTemporaryFile() as f:
            f.write(default)
            f.flush()
            ret = hugepages._alloc(count, size=2048, path=f.name)
            f.seek(0)
            self.assertEqual(ret, expected)

    @MonkeyPatch(hugepages, '_size_from_dir', lambda x: x)
    def test_supported(self):
        """supported() reports one entry per hugepage directory found."""
        with namedTemporaryDir() as src:
            # A list of 3 file names, where the files are temporary.
            sizes = [os.path.basename(f.name) for f in [
                tempfile.NamedTemporaryFile(
                    dir=src, delete=False
                ) for _ in range(3)
            ]]
            with mock.patch('{}.open'.format(hugepages.__name__),
                            mock.mock_open(read_data='0'),
                            create=True):
                self.assertEqual(set(hugepages.supported(src)), set(sizes))

    @MonkeyPatch(hugepages, '_size_from_dir', lambda x: x)
    def test_state(self):
        """state() returns the parsed sysfs values for every size found."""
        with namedTemporaryDir() as src:
            # A list of 3 file names, where the files are temporary.
            sizes = [os.path.basename(f.name) for f in [
                tempfile.NamedTemporaryFile(
                    dir=src, delete=False
                ) for _ in range(3)
            ]]
            with mock.patch('{}.open'.format(hugepages.__name__),
                            mock.mock_open(read_data='1234'),
                            create=True):
                self.assertEqual(len(hugepages.state(src)), len(sizes))
                for value in hugepages.state(src).values():
                    self.assertEqual(value, _STATE)

    @permutations([
        # directory name, expected parsed size
        ['hugepages-2048Kb', 2048],
        ['hugepages-10000Kb', 10000],
        ['hugepages-1Kb', 1],
    ])
    def test_size_from_dir(self, filename, expected):
        """_size_from_dir() extracts the numeric size from the dir name."""
        self.assertEqual(hugepages._size_from_dir(filename), expected)
class TestIntelligentAllocation(TestCaseBase):
    """Tests for hugepages.calculate_required_allocation().

    Each test monkeypatches the hugepages config and state to model a
    particular mix of free, reserved and preallocated pages, then checks
    how many *new* pages must be allocated for the incoming VM.
    """

    @MonkeyPatch(hugepages, 'config',
                 make_config([
                     ("performance", "use_preallocated_hugepages", "true"),
                     ("performance", "reserved_hugepage_count", "9"),
                     ("performance", "reserved_hugepage_size", "2048"),
                 ]))
    @MonkeyPatch(hugepages, 'state', lambda:
                 {2048: {'nr_hugepages': 12,
                         'free_hugepages': 12}
                  })
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    def test_allocation_1_page(self):
        vm_hugepagesz = 2048
        vm_hugepages = 4
        vdsm_vms = vm_hugepages + 0
        cif = FakeClientIF({0: FakeVM(vdsm_vms, vm_hugepagesz)})
        # We should allocate 1 new hugepage:
        # - 12 total (and also free) pages
        # - 9 pages are reserved
        # - vm requires 4 pages; we allocate 1 to avoid touching reserved pages
        self.assertEqual(hugepages.calculate_required_allocation(
            cif, vm_hugepages, vm_hugepagesz), 1
        )

    @MonkeyPatch(hugepages, 'config',
                 make_config([
                     ("performance", "use_preallocated_hugepages", "true"),
                     ("performance", "reserved_hugepage_count", "4"),
                     ("performance", "reserved_hugepage_size", "2048"),
                 ]))
    @MonkeyPatch(hugepages, 'state', lambda:
                 {2048: {'nr_hugepages': 8,
                         'free_hugepages': 4}
                  })
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    def test_allocation_4_pages(self):
        vm_hugepagesz = 2048
        vm_hugepages = 4
        vdsm_vms = vm_hugepages + 4
        cif = FakeClientIF({0: FakeVM(vdsm_vms, vm_hugepagesz)})
        # We expect 4 new hugepages:
        # - there are 8 hugepages
        # - 4 are free, 4 used by vdsm, 4 reserved -> the free ones are
        #   reserved
        # - vm requires 4 new hugepages; we can't touch reserved pages
        self.assertEqual(hugepages.calculate_required_allocation(
            cif, vm_hugepages, vm_hugepagesz), 4
        )

    @MonkeyPatch(hugepages, 'config',
                 make_config([
                     ("performance", "use_preallocated_hugepages", "true"),
                     ("performance", "reserved_hugepage_count", "12"),
                     ("performance", "reserved_hugepage_size", "2048"),
                 ]))
    @MonkeyPatch(hugepages, 'state', lambda:
                 {2048: {'nr_hugepages': 16,
                         'free_hugepages': 4}
                  })
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    def test_allocation_0_pages(self):
        vm_hugepagesz = 2048
        vm_hugepages = 4
        vdsm_vms = vm_hugepages + 0
        cif = FakeClientIF({0: FakeVM(vdsm_vms, vm_hugepagesz)})
        # We expect no new hugepages:
        # - there are 4 free hugepages
        # - vdsm doesn't use any pages (yet)
        # - 12 are reserved and used
        self.assertEqual(hugepages.calculate_required_allocation(
            cif, vm_hugepages, vm_hugepagesz), 0
        )

    @MonkeyPatch(hugepages, 'config',
                 make_config([
                     ("performance", "use_preallocated_hugepages", "true"),
                     ("performance", "reserved_hugepage_count", "12"),
                     ("performance", "reserved_hugepage_size", "2048"),
                 ]))
    @MonkeyPatch(hugepages, 'state', lambda:
                 {2048: {'nr_hugepages': 16,
                         'free_hugepages': 4}
                  })
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    def test_allocation_0_pages_mixedenv(self):
        # Simulate the code, 0 means that the VM doesn't have hugepages...
        vm_hugepagesz = 0
        # but let's introduce something that would normally throw us off.
        vm_hugepages = 4
        vdsm_vms = vm_hugepages + 0
        cif = FakeClientIF({0: FakeVM(vdsm_vms, vm_hugepagesz)})
        # We expect no new hugepages:
        # - there are 4 free hugepages
        # - vdsm doesn't use any pages (yet)
        # - 12 are reserved and used
        self.assertEqual(hugepages.calculate_required_allocation(
            cif, vm_hugepages, vm_hugepagesz), 0
        )

    @MonkeyPatch(hugepages, 'config',
                 make_config([
                     ("performance", "use_preallocated_hugepages", "true"),
                     ("performance", "reserved_hugepage_count", "4"),
                     ("performance", "reserved_hugepage_size", "1048576"),
                 ]))
    @MonkeyPatch(hugepages, 'state', lambda:
                 {2048: {'nr_hugepages': 4,
                         'free_hugepages': 4}
                  })
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    def test_allocation_different_size_reserved(self):
        vm_hugepagesz = 2048
        vm_hugepages = 4
        cif = FakeClientIF({0: FakeVM(12, 1048576)})
        # We expect no new hugepages:
        # - pages of different size are reserved
        # - VMs that exist use different hugepage size
        # - we have 4 free hugepages of correct size
        self.assertEqual(hugepages.calculate_required_allocation(
            cif, vm_hugepages, vm_hugepagesz), 0
        )

    @MonkeyPatch(hugepages, 'config',
                 make_config([
                     ("performance", "use_preallocated_hugepages", "false"),
                 ]))
    @MonkeyPatch(hugepages, 'state', lambda:
                 {2048: {'nr_hugepages': 4,
                         'free_hugepages': 4}
                  })
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    def test_pure_dynamic_hugepages(self):
        vm_hugepagesz = 2048
        vm_hugepages = 4
        cif = FakeClientIF({0: FakeVM(0, 1048576)})
        # Fully dynamic, allocate pages for whole VM.
        self.assertEqual(hugepages.calculate_required_allocation(
            cif, vm_hugepages, vm_hugepagesz), 4
        )
class TestIntelligentDeallocation(TestCaseBase):
    """Tests for hugepages.calculate_required_deallocation().

    Boot-time allocated pages (from the kernel command line) and reserved
    pages must never be deallocated; only dynamically allocated pages in
    the VM's own domain may be released.
    """

    @MonkeyPatch(hugepages, 'config',
                 make_config([
                     ("performance", "use_preallocated_hugepages", "true"),
                     ("performance", "reserved_hugepage_count", "13"),
                     ("performance", "reserved_hugepage_size", "1048576"),
                 ]))
    @MonkeyPatch(hugepages, 'state', lambda:
                 {1048576: {'nr_hugepages': 17,
                            'free_hugepages': 0}
                  })
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    @MonkeyPatch(osinfo, 'kernel_args_dict', lambda:
                 {'hugepagesz': '1G', 'hugepages': '16'})
    def test_deallocation_1_page(self):
        vm_hugepagesz = 1048576
        vm_hugepages = 4
        self.assertEqual(hugepages.calculate_required_deallocation(
            vm_hugepages, vm_hugepagesz), 1
        )
        # - 17 pages in the system, 16 allocated on boot time
        # - VM uses 4 pages, 13 are reserved
        # - since we don't touch boot-time allocated pages, we're only able to
        #   deallocate a single page

    @MonkeyPatch(hugepages, 'config',
                 make_config([
                     ("performance", "use_preallocated_hugepages", "true"),
                     ("performance", "reserved_hugepage_count", "12"),
                     ("performance", "reserved_hugepage_size", "1048576"),
                 ]))
    @MonkeyPatch(hugepages, 'state', lambda:
                 {1048576: {'nr_hugepages': 17,
                            'free_hugepages': 0}
                  })
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    @MonkeyPatch(osinfo, 'kernel_args_dict', lambda:
                 {})
    def test_deallocation_4_pages_no_cmdline(self):
        vm_hugepagesz = 1048576
        vm_hugepages = 4
        # There are 17 pages in the system, none of which were allocated on
        # boot.
        # - the VM consumed 4 pages, no other consumption
        # - there are 12 pages reserved
        # - that means we could deallocate up to 5 pages, but we don't touch
        #   pages out of VM's domain - therefore deallocating only 4 pages
        self.assertEqual(hugepages.calculate_required_deallocation(
            vm_hugepages, vm_hugepagesz), 4
        )

    @MonkeyPatch(hugepages, 'config',
                 make_config([
                     ("performance", "use_preallocated_hugepages", "true"),
                     ("performance", "reserved_hugepage_count", "12"),
                     ("performance", "reserved_hugepage_size", "1048576"),
                 ]))
    @MonkeyPatch(hugepages, 'state', lambda:
                 {1048576: {'nr_hugepages': 20,
                            'free_hugepages': 0}
                  })
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    @MonkeyPatch(osinfo, 'kernel_args_dict', lambda:
                 {'hugepagesz': '1G', 'hugepages': '16'})
    def test_deallocation_4_pages(self):
        vm_hugepagesz = 1048576
        vm_hugepages = 4
        # The VM was solely in the dynamic allocation space (16 preallocated,
        # 12 reserved but 20 total), we can fully deallocate it.
        self.assertEqual(hugepages.calculate_required_deallocation(
            vm_hugepages, vm_hugepagesz), 4
        )

    @MonkeyPatch(hugepages, 'config',
                 make_config([
                     ("performance", "use_preallocated_hugepages", "false"),
                 ]))
    @MonkeyPatch(hugepages, 'state', lambda:
                 {1048576: {'nr_hugepages': 16,
                            'free_hugepages': 16}
                  })
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    @MonkeyPatch(osinfo, 'kernel_args_dict', lambda:
                 {'hugepagesz': '1G', 'hugepages': '16'})
    def test_pure_dynamic_hugepages(self):
        vm_hugepagesz = 1048576
        vm_hugepages = 4
        # Fully dynamic deallocation (= deallocate the size of the VM)
        self.assertEqual(hugepages.calculate_required_deallocation(
            vm_hugepages, vm_hugepagesz), 4
        )
@expandPermutations
class TestVmHugepages(TestCaseBase):
    """Tests for the hugepage-related properties of Vm objects, driven by
    the <hugepages> value embedded in the VM metadata template."""

    @permutations([
        # metadata <hugepages> value, expected Vm.hugepages
        [-1, False],
        [0, False],
        [1, True],
        [2048, True],
        [1048576, True],
    ])
    def test_hugepages_allowed(self, hugepages, expected):
        # Only a positive <hugepages> value enables hugepages for the VM.
        metadata = _VM_HUGEPAGES_METADATA.format(hugepages=hugepages)
        with fake.VM(metadata=metadata) as vm:
            self.assertEqual(vm.hugepages, expected)

    @permutations([
        # metadata value, expected Vm.hugepagesz (unsupported sizes fall
        # back to 2048 per the patched supported() below)
        [1, 2048],
        [2048, 2048],
        [1048576, 1048576],
        [10485760, 2048],
    ])
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    @MonkeyPatch(hugepages, 'supported', lambda: [2048, 1048576])
    def test_hugepagesz(self, hugepages, expected):
        metadata = _VM_HUGEPAGES_METADATA.format(hugepages=hugepages)
        with fake.VM(metadata=metadata) as vm:
            self.assertEqual(vm.hugepagesz, expected)

    @permutations([
        # hugepages metadata value, VM memory (MB), expected page count
        [1, 1, 1],
        [1, 3, 2],
        [1048576, 1023, 1],
        [1048576, 1025, 2],
    ])
    @MonkeyPatch(cpuarch, 'real', lambda: cpuarch.X86_64)
    @MonkeyPatch(hugepages, 'supported', lambda: [2048, 1048576])
    def test_nr_hugepages(self, hugepages, memory, expected):
        with mock.patch.object(vm.Vm, 'mem_size_mb', lambda _: memory):
            metadata = _VM_HUGEPAGES_METADATA.format(hugepages=hugepages)
            with fake.VM(metadata=metadata) as fakevm:
                self.assertEqual(fakevm.nr_hugepages, expected)
class FakeClientIF(object):
    """Minimal clientIF stand-in that only exposes the VM container."""

    def __init__(self, vmContainer):
        # Mapping of VM id -> VM object, returned as-is by getVMs().
        self.vmContainer = vmContainer

    def getVMs(self):
        """Return the container of VMs handed to the constructor."""
        return self.vmContainer
class FakeVM(object):
    """Minimal VM stand-in carrying only hugepage accounting fields."""

    def __init__(self, hugepages, hugepagesz):
        # Number of hugepages this fake VM consumes, and their size.
        self.nr_hugepages = hugepages
        self.hugepagesz = hugepagesz
|
oVirt/vdsm
|
tests/hugepages_test.py
|
Python
|
gpl-2.0
| 16,013
|
# -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Meta data of line-bot-sdk."""
from __future__ import unicode_literals
__version__ = '1.2.0'
__author__ = 'LINE Corporation'
__copyright__ = 'Copyright 2016, LINE Corporation'
__license__ = 'Apache 2.0'
__all__ = (
'__version__'
)
|
monhustla/line-bot-sdk-python
|
linebot/__about__.py
|
Python
|
apache-2.0
| 821
|
# PyAlgoTrade
#
# Copyright 2011-2015 Gabriel Martin Becedillas Ruiz
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
.. moduleauthor:: Gabriel Martin Becedillas Ruiz <gabriel.becedillas@gmail.com>
"""
import datetime
import math
from distutils import version
import pytz
import numpy
import common
import strategy_test
import position_test
from pyalgotrade.barfeed import ninjatraderfeed
from pyalgotrade.barfeed import csvfeed
from pyalgotrade.stratanalyzer import trades
from pyalgotrade import broker
from pyalgotrade.broker import backtesting
def buildUTCDateTime(year, month, day, hour, minute):
    """Build a timezone-aware UTC datetime (seconds are zero)."""
    naive = datetime.datetime(year, month, day, hour, minute)
    return pytz.utc.localize(naive)
class TradesAnalyzerTestCase(common.TestCase):
TestInstrument = "spy"
    def __loadBarFeed(self):
        """Build a minute-frequency NinjaTrader feed of 2011 SPY data.

        A USEquitiesRTH bar filter restricts the feed to regular trading
        hours.
        """
        ret = ninjatraderfeed.Feed(ninjatraderfeed.Frequency.MINUTE)
        barFilter = csvfeed.USEquitiesRTH()
        ret.setBarFilter(barFilter)
        ret.addBarsFromCSV(TradesAnalyzerTestCase.TestInstrument, common.get_data_file_path("nt-spy-minute-2011.csv"))
        return ret
    def __createStrategy(self):
        """Return an order-based test strategy with $1000 starting cash."""
        barFeed = self.__loadBarFeed()
        return strategy_test.TestStrategy(barFeed, 1000)
    def __createPositionStrategy(self):
        """Return a position-based test strategy with $1000 starting cash."""
        barFeed = self.__loadBarFeed()
        return position_test.TestStrategy(barFeed, TradesAnalyzerTestCase.TestInstrument, 1000)
    def testNoTrades(self):
        """With no orders placed, the analyzer reports zero trades and the
        broker cash stays at its initial value."""
        strat = self.__createStrategy()
        stratAnalyzer = trades.Trades()
        strat.attachAnalyzer(stratAnalyzer)
        strat.run()
        self.assertTrue(strat.getBroker().getCash() == 1000)
        self.assertTrue(stratAnalyzer.getCount() == 0)
        self.assertTrue(stratAnalyzer.getEvenCount() == 0)
        self.assertTrue(stratAnalyzer.getProfitableCount() == 0)
        self.assertTrue(stratAnalyzer.getUnprofitableCount() == 0)
    def testSomeTrades_Position(self):
        """Position-based variant: two winning trades, one losing trade and
        one still-open position; checks counts, mean/std of P&L and the
        per-trade returns. Entry/exit prices are noted inline."""
        strat = self.__createPositionStrategy()
        stratAnalyzer = trades.Trades()
        strat.attachAnalyzer(stratAnalyzer)
        # Winning trade
        strat.addPosEntry(buildUTCDateTime(2011, 1, 3, 15, 0), strat.enterLong, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.14
        strat.addPosExit(buildUTCDateTime(2011, 1, 3, 15, 16))  # 127.16
        # Losing trade
        strat.addPosEntry(buildUTCDateTime(2011, 1, 3, 15, 30), strat.enterLong, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.2
        strat.addPosExit(buildUTCDateTime(2011, 1, 3, 15, 31))  # 127.16
        # Winning trade
        strat.addPosEntry(buildUTCDateTime(2011, 1, 3, 15, 38), strat.enterLong, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
        strat.addPosExit(buildUTCDateTime(2011, 1, 3, 15, 42))  # 127.26
        # Unfinished trade not closed
        strat.addPosEntry(buildUTCDateTime(2011, 1, 3, 15, 47), strat.enterLong, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.34
        strat.run()
        # Final cash reflects the three closed trades minus the cost of the
        # still-open position.
        self.assertTrue(round(strat.getBroker().getCash(), 2) == round(1000 + (127.16 - 127.14) + (127.16 - 127.2) + (127.26 - 127.16) - 127.34, 2))
        self.assertTrue(stratAnalyzer.getCount() == 3)
        self.assertTrue(stratAnalyzer.getEvenCount() == 0)
        self.assertTrue(round(stratAnalyzer.getAll().mean(), 2) == 0.03)
        self.assertTrue(round(stratAnalyzer.getAll().std(ddof=1), 2) == 0.07)
        self.assertTrue(round(stratAnalyzer.getAll().std(ddof=0), 2) == 0.06)
        self.assertTrue(stratAnalyzer.getProfitableCount() == 2)
        self.assertTrue(round(stratAnalyzer.getProfits().mean(), 2) == 0.06)
        self.assertTrue(round(stratAnalyzer.getProfits().std(ddof=1), 2) == 0.06)
        self.assertTrue(round(stratAnalyzer.getProfits().std(ddof=0), 2) == 0.04)
        self.assertEqual(stratAnalyzer.getPositiveReturns()[0], (127.16 - 127.14) / 127.14)
        self.assertEqual(stratAnalyzer.getPositiveReturns()[1], (127.26 - 127.16) / 127.16)
        self.assertTrue(stratAnalyzer.getUnprofitableCount() == 1)
        self.assertTrue(round(stratAnalyzer.getLosses().mean(), 2) == -0.04)
        # With a single loss the sample std (ddof=1) is NaN on numpy >= 1.6.2
        # and 0 on older releases.
        if version.LooseVersion(numpy.__version__) >= version.LooseVersion("1.6.2"):
            self.assertTrue(math.isnan(stratAnalyzer.getLosses().std(ddof=1)))
        else:
            self.assertTrue(stratAnalyzer.getLosses().std(ddof=1) == 0)
        self.assertTrue(stratAnalyzer.getLosses().std(ddof=0) == 0)
        self.assertEqual(stratAnalyzer.getNegativeReturns()[0], (127.16 - 127.2) / 127.2)
def testSomeTrades(self):
    """Three closed trades (2 winners, 1 loser) plus one still-open position.

    Checks trade counts and the mean/std of per-trade P&L against values
    hand-computed from the bar prices noted inline.
    """
    strat = self.__createStrategy()
    stratAnalyzer = trades.Trades()
    strat.attachAnalyzer(stratAnalyzer)
    # Winning trade
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 0), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.14
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 16), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    # Losing trade
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 30), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.2
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 31), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    # Winning trade
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 38), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 42), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.26
    # Open trade.
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 47), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.34
    strat.run()
    # assertEqual instead of assertTrue(x == y) so failures show both values.
    self.assertEqual(round(strat.getBroker().getCash(), 2), round(1000 + (127.16 - 127.14) + (127.16 - 127.2) + (127.26 - 127.16) - 127.34, 2))
    self.assertEqual(stratAnalyzer.getCount(), 3)
    self.assertEqual(stratAnalyzer.getEvenCount(), 0)
    self.assertEqual(round(stratAnalyzer.getAll().mean(), 2), 0.03)
    self.assertEqual(round(stratAnalyzer.getAll().std(ddof=1), 2), 0.07)
    self.assertEqual(round(stratAnalyzer.getAll().std(ddof=0), 2), 0.06)
    self.assertEqual(stratAnalyzer.getProfitableCount(), 2)
    self.assertEqual(round(stratAnalyzer.getProfits().mean(), 2), 0.06)
    self.assertEqual(round(stratAnalyzer.getProfits().std(ddof=1), 2), 0.06)
    self.assertEqual(round(stratAnalyzer.getProfits().std(ddof=0), 2), 0.04)
    self.assertEqual(stratAnalyzer.getUnprofitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getLosses().mean(), 2), -0.04)
    # With a single losing trade the sample std (ddof=1) is NaN on
    # numpy >= 1.6.2 and 0 on older releases.
    if version.LooseVersion(numpy.__version__) >= version.LooseVersion("1.6.2"):
        self.assertTrue(math.isnan(stratAnalyzer.getLosses().std(ddof=1)))
    else:
        self.assertEqual(stratAnalyzer.getLosses().std(ddof=1), 0)
    self.assertEqual(stratAnalyzer.getLosses().std(ddof=0), 0)
def testSomeTradesWithCommissions(self):
    """Per-trade fixed commissions must be attributed to each closed trade.

    One losing trade, one winning trade and an open position, with a 0.01
    fixed commission per order (0.02 per round-trip trade).
    """
    strat = self.__createStrategy()
    strat.getBroker().setCommission(backtesting.FixedPerTrade(0.01))
    stratAnalyzer = trades.Trades()
    strat.attachAnalyzer(stratAnalyzer)
    # Losing trade
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 30), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.2
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 31), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    # Winning trade
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 38), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 42), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.26
    # Open trade.
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 47), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.34
    strat.run()
    # 5 filled orders -> 5 commissions deducted from cash.
    self.assertEqual(round(strat.getBroker().getCash(), 2), round(1000 + (127.16 - 127.2) + (127.26 - 127.16) - 127.34 - 0.01 * 5, 2))
    self.assertTrue(numpy.array_equal(stratAnalyzer.getCommissionsForAllTrades(), numpy.array([0.02, 0.02])))
    self.assertTrue(numpy.array_equal(stratAnalyzer.getCommissionsForProfitableTrades(), numpy.array([0.02])))
    self.assertTrue(numpy.array_equal(stratAnalyzer.getCommissionsForUnprofitableTrades(), numpy.array([0.02])))
    self.assertTrue(numpy.array_equal(stratAnalyzer.getCommissionsForEvenTrades(), numpy.array([])))
def testProportionalCommissionBug(self):
    """Commissions on orders that span two trades must be split between them."""
    # Regression test for a bug reported by 'Jackson Sam' on 30/Aug/2013.
    strat = self.__createStrategy()
    strat.getBroker().setCommission(backtesting.FixedPerTrade(0.01))
    stratAnalyzer = trades.Trades()
    strat.attachAnalyzer(stratAnalyzer)
    # There are 3 trades here:
    # Trade 1 (Long)
    #   Buy 1 @ 127.16 Commission: 0.01
    #   Sell 1 @ 127.26 Commission: 0.005
    # Trade 2 (Short)
    #   Sell 1 @ 127.26 Commission: 0.005
    #   Buy 1 @ 127.37 Commission: 0.005
    # Trade 3 (Long)
    #   Buy 1 @ 127.37 Commission: 0.005
    #   Sell 1 @ 127.4 Commission: 0.01
    # The 2-share orders flip the position, so each one belongs half to the
    # trade being closed and half to the trade being opened.
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 38), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # Fill at 127.16
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 42), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 2)  # Fill at 127.26
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 53), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 2)  # Fill at 127.37
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 58), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 1)  # Fill at 127.4
    strat.run()
    allReturns = stratAnalyzer.getAllReturns()
    self.assertEqual(round(allReturns[0], 6), 0.000668)
    self.assertEqual(round(allReturns[1], 6), -0.000943)
    self.assertEqual(round(allReturns[2], 6), 0.000118)
def testLongShort(self):
    """A single 2-share SELL closes a long and opens a short in one order."""
    strat = self.__createStrategy()
    stratAnalyzer = trades.Trades()
    strat.attachAnalyzer(stratAnalyzer)
    # Enter long
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 0), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.14
    # Exit long and enter short
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 16), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 2)  # 127.16
    # Exit short
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 30), strat.getBroker().createMarketOrder, broker.Order.Action.BUY_TO_COVER, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.2
    strat.run()
    # assertEqual instead of assertTrue(x == y) so failures show both values.
    self.assertEqual(round(strat.getBroker().getCash(), 2), round(1000 + (127.16 - 127.14) + (127.16 - 127.2), 2))
    self.assertEqual(stratAnalyzer.getCount(), 2)
    self.assertEqual(stratAnalyzer.getEvenCount(), 0)
    self.assertEqual(round(stratAnalyzer.getAll().mean(), 2), -0.01)
    self.assertEqual(round(stratAnalyzer.getAll().std(ddof=1), 4), 0.0424)
    self.assertEqual(stratAnalyzer.getProfitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getProfits().mean(), 2), 0.02)
    self.assertEqual(stratAnalyzer.getUnprofitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getLosses().mean(), 2), -0.04)
def testLongShort2(self):
    """Same P&L as testLongShort, but using two separate 1-share orders."""
    strat = self.__createStrategy()
    stratAnalyzer = trades.Trades()
    strat.attachAnalyzer(stratAnalyzer)
    # Enter long
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 0), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.14
    # Exit long
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 16), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    # Enter short
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 16), strat.getBroker().createMarketOrder, broker.Order.Action.SELL_SHORT, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    # Exit short
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 30), strat.getBroker().createMarketOrder, broker.Order.Action.BUY_TO_COVER, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.2
    strat.run()
    # assertEqual instead of assertTrue(x == y) so failures show both values.
    self.assertEqual(round(strat.getBroker().getCash(), 2), round(1000 + (127.16 - 127.14) + (127.16 - 127.2), 2))
    self.assertEqual(stratAnalyzer.getCount(), 2)
    self.assertEqual(stratAnalyzer.getEvenCount(), 0)
    self.assertEqual(round(stratAnalyzer.getAll().mean(), 2), -0.01)
    self.assertEqual(round(stratAnalyzer.getAll().std(ddof=1), 4), 0.0424)
    self.assertEqual(stratAnalyzer.getProfitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getProfits().mean(), 2), 0.02)
    self.assertEqual(stratAnalyzer.getUnprofitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getLosses().mean(), 2), -0.04)
def testShortLong(self):
    """A single 2-share BUY_TO_COVER closes a short and opens a long."""
    strat = self.__createStrategy()
    stratAnalyzer = trades.Trades()
    strat.attachAnalyzer(stratAnalyzer)
    # Enter short
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 0), strat.getBroker().createMarketOrder, broker.Order.Action.SELL_SHORT, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.14
    # Exit short and enter long
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 16), strat.getBroker().createMarketOrder, broker.Order.Action.BUY_TO_COVER, TradesAnalyzerTestCase.TestInstrument, 2)  # 127.16
    # Exit long
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 30), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.2
    strat.run()
    # assertEqual instead of assertTrue(x == y) so failures show both values.
    self.assertEqual(round(strat.getBroker().getCash(), 2), round(1000 + (127.14 - 127.16) + (127.2 - 127.16), 2))
    self.assertEqual(stratAnalyzer.getCount(), 2)
    self.assertEqual(stratAnalyzer.getEvenCount(), 0)
    self.assertEqual(round(stratAnalyzer.getAll().mean(), 2), 0.01)
    self.assertEqual(round(stratAnalyzer.getAll().std(ddof=1), 4), 0.0424)
    self.assertEqual(stratAnalyzer.getProfitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getProfits().mean(), 2), 0.04)
    self.assertEqual(stratAnalyzer.getUnprofitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getLosses().mean(), 2), -0.02)
def testShortLong2(self):
    """Same P&L as testShortLong, but using two separate 1-share orders."""
    strat = self.__createStrategy()
    stratAnalyzer = trades.Trades()
    strat.attachAnalyzer(stratAnalyzer)
    # Enter short
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 0), strat.getBroker().createMarketOrder, broker.Order.Action.SELL_SHORT, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.14
    # Exit short
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 16), strat.getBroker().createMarketOrder, broker.Order.Action.BUY_TO_COVER, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    # Enter long
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 16), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    # Exit long
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 30), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.2
    strat.run()
    # assertEqual instead of assertTrue(x == y) so failures show both values.
    self.assertEqual(round(strat.getBroker().getCash(), 2), round(1000 + (127.14 - 127.16) + (127.2 - 127.16), 2))
    self.assertEqual(stratAnalyzer.getCount(), 2)
    self.assertEqual(stratAnalyzer.getEvenCount(), 0)
    self.assertEqual(round(stratAnalyzer.getAll().mean(), 2), 0.01)
    self.assertEqual(round(stratAnalyzer.getAll().std(ddof=1), 4), 0.0424)
    self.assertEqual(stratAnalyzer.getProfitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getProfits().mean(), 2), 0.04)
    self.assertEqual(stratAnalyzer.getUnprofitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getLosses().mean(), 2), -0.02)
def testLong2(self):
    """Scaling into a long position then exiting counts as ONE trade."""
    strat = self.__createStrategy()
    stratAnalyzer = trades.Trades()
    strat.attachAnalyzer(stratAnalyzer)
    # Enter long
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 0), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.14
    # Extend long position
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 16), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    # Exit long
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 30), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 2)  # 127.2
    strat.run()
    # assertEqual instead of assertTrue(x == y) so failures show both values.
    self.assertEqual(round(strat.getBroker().getCash(), 2), round(1000 + (127.2 - 127.14) + (127.2 - 127.16), 2))
    self.assertEqual(stratAnalyzer.getCount(), 1)
    self.assertEqual(stratAnalyzer.getEvenCount(), 0)
    self.assertEqual(round(stratAnalyzer.getAll().mean(), 2), 0.1)
    self.assertEqual(stratAnalyzer.getProfitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getProfits().mean(), 2), 0.1)
    self.assertEqual(stratAnalyzer.getUnprofitableCount(), 0)
def testLong3(self):
    """Scaling out of a long position in two sells still counts as ONE trade."""
    strat = self.__createStrategy()
    stratAnalyzer = trades.Trades()
    strat.attachAnalyzer(stratAnalyzer)
    # Enter long
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 0), strat.getBroker().createMarketOrder, broker.Order.Action.BUY, TradesAnalyzerTestCase.TestInstrument, 2)  # 127.14
    # Decrease long position
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 16), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    # Exit long
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 30), strat.getBroker().createMarketOrder, broker.Order.Action.SELL, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.2
    strat.run()
    # assertEqual instead of assertTrue(x == y) so failures show both values.
    self.assertEqual(round(strat.getBroker().getCash(), 2), round(1000 + (127.2 - 127.14) + (127.16 - 127.14), 2))
    self.assertEqual(stratAnalyzer.getCount(), 1)
    self.assertEqual(stratAnalyzer.getEvenCount(), 0)
    self.assertEqual(round(stratAnalyzer.getAll().mean(), 2), 0.08)
    self.assertEqual(stratAnalyzer.getProfitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getProfits().mean(), 2), 0.08)
    self.assertEqual(stratAnalyzer.getUnprofitableCount(), 0)
def testShort2(self):
    """Scaling into a short position then covering counts as ONE trade."""
    strat = self.__createStrategy()
    stratAnalyzer = trades.Trades()
    strat.attachAnalyzer(stratAnalyzer)
    # Enter short
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 0), strat.getBroker().createMarketOrder, broker.Order.Action.SELL_SHORT, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.14
    # Extend short position
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 16), strat.getBroker().createMarketOrder, broker.Order.Action.SELL_SHORT, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    # Exit short
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 30), strat.getBroker().createMarketOrder, broker.Order.Action.BUY_TO_COVER, TradesAnalyzerTestCase.TestInstrument, 2)  # 127.2
    strat.run()
    # assertEqual instead of assertTrue(x == y) so failures show both values.
    self.assertEqual(round(strat.getBroker().getCash(), 2), round(1000 + (127.14 - 127.2) + (127.16 - 127.2), 2))
    self.assertEqual(stratAnalyzer.getCount(), 1)
    self.assertEqual(stratAnalyzer.getEvenCount(), 0)
    self.assertEqual(round(stratAnalyzer.getAll().mean(), 2), -0.1)
    self.assertEqual(stratAnalyzer.getUnprofitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getLosses().mean(), 2), -0.1)
    self.assertEqual(stratAnalyzer.getProfitableCount(), 0)
def testShort3(self):
    """Scaling out of a short in two covers still counts as ONE trade."""
    strat = self.__createStrategy()
    stratAnalyzer = trades.Trades()
    strat.attachAnalyzer(stratAnalyzer)
    # Enter short
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 0), strat.getBroker().createMarketOrder, broker.Order.Action.SELL_SHORT, TradesAnalyzerTestCase.TestInstrument, 2)  # 127.14
    # Decrease short position
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 16), strat.getBroker().createMarketOrder, broker.Order.Action.BUY_TO_COVER, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.16
    # Exit short
    strat.addOrder(buildUTCDateTime(2011, 1, 3, 15, 30), strat.getBroker().createMarketOrder, broker.Order.Action.BUY_TO_COVER, TradesAnalyzerTestCase.TestInstrument, 1)  # 127.2
    strat.run()
    # assertEqual instead of assertTrue(x == y) so failures show both values.
    self.assertEqual(round(strat.getBroker().getCash(), 2), round(1000 + (127.14 - 127.16) + (127.14 - 127.2), 2))
    self.assertEqual(stratAnalyzer.getCount(), 1)
    self.assertEqual(stratAnalyzer.getEvenCount(), 0)
    self.assertEqual(round(stratAnalyzer.getAll().mean(), 2), -0.08)
    self.assertEqual(stratAnalyzer.getUnprofitableCount(), 1)
    self.assertEqual(round(stratAnalyzer.getLosses().mean(), 2), -0.08)
    self.assertEqual(stratAnalyzer.getProfitableCount(), 0)
|
cgqyh/pyalgotrade-mod
|
testcases/trades_analyzer_test.py
|
Python
|
apache-2.0
| 22,683
|
# Author: Roberto Polli <rpolli@redhat.com>
#
# NOTE: Edit the jcmd location according to your path or use update-alternatives.
global BIN_JCMD
BIN_JCMD = '/usr/bin/jcmd'
class dstat_plugin(dstat):
    """
    This plugin gathers jvm stats via jcmd.

    Usage:
        JVM_PID=15123 dstat --jvm-full

    Minimize the impacts of jcmd and consider using:
        dstat --noupdate

    For full informations on jcmd see:
    - http://docs.oracle.com/javase/7/docs/technotes/tools/solaris/jcmd.html
    - https://docs.oracle.com/javase/8/docs/technotes/guides/troubleshoot/tooldescr006.html

    This requires the presence of /tmp/hsperfdata_* directory, so
    it WON'T WORK if you add -XX:-UsePerfData or -XX:+PerfDisableSharedMem.
    """
    def __init__(self):
        self.name = 'jvm_full'
        # Columns: loaded/unloaded classes, full GCs, heap/permgen usage.
        # 'heapu'/'permu' are computed too but only shown via the '%' columns.
        self.vars = ('clsL', 'clsU', 'fgc', 'heap', 'heap%',
                     'heapmax', 'perm', 'perm%', 'permmax')
        self.type = 'f'
        self.width = 5
        self.scale = 1000

    def check(self):
        """Preliminar checks. If no pid is passed, defaults to 0.
        """
        if not os.access(BIN_JCMD, os.X_OK):
            # Fixed message: the binary being checked is jcmd, not jstat.
            raise Exception('Needs jcmd binary')
        try:
            self.jvm_pid = int(os.environ.get('JVM_PID', 0))
        except ValueError:
            # Non-numeric JVM_PID: fall back to 0 (jcmd's "all JVMs").
            self.jvm_pid = 0
        return True

    @staticmethod
    def _to_stat(k, v):
        """Return (k, int(v)) when the value is numeric, (k, v) otherwise."""
        try:
            return k, int(v)
        except (KeyError, ValueError, AttributeError):
            return k, v

    @staticmethod
    def _cmd_splitlines(cmd):
        """Splits a txt output of lines like key=value.
        """
        for l in os.popen(cmd):
            yield l.strip().split("=", 1)

    def extract(self):
        """Parse `jcmd <pid> PerfCounter.print` output into the column set."""
        try:
            lines = self._cmd_splitlines(
                '%s %s PerfCounter.print ' % (BIN_JCMD, self.jvm_pid))
            table = dict(self._to_stat(*l) for l in lines
                         if len(l) > 1)
            if table:
                # Number of loaded classes.
                self.set2['clsL'] = table['java.cls.loadedClasses']
                self.set2['clsU'] = table['java.cls.unloadedClasses']
                # Number of Full Garbage Collection events.
                self.set2['fgc'] = table['sun.gc.collector.1.invocations']
                # The heap space is made up of Old Generation and Young
                # Generation (which is divided in Eden, Survivor0 and
                # Survivor1)
                self.set2['heap'] = table['sun.gc.generation.1.capacity'] + table[
                    'sun.gc.generation.0.capacity']
                # Usage is hidden in the nested spaces.
                self.set2['heapu'] = sum(table[k] for k in table
                                         if 'sun.gc.generation.' in k
                                         and 'used' in k)
                self.set2['heapmax'] = table['sun.gc.generation.1.maxCapacity'] + table[
                    'sun.gc.generation.0.maxCapacity']
                # Use PermGen on jdk7 and the new metaspace on jdk8
                try:
                    self.set2['perm'] = table['sun.gc.generation.2.capacity']
                    self.set2['permu'] = sum(table[k] for k in table
                                             if 'sun.gc.generation.2.' in k
                                             and 'used' in k)
                    self.set2['permmax'] = table[
                        'sun.gc.generation.2.maxCapacity']
                except KeyError:
                    self.set2['perm'] = table['sun.gc.metaspace.capacity']
                    self.set2['permu'] = table['sun.gc.metaspace.used']
                    self.set2['permmax'] = table[
                        'sun.gc.metaspace.maxCapacity']
                # Evaluate statistics on memory usage.
                for name in ('heap', 'perm'):
                    self.set2[name + '%'] = 100 * self.set2[
                        name + 'u'] / self.set2[name]
                for name in self.vars:
                    self.val[name] = self.set2[name]
                # 'step' and 'op' are globals injected by the dstat runtime.
                if step == op.delay:
                    self.set1.update(self.set2)
        except IOError as e:
            if op.debug > 1:
                print('%s: lost pipe to jstat, %s' % (self.filename, e))
            for name in self.vars:
                self.val[name] = -1
        except Exception as e:
            if op.debug > 1:
                print('%s: exception' % e)
            for name in self.vars:
                self.val[name] = -1
# vim:ts=4:sw=4:et
|
dagwieers/dstat
|
plugins/dstat_jvm_full.py
|
Python
|
gpl-2.0
| 4,523
|
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.serialization import jsonutils
from novaclient.tests.unit import fakes
from novaclient.tests.unit.fixture_data import base
class V1(base.Fixture):
    """Registers mocked /images endpoints (list, detail, show, metadata
    create and delete) against the requests-mock session for v1 tests."""

    base_url = 'images'

    def setUp(self):
        super(V1, self).setUp()

        # GET /images -> minimal id/name listing.
        get_images = {
            'images': [
                {'id': 1, 'name': 'CentOS 5.2'},
                {'id': 2, 'name': 'My Server Backup'}
            ]
        }
        headers = {'Content-Type': 'application/json'}
        self.requests.register_uri('GET', self.url(),
                                   json=get_images,
                                   headers=headers)

        # Full detail records used by both /images/detail and /images/1.
        image_1 = {
            'id': 1,
            'name': 'CentOS 5.2',
            "updated": "2010-10-10T12:00:00Z",
            "created": "2010-08-10T12:00:00Z",
            "status": "ACTIVE",
            "metadata": {
                "test_key": "test_value",
            },
            "links": {},
        }
        image_2 = {
            "id": 2,
            "name": "My Server Backup",
            "serverId": 1234,
            "updated": "2010-10-10T12:00:00Z",
            "created": "2010-08-10T12:00:00Z",
            "status": "SAVING",
            "progress": 80,
            "links": {},
        }
        self.requests.register_uri('GET', self.url('detail'),
                                   json={'images': [image_1, image_2]},
                                   headers=headers)
        self.requests.register_uri('GET', self.url(1),
                                   json={'image': image_1},
                                   headers=headers)

        def post_images_1_metadata(request, context):
            # Validate the request body shape before echoing the metadata back.
            body = jsonutils.loads(request.body)
            assert list(body) == ['metadata']
            fakes.assert_has_keys(body['metadata'], required=['test_key'])
            return {'metadata': image_1['metadata']}

        self.requests.register_uri('POST', self.url(1, 'metadata'),
                                   json=post_images_1_metadata,
                                   headers=headers)

        # DELETE of the image itself and of a single metadata key.
        for u in (1, '1/metadata/test_key'):
            self.requests.register_uri('DELETE', self.url(u), status_code=204)
class V3(V1):
    """Same canned responses as V1, served under the v1/images prefix."""
    base_url = 'v1/images'
|
CCI-MOC/python-novaclient
|
novaclient/tests/unit/fixture_data/images.py
|
Python
|
apache-2.0
| 2,783
|
import modeltranslation
from modeltranslation.translator import translator
def expand_model_fields(model, field_names):
    """Expand field names into their per-language translated variants.

    For a model registered with django-modeltranslation, each translated
    field name is replaced by the names of its language-specific versions;
    untranslated names pass through unchanged. If the model's class is not
    registered at all, field_names is returned untouched.
    """
    try:
        translated = translator.get_options_for_model(type(model)).fields
    except modeltranslation.translator.NotRegistered:
        return field_names

    result = []
    for name in field_names:
        versions = translated.get(name)
        if versions is None:
            result.append(name)
        else:
            result.extend(f.name for f in versions)
    return result
|
City-of-Helsinki/linkedevents
|
events/translation_utils.py
|
Python
|
mit
| 691
|
# Copyright (c) 2012 Santosh Philip
# =======================================================================
# Distributed under the MIT License.
# (See accompanying file LICENSE or copy at
# http://opensource.org/licenses/MIT)
# =======================================================================
"""py.test for examples. (ex_*.py files)"""
from eppy.idfreader import idfreader
import eppy.snippet as snippet
import os
import eppy.pytest_helpers as pytest_helpers
import eppy.modeleditor as modeleditor
# iddfile = "../iddfiles/Energy+V7_0_0_036.idd"
# fname = "../idffiles/V_7_0/5ZoneSupRetPlenRAB.idf"
# iddsnippet = snippet.iddsnippet
from eppy.iddcurrent import iddcurrent
# Current IDD text bundled with eppy; paired with the IDF snippet below.
iddsnippet = iddcurrent.iddtxt
idfsnippet = snippet.idfsnippet

from io import StringIO
idffhandle = StringIO(idfsnippet)
iddfhandle = StringIO(iddsnippet)
# Module-level parse shared by every test in this file. NOTE: tests mutate
# this data in place, so they are order-dependent.
bunchdt, data, commdct = idfreader(idffhandle, iddfhandle)
def test_readwrite():
    """py.test for ex_readwrite"""
    # NOTE(review): both assertions are commented out, so this test currently
    # verifies nothing beyond str(data) not raising — confirm this is intended.
    txt = str(data)
    head = 'Zone,\n    PLENUM-1,\n    0.0,\n    0.0,\n    0.0,\n    0.0,\n    1,\n    1,\n    0.609600067,\n    283.2;\n\n'
    tail = '\n\nBuildingSurface:Detailed,\n    WALL-1PF,\n    WALL,\n    WALL-1,\n    PLENUM-1,\n    Outdoors,\n    ,\n    SunExposed,\n    WindExposed,\n    0.5,\n    4,\n    0.0,\n    0.0,\n    3.0,\n    0.0,\n    0.0,\n    2.4,\n    30.5,\n    0.0,\n    2.4,\n    30.5,\n    0.0,\n    3.0;\n\n'
    # assert head == txt[:108]
    # assert tail == txt[-280:]
def test_pythonic():
    """py.test for ex_pythonic.py"""
    # WARNING: this test renames zones in the shared module-level model, so
    # any later test reading zone names sees the mutated values.
    zones = bunchdt['zone'.upper()]  # all the zones
    zone0 = zones[0]
    # -
    printout = "PLENUM-1"
    assert zone0.Name == printout
    # -
    printout = [
        'PLENUM-1', 'SPACE1-1', 'SPACE2-1', 'SPACE3-1', 'SPACE4-1',
        'SPACE5-1', 'Sup-PLENUM-1']
    zonenames = [zone.Name for zone in zones]
    assert printout == zonenames
    # -
    printout = [
        '283.2', '239.247360229', '103.311355591', '239.247360229',
        '103.311355591', '447.682556152', '208.6']
    zonevolumes = [zone.Volume for zone in zones]
    # Compare numerically — the IDF stores volumes as strings.
    for item1, item2 in zip(printout, zonevolumes):
        item1, item2 = float(item1), float(item2)
        assert pytest_helpers.almostequal(item1, item2)
    # -
    printout = [('SPACE2-1', '103.311355591'), ('SPACE4-1', '103.311355591')]
    smallzones = [zn for zn in zones if float(zn.Volume) < 150]
    namevolume = [(zn.Name, zn.Volume) for zn in smallzones]
    for (n1, v1), (n2, v2) in zip(printout, namevolume):
        (n1, v1) = (n1, float(v1))
        (n2, v2) = (n2, float(v2))
        assert n1 == n2
        assert pytest_helpers.almostequal(v1, v2)
    # -
    printout = 2
    assert printout == len(smallzones)
    # -
    printout = [
        'PLENUM-1', 'SPACE1-1', 'FIRST-SMALL-ZONE', 'SPACE3-1',
        'SECOND-SMALL-ZONE', 'SPACE5-1', 'Sup-PLENUM-1']
    # In-place rename: mutates the shared bunchdt model.
    smallzones[0].Name = "FIRST-SMALL-ZONE"
    smallzones[1].Name = "SECOND-SMALL-ZONE"
    # now the zone names are:
    zonenames = [zone.Name for zone in zones]
    assert printout == zonenames
def test_addobject():
    """py.test for ex_addobject.py"""
    zones = bunchdt['zone'.upper()]  # all the zones
    # 7 zones come from the module-level snippet parsed at import time.
    assert len(zones) == 7
    modeleditor.addobject(
        bunchdt, data,
        commdct, "Zone".upper(), aname="NewZone")
    assert len(zones) == 8
    # New zone should carry the IDD defaults for every field after the name.
    assert zones[-1].obj == [
        'ZONE', 'NewZone', 0., 0., 0., 0., 1, 1,
        'autocalculate', 'autocalculate', 'autocalculate', '', '', 'Yes']
def test_functions():
    """py.test for ex_functions.py"""
    surfaces = bunchdt['BuildingSurface:Detailed'.upper()]  # all the surfaces
    assert len(surfaces) == 1
    surface = surfaces[0]
    assert surface.Name == "WALL-1PF"
    # Derived geometry properties computed from the surface vertices.
    assert surface.azimuth == 180.0
    assert surface.tilt == 90.0
    assert pytest_helpers.almostequal(surface.area, 18.3)
|
pachi/eppy
|
p3/eppy/tests/test_examples.py
|
Python
|
mit
| 3,864
|
# Mercurial extension to provide 'hg relink' command
#
# Copyright (C) 2007 Brendan Cully <brendan@kublai.com>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""recreates hardlinks between repository clones"""
from mercurial import hg, util
from mercurial.i18n import _
import os, stat
def relink(ui, repo, origin=None, **opts):
    """recreate hardlinks between two repositories

    When repositories are cloned locally, their data files will be
    hardlinked so that they only use the space of a single repository.

    Unfortunately, subsequent pulls into either repository will break
    hardlinks for any files touched by the new changesets, even if
    both repositories end up pulling the same changes.

    Similarly, passing --rev to "hg clone" will fail to use any
    hardlinks, falling back to a complete copy of the source
    repository.

    This command lets you recreate those hardlinks and reclaim that
    wasted space.

    This repository will be relinked to share space with ORIGIN, which
    must be on the same local disk. If ORIGIN is omitted, looks for
    "default-relink", then "default", in [paths].

    Do not attempt any read operations on this repository while the
    command is running. (Both repositories will be locked against
    writes.)
    """
    # Hardlink support detection: both helpers must exist in this hg version.
    if (not util.safehasattr(util, 'samefile') or
        not util.safehasattr(util, 'samedevice')):
        raise util.Abort(_('hardlinks are not supported on this system'))
    src = hg.repository(ui, ui.expandpath(origin or 'default-relink',
                                          origin or 'default'))
    if not src.local():
        raise util.Abort(_('must specify local origin repository'))
    ui.status(_('relinking %s to %s\n') % (src.store.path, repo.store.path))
    if repo.root == src.root:
        ui.status(_('there is nothing to relink\n'))
        return

    # Lock local first, then remote; release in reverse order via the
    # nested try/finally so a failure acquiring the remote lock still
    # releases the local one.
    locallock = repo.lock()
    try:
        remotelock = src.lock()
        try:
            # Three phases: enumerate source files, filter to linkable
            # candidates, then relink them.
            candidates = sorted(collect(src, ui))
            targets = prune(candidates, src.store.path, repo.store.path, ui)
            do_relink(src.store.path, repo.store.path, targets, ui)
        finally:
            remotelock.release()
    finally:
        locallock.release()
def collect(src, ui):
    """Walk the source repo's store and return [(relative path, stat)] for
    every regular revlog file (.d data / .i index)."""
    seplen = len(os.path.sep)
    candidates = []
    live = len(src['tip'].manifest())
    # Your average repository has some files which were deleted before
    # the tip revision. We account for that by assuming that there are
    # 3 tracked files for every 2 live files as of the tip version of
    # the repository.
    #
    # mozilla-central as of 2010-06-10 had a ratio of just over 7:5.
    total = live * 3 // 2
    src = src.store.path
    pos = 0
    ui.status(_("tip has %d files, estimated total number of files: %s\n")
              % (live, total))
    for dirpath, dirnames, filenames in os.walk(src):
        # Sort for a deterministic walk order (stable progress output).
        dirnames.sort()
        relpath = dirpath[len(src) + seplen:]
        for filename in sorted(filenames):
            # Only revlog data (.d) and index (.i) files are linkable.
            if not filename[-2:] in ('.d', '.i'):
                continue
            st = os.stat(os.path.join(dirpath, filename))
            if not stat.S_ISREG(st.st_mode):
                continue
            pos += 1
            candidates.append((os.path.join(relpath, filename), st))
            ui.progress(_('collecting'), pos, filename, _('files'), total)
    ui.progress(_('collecting'), None)
    ui.status(_('collected %d candidate storage files\n') % len(candidates))
    return candidates
def prune(candidates, src, dst, ui):
    """Filter candidates down to [(name, size)] pairs that could plausibly be
    relinked: present in dst, not already the same inode, same device, same
    size. Aborts if src and dst live on different devices."""
    def linkfilter(src, dst, st):
        # Returns the destination stat when linkable, False otherwise.
        try:
            ts = os.stat(dst)
        except OSError:
            # Destination doesn't have this file?
            return False
        if util.samefile(src, dst):
            # Already hardlinked - nothing to reclaim.
            return False
        if not util.samedevice(src, dst):
            # No point in continuing
            raise util.Abort(
                _('source and destination are on different devices'))
        if st.st_size != ts.st_size:
            return False
        return st

    targets = []
    total = len(candidates)
    pos = 0
    for fn, st in candidates:
        pos += 1
        srcpath = os.path.join(src, fn)
        tgt = os.path.join(dst, fn)
        ts = linkfilter(srcpath, tgt, st)
        if not ts:
            ui.debug('not linkable: %s\n' % fn)
            continue
        targets.append((fn, ts.st_size))
        ui.progress(_('pruning'), pos, fn, _('files'), total)
    ui.progress(_('pruning'), None)
    ui.status(_('pruned down to %d probably relinkable files\n') % len(targets))
    return targets
def do_relink(src, dst, files, ui):
    """Compare each (name, size) pair byte-for-byte and replace identical
    destination files with hardlinks to the source. (Python 2 code: uses the
    file() builtin and py2 except syntax.)"""
    def relinkfile(src, dst):
        # Rename-first so the original can be restored if linking fails.
        bak = dst + '.bak'
        os.rename(dst, bak)
        try:
            util.oslink(src, dst)
        except OSError:
            os.rename(bak, dst)
            raise
        os.remove(bak)

    CHUNKLEN = 65536
    relinked = 0
    savedbytes = 0

    pos = 0
    total = len(files)
    for f, sz in files:
        pos += 1
        source = os.path.join(src, f)
        tgt = os.path.join(dst, f)
        # Binary mode, so that read() works correctly, especially on Windows
        sfp = file(source, 'rb')
        dfp = file(tgt, 'rb')
        # Chunked comparison; a non-empty 'sin' after the loop means a
        # mismatch was found.
        sin = sfp.read(CHUNKLEN)
        while sin:
            din = dfp.read(CHUNKLEN)
            if sin != din:
                break
            sin = sfp.read(CHUNKLEN)
        sfp.close()
        dfp.close()
        if sin:
            ui.debug('not linkable: %s\n' % f)
            continue
        try:
            relinkfile(source, tgt)
            ui.progress(_('relinking'), pos, f, _('files'), total)
            relinked += 1
            savedbytes += sz
        except OSError, inst:
            ui.warn('%s: %s\n' % (tgt, str(inst)))
    ui.progress(_('relinking'), None)

    ui.status(_('relinked %d files (%s reclaimed)\n') %
              (relinked, util.bytecount(savedbytes)))
# Mercurial command registration: name -> (function, options, synopsis).
cmdtable = {
    'relink': (
        relink,
        [],
        _('[ORIGIN]')
    )
}
|
mikel-egana-aranguren/SADI-Galaxy-Docker
|
galaxy-dist/eggs/mercurial-2.2.3-py2.7-linux-x86_64-ucs4.egg/hgext/relink.py
|
Python
|
gpl-3.0
| 6,076
|
from django.conf import settings
from django.contrib.auth import authenticate
from django.contrib.auth import login
from ...forms import RegistrationForm
from ...models import User
class SimpleBackend(object):
    """
    A registration backend which implements the simplest possible
    workflow: a user supplies a username, email address and password
    (the bare minimum for a useful account), and is immediately signed
    up and logged in.
    """
    def register(self, request, **kwargs):
        """
        Create and immediately log in a new user.
        """
        username = kwargs['username']
        email = kwargs['email']
        password = kwargs['password1']

        user = User()
        user.username = username
        user.email = email
        user.set_password(password)
        user.save()

        # authenticate() always has to be called before login(), and
        # will return the user we just created.
        new_user = authenticate(username=username, password=password)
        login(request, new_user)
        return new_user

    def activate(self, **kwargs):
        # Accounts are active immediately; there is no activation step.
        raise NotImplementedError

    def registration_allowed(self, request):
        """
        Indicate whether account registration is currently permitted,
        based on the value of the setting ``REGISTRATION_OPEN``. This
        is determined as follows:

        * If ``REGISTRATION_OPEN`` is not specified in settings, or is
          set to ``True``, registration is permitted.

        * If ``REGISTRATION_OPEN`` is both specified and set to
          ``False``, registration is not permitted.
        """
        return getattr(settings, 'REGISTRATION_OPEN', True)

    def get_form_class(self, request):
        """Return the form class used to collect registration data."""
        return RegistrationForm

    def post_registration_redirect(self, request, user):
        """
        After registration, redirect to the user's account page.
        """
        return (user.get_absolute_url(), (), {})

    def post_activation_redirect(self, request, user):
        # No activation workflow, so no post-activation destination either.
        raise NotImplementedError
|
stefankoegl/django-couchdb-utils
|
django_couchdb_utils/registration/backends/simple/__init__.py
|
Python
|
bsd-3-clause
| 2,028
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class VpnClientConfiguration(Model):
    """VpnClientConfiguration for P2S client.

    :param vpn_client_address_pool: The reference of the address space
     resource which represents Address space for P2S VpnClient.
    :type vpn_client_address_pool:
     ~azure.mgmt.network.v2017_06_01.models.AddressSpace
    :param vpn_client_root_certificates: VpnClientRootCertificate for virtual
     network gateway.
    :type vpn_client_root_certificates:
     list[~azure.mgmt.network.v2017_06_01.models.VpnClientRootCertificate]
    :param vpn_client_revoked_certificates: VpnClientRevokedCertificate for
     Virtual network gateway.
    :type vpn_client_revoked_certificates:
     list[~azure.mgmt.network.v2017_06_01.models.VpnClientRevokedCertificate]
    :param vpn_client_protocols: VpnClientProtocols for Virtual network
     gateway.
    :type vpn_client_protocols: list[str or
     ~azure.mgmt.network.v2017_06_01.models.VpnClientProtocol]
    :param radius_server_address: The radius server address property of the
     VirtualNetworkGateway resource for vpn client connection.
    :type radius_server_address: str
    :param radius_server_secret: The radius secret property of the
     VirtualNetworkGateway resource for vpn client connection.
    :type radius_server_secret: str
    """

    # Serialization map consumed by msrest: Python attribute ->
    # (JSON wire key, msrest type string). Do not edit by hand; this file
    # is generated by AutoRest (see the file header).
    _attribute_map = {
        'vpn_client_address_pool': {'key': 'vpnClientAddressPool', 'type': 'AddressSpace'},
        'vpn_client_root_certificates': {'key': 'vpnClientRootCertificates', 'type': '[VpnClientRootCertificate]'},
        'vpn_client_revoked_certificates': {'key': 'vpnClientRevokedCertificates', 'type': '[VpnClientRevokedCertificate]'},
        'vpn_client_protocols': {'key': 'vpnClientProtocols', 'type': '[str]'},
        'radius_server_address': {'key': 'radiusServerAddress', 'type': 'str'},
        'radius_server_secret': {'key': 'radiusServerSecret', 'type': 'str'},
    }

    def __init__(self, vpn_client_address_pool=None, vpn_client_root_certificates=None, vpn_client_revoked_certificates=None, vpn_client_protocols=None, radius_server_address=None, radius_server_secret=None):
        # All parameters are optional; this generated model merely stores them.
        super(VpnClientConfiguration, self).__init__()
        self.vpn_client_address_pool = vpn_client_address_pool
        self.vpn_client_root_certificates = vpn_client_root_certificates
        self.vpn_client_revoked_certificates = vpn_client_revoked_certificates
        self.vpn_client_protocols = vpn_client_protocols
        self.radius_server_address = radius_server_address
        self.radius_server_secret = radius_server_secret
|
AutorestCI/azure-sdk-for-python
|
azure-mgmt-network/azure/mgmt/network/v2017_06_01/models/vpn_client_configuration.py
|
Python
|
mit
| 3,065
|
import pytest
from django.urls import reverse
# (username, password) fixtures covering every role; 'anonymous' carries no
# credentials, so client.login() fails and the request stays unauthenticated.
users = (
    ('editor', 'editor'),
    ('reviewer', 'reviewer'),
    ('user', 'user'),
    ('api', 'api'),
    ('anonymous', None),
)

# Expected HTTP status code per endpoint and per role.
status_map = {
    'list': {
        'editor': 200, 'reviewer': 200, 'api': 200, 'user': 200, 'anonymous': 401
    },
}

# Reversible URL names exercised by the tests below.
urlnames = {
    'list': 'v1-questions:widgettype-list',
}
@pytest.mark.parametrize('username,password', users)
def test_list(db, client, username, password):
    """GET the widgettype list endpoint and check the status expected for each role."""
    client.login(username=username, password=password)
    list_url = reverse(urlnames['list'])
    response = client.get(list_url)
    expected_status = status_map['list'][username]
    assert response.status_code == expected_status, response.json()
|
rdmorganiser/rdmo
|
rdmo/questions/tests/test_viewset_widgettype.py
|
Python
|
apache-2.0
| 669
|
from openstack_portation import utils
from neutronclient.common import exceptions as neutron_exceptions
import logging
log = logging.getLogger(__name__)
def create_network(neutron, keystone, **args):
    """
    Create a neutron network, skipping creation when one with the same
    name already exists for the tenant.

    :param neutron: neutron client.
    :param keystone: keystone client used to resolve 'tenant_name'.
    :param args: network attributes; 'tenant_name' is consumed here.
    :return: {'network': <network id>} on creation, None if it already existed.
    """
    log.debug('Creating network:%s' % args)
    tenant = utils.find_project(keystone, args.pop('tenant_name', None))
    tenant_id = tenant.id if tenant else None
    existing = utils.find_network(neutron, args['name'], tenant_id)
    if existing:
        log.info('Network already exists:%s' % existing['id'])
        return
    if tenant:
        args['tenant_id'] = tenant.id
    created = neutron.create_network({'network' : args})
    log.info('Created network:%s' % created['network']['id'])
    return {'network' : created['network']['id']}
def create_subnet(neutron, keystone, **args):
    """
    Create a neutron subnet on an existing network, skipping creation
    when a subnet with the same name already exists there.

    :param neutron: neutron client.
    :param keystone: keystone client used to resolve 'tenant_name'.
    :param args: subnet attributes; 'tenant_name' and 'network' are consumed.
    :return: {'subnet': <subnet id>} on creation, None on duplicate or error.
    """
    log.debug('Creating subnet:%s' % args)
    tenant = utils.find_project(keystone, args.pop('tenant_name', None))
    tenant_id = tenant.id if tenant else None
    network = utils.find_network(neutron, args.pop('network', None), tenant_id)
    args['network_id'] = network['id']
    existing = utils.find_subnet(neutron, args['name'], tenant_id, network['id'])
    if existing:
        log.info('Subnet already exists:%s' % existing['id'])
        return
    if tenant:
        args['tenant_id'] = tenant.id
    try:
        created = neutron.create_subnet({"subnet" : args})
    except neutron_exceptions.BadRequest as e:
        log.error('Cannot create subnet:%s' % str(e))
        return
    log.info('Created subnet:%s' % created['subnet']['id'])
    return {'subnet' : created['subnet']['id']}
def create_router(neutron, keystone, **args):
    """
    Create a neutron router idempotently and optionally wire it up:
    set an external gateway network and attach an internal subnet interface.

    :param neutron: neutron client.
    :param keystone: keystone client used to resolve 'tenant_name'.
    :param args: router attributes; consumes 'tenant_name',
        'external_network' and 'internal_subnet' before creation.
    :return: {'router': <router id>}
    """
    log.debug('Create router:%s' % args)
    tenant = utils.find_project(keystone,
                                args.pop('tenant_name', None))
    if tenant:
        args['tenant_id'] = tenant.id
    # NOTE(review): unlike create_network/create_subnet above, these lookups
    # pass tenant_id=None, i.e. they search without a tenant filter — confirm
    # that cross-tenant matching is intended here.
    router = utils.find_router(neutron, args['name'], None)
    external = utils.find_network(neutron, args.pop('external_network', None),
                                  None)
    internal = utils.find_subnet(neutron,
                                 args.pop('internal_subnet', None),
                                 None, None)
    if router:
        log.info('Router already exists:%s' % router['id'])
    else:
        router = neutron.create_router({'router' : args})['router']
        log.info('Created router:%s' % router['id'])
    # Gateway/interface wiring runs even for a pre-existing router.
    if external:
        data = {'network_id' : external['id']}
        neutron.add_gateway_router(router['id'],
                                   data)
        log.info('Set external network:%s for router:%s' % (external['id'],
                                                            router['id']))
    if internal:
        data = {'subnet_id' : internal['id']}
        try:
            neutron.add_interface_router(router['id'], data)
            log.info('Set internal subnet:%s for router:%s' % (internal['id'],
                                                               router['id']))
        except neutron_exceptions.BadRequest as e:
            # Best-effort: e.g. the interface may already be attached.
            log.error('Cannot add internal subnet:%s' % str(e))
    return {'router' : router['id']}
|
tnoff/OpenStack-Account-Setup
|
openstack_portation/openstack/neutron.py
|
Python
|
bsd-2-clause
| 3,246
|
"""Library implementing different ways to preprocess the data.
"""
import re
import numpy as np
import skimage.io
import skimage.transform
from itertools import izip
from functools import partial
import quasi_random
import utils
from configuration import config
from image_transform import resize_to_make_it_fit, resize_to_make_sunny_fit, resize_and_augment_sunny, \
resize_and_augment, normscale_resize_and_augment, build_rescale_transform, build_shift_center_transform, \
build_augmentation_transform, build_center_uncenter_transforms, fast_warp
def uint_to_float(img):
    """Rescale 8-bit pixel intensities into floating point values in [0, 1]."""
    return np.divide(img, np.float32(255.0))
# Identity augmentation: every parameter range collapses to a single value,
# so sampling from these defaults leaves the image unchanged. Config-provided
# ranges are merged on top of this dict in the samplers below.
DEFAULT_AUGMENTATION_PARAMETERS = {
    "zoom_x":[1, 1],
    "zoom_y":[1, 1],
    "rotate":[0, 0],
    "shear":[0, 0],
    "skew_x":[0, 0],
    "skew_y":[0, 0],
    "translate_x":[0, 0],
    "translate_y":[0, 0],
    "flip_vert": [0, 0],
    "roll_time": [0, 0],
    "flip_time": [0, 0],
    "change_brightness": [0, 0],
}

# Lazily-initialised scrambled Halton sequence shared by the two samplers
# below (module-level so draws continue across calls).
quasi_random_generator = None
def sample_augmentation_parameters():
    """Sample one augmentation-parameter dict for training.

    Ranges come from config().augmentation_params merged over
    DEFAULT_AUGMENTATION_PARAMETERS; each value is drawn from a shared
    scrambled Halton quasi-random sequence mapped onto its [a, b) range.
    """
    global quasi_random_generator

    augm = config().augmentation_params
    if "translation" in augm:
        # Legacy config style: a single "translation" range is expanded into
        # separate x/y entries; only a whitelist of other keys is copied over.
        newdict = dict()
        if "translation" in augm:
            newdict["translate_x"] = augm["translation"]
            newdict["translate_y"] = augm["translation"]
        if "shear" in augm:
            newdict["shear"] = augm["shear"]
        if "flip_vert" in augm:
            newdict["flip_vert"] = augm["flip_vert"]
        if "roll_time" in augm:
            newdict["roll_time"] = augm["roll_time"]
        if "flip_time" in augm:
            newdict["flip_time"] = augm["flip_time"]
        augmentation_params = dict(DEFAULT_AUGMENTATION_PARAMETERS, **newdict)
    else:
        augmentation_params = dict(DEFAULT_AUGMENTATION_PARAMETERS, **augm)

    if quasi_random_generator is None:
        quasi_random_generator = quasi_random.scrambled_halton_sequence_generator(dimension=len(augmentation_params),
                                                                                  permutation='Braaten-Weller')
    res = dict()
    try:
        sample = quasi_random_generator.next()
    except ValueError:
        # Sequence exhausted (or dimension mismatch): restart the generator.
        quasi_random_generator = quasi_random.scrambled_halton_sequence_generator(dimension=len(augmentation_params),
                                                                                  permutation='Braaten-Weller')
        sample = quasi_random_generator.next()
    for rand, (key, (a, b)) in izip(sample, augmentation_params.iteritems()):
        #res[key] = config().rng.uniform(a,b)
        res[key] = a + rand*(b-a)  # map the [0, 1) quasi-random draw onto [a, b)
    return res
def sample_test_augmentation_parameters():
    """Sample one augmentation-parameter dict for test-time augmentation.

    Identical to sample_augmentation_parameters() except that the ranges come
    from config().augmentation_params_test when that attribute exists,
    falling back to the training ranges otherwise.
    """
    global quasi_random_generator

    augm = config().augmentation_params_test if hasattr(config(), 'augmentation_params_test') else config().augmentation_params
    if "translation" in augm:
        # Legacy config style: expand "translation" into x/y entries and copy
        # a whitelist of other keys (same expansion as the training sampler).
        newdict = dict()
        if "translation" in augm:
            newdict["translate_x"] = augm["translation"]
            newdict["translate_y"] = augm["translation"]
        if "shear" in augm:
            newdict["shear"] = augm["shear"]
        if "flip_vert" in augm:
            newdict["flip_vert"] = augm["flip_vert"]
        if "roll_time" in augm:
            newdict["roll_time"] = augm["roll_time"]
        if "flip_time" in augm:
            newdict["flip_time"] = augm["flip_time"]
        augmentation_params = dict(DEFAULT_AUGMENTATION_PARAMETERS, **newdict)
    else:
        augmentation_params = dict(DEFAULT_AUGMENTATION_PARAMETERS, **augm)

    if quasi_random_generator is None:
        quasi_random_generator = quasi_random.scrambled_halton_sequence_generator(dimension=len(augmentation_params),
                                                                                  permutation='Braaten-Weller')
    res = dict()
    try:
        sample = quasi_random_generator.next()
    except ValueError:
        # Sequence exhausted (or dimension mismatch): restart the generator.
        quasi_random_generator = quasi_random.scrambled_halton_sequence_generator(dimension=len(augmentation_params),
                                                                                  permutation='Braaten-Weller')
        sample = quasi_random_generator.next()
    for rand, (key, (a, b)) in izip(sample, augmentation_params.iteritems()):
        #res[key] = config().rng.uniform(a,b)
        res[key] = a + rand*(b-a)  # map the [0, 1) quasi-random draw onto [a, b)
    return res
def put_in_the_middle(target_tensor, data_tensor, pad_better=False, is_padded=None):
    """
    Put data_tensor, with an arbitrary number of dimensions, in the middle of
    target_tensor (in place).

    If data_tensor is bigger along a dimension, data is cut off symmetrically;
    if target_tensor is bigger, original values (probably zeros) are kept.

    :param target_tensor: destination array, written in place.
    :param data_tensor: source array.
    :param pad_better: when True, replicate the first/last frame of
        data_tensor into the leading/trailing padding along axis 0.
    :param is_padded: optional boolean array, same shape as target_tensor;
        set to True wherever target_tensor was NOT overwritten by data.
    :return: None (target_tensor, and is_padded if given, are modified in place).
    """
    target_shape = target_tensor.shape
    data_shape = data_tensor.shape

    def get_indices(target_width, data_width):
        # Centre the smaller extent inside the larger one.
        # Fixed: floor division (//) and range() so the code behaves
        # identically on Python 2 and 3; the original py2-only `/` and
        # `xrange` produced float slice bounds / NameError on py3.
        if target_width > data_width:
            diff = target_width - data_width
            target_slice = slice(diff // 2, target_width - (diff - diff // 2))
            data_slice = slice(None, None)
        else:
            diff = data_width - target_width
            data_slice = slice(diff // 2, data_width - (diff - diff // 2))
            target_slice = slice(None, None)
        return target_slice, data_slice

    t_sh = [get_indices(l1, l2) for l1, l2 in zip(target_shape, data_shape)]
    target_indices, data_indices = zip(*t_sh)
    target_tensor[target_indices] = data_tensor[data_indices]
    if is_padded is not None:
        is_padded[:] = True
        is_padded[target_indices] = False
    if pad_better:
        # Replicate the edge frames of the data into the padding along axis 0.
        if target_indices[0].start:
            for i in range(0, target_indices[0].start):
                target_tensor[i] = data_tensor[0]
        if target_indices[0].stop:
            for i in range(target_indices[0].stop, len(target_tensor)):
                target_tensor[i] = data_tensor[-1]
def sunny_preprocess(chunk_x, img, chunk_y, lbl):
    """Resize one sunny image/label pair into the given chunk slots, no augmentation."""
    chunk_x[:, :] = resize_to_make_sunny_fit(uint_to_float(img).astype(np.float32),
                                             output_shape=chunk_x.shape[-2:])
    chunk_y[:] = resize_to_make_sunny_fit(lbl.astype(np.float32),
                                          output_shape=chunk_y.shape[-2:])
def sunny_preprocess_with_augmentation(chunk_x, img, chunk_y, lbl):
    """Resize and randomly augment one sunny image/label pair.

    The same sampled augmentation is applied to the image and to its
    segmentation so they stay aligned.
    """
    augment = sample_augmentation_parameters()
    chunk_x[:, :] = resize_and_augment_sunny(uint_to_float(img).astype(np.float32),
                                             output_shape=chunk_x.shape[-2:],
                                             augment=augment)
    chunk_y[:] = resize_and_augment_sunny(lbl.astype(np.float32),
                                          output_shape=chunk_y.shape[-2:],
                                          augment=augment)
def sunny_preprocess_validation(chunk_x, img, chunk_y, lbl):
    """Validation-time variant: same as sunny_preprocess, no augmentation."""
    chunk_x[:, :] = resize_to_make_sunny_fit(uint_to_float(img).astype(np.float32),
                                             output_shape=chunk_x.shape[-2:])
    chunk_y[:] = resize_to_make_sunny_fit(lbl.astype(np.float32),
                                          output_shape=chunk_y.shape[-2:])
def _make_4d_tensor(tensors):
"""
Input: list of 3d tensors with a different first dimension.
Output: 4d tensor
"""
max_frames = max([t.shape[0] for t in tensors])
min_frames = min([t.shape[0] for t in tensors])
# If all dimensions are equal, just make an array out of it
if min_frames == max_frames:
return np.array(tensors)
# Otherwise, we need to do it manually
else:
res = np.zeros((len(tensors), max_frames, tensors[0].shape[1], tensors[0].shape[2]))
for i, t in enumerate(tensors):
nr_padding_frames = max_frames - len(t)
res[i] = np.vstack([t] + [t[:1]]*nr_padding_frames)
return res
def preprocess_normscale(patient_data, result, index, augment=True,
                         metadata=None,
                         normscale_resize_and_augment_function=normscale_resize_and_augment,
                         testaug=False):
    """Normalizes scale and augments the data.

    Args:
        patient_data: the data to be preprocessed.
        result: dict to store the result in.
        index: index indicating in which slot the result dict the data
            should go.
        augment: flag indicating wheter augmentation is needed.
        metadata: metadata belonging to the patient data.
        normscale_resize_and_augment_function: resize/augment implementation
            to use (injectable, defaults to normscale_resize_and_augment).
        testaug: when True (and augment is True), sample test-time
            augmentation parameters instead of training-time ones.

    Returns:
        A pair (label_correction_function, classification_correction_function)
        compensating labels/classifications for the applied zoom factor;
        identity functions when no zoom was applied.
    """
    if augment:
        if testaug:
            augmentation_params = sample_test_augmentation_parameters()
        else:
            augmentation_params = sample_augmentation_parameters()
    else:
        augmentation_params = None

    zoom_factor = None

    # Iterate over different sorts of data
    for tag, data in patient_data.iteritems():
        if tag in metadata:
            metadata_tag = metadata[tag]
        desired_shape = result[tag][index].shape

        cleaning_processes = getattr(config(), 'cleaning_processes', [])
        cleaning_processes_post = getattr(config(), 'cleaning_processes_post', [])

        if tag.startswith("sliced:data:singleslice"):
            # Cleaning data before extracting a patch
            data = clean_images(
                [patient_data[tag]], metadata=metadata_tag,
                cleaning_processes=cleaning_processes)

            # Augment and extract patch
            # Decide which roi to use.
            shift_center = (None, None)
            if getattr(config(), 'use_hough_roi', False):
                shift_center = metadata_tag["hough_roi"]

            # shift_center is reversed ([::-1]) before being handed on.
            patient_3d_tensor = normscale_resize_and_augment_function(
                data, output_shape=desired_shape[-2:],
                augment=augmentation_params,
                pixel_spacing=metadata_tag["PixelSpacing"],
                shift_center=shift_center[::-1])[0]

            if augmentation_params is not None:
                zoom_factor = augmentation_params["zoom_x"] * augmentation_params["zoom_y"]
            else:
                zoom_factor = 1.0

            # Clean data further
            patient_3d_tensor = clean_images(
                patient_3d_tensor, metadata=metadata_tag,
                cleaning_processes=cleaning_processes_post)

            if "area_per_pixel:sax" in result:
                raise NotImplementedError()

            if augmentation_params and not augmentation_params.get("change_brightness", 0) == 0:
                patient_3d_tensor = augment_brightness(patient_3d_tensor, augmentation_params["change_brightness"])

            put_in_the_middle(result[tag][index], patient_3d_tensor, True)

        elif tag.startswith("sliced:data:randomslices"):
            # Clean each slice separately
            data = [
                clean_images([slicedata], metadata=metadata, cleaning_processes=cleaning_processes)[0]
                for slicedata, metadata in zip(data, metadata_tag)]

            # Augment and extract patches
            shift_centers = [(None, None)] * len(data)
            if getattr(config(), 'use_hough_roi', False):
                shift_centers = [m["hough_roi"] for m in metadata_tag]

            patient_3d_tensors = [
                normscale_resize_and_augment_function(
                    [slicedata], output_shape=desired_shape[-2:],
                    augment=augmentation_params,
                    pixel_spacing=metadata["PixelSpacing"],
                    shift_center=shift_center[::-1])[0]
                for slicedata, metadata, shift_center in zip(data, metadata_tag, shift_centers)]

            if augmentation_params is not None:
                zoom_factor = augmentation_params["zoom_x"] * augmentation_params["zoom_y"]
            else:
                zoom_factor = 1.0

            # Clean data further
            patient_3d_tensors = [
                clean_images([patient_3d_tensor], metadata=metadata, cleaning_processes=cleaning_processes_post)[0]
                for patient_3d_tensor, metadata in zip(patient_3d_tensors, metadata_tag)]

            patient_4d_tensor = _make_4d_tensor(patient_3d_tensors)

            if augmentation_params and not augmentation_params.get("change_brightness", 0) == 0:
                patient_4d_tensor = augment_brightness(patient_4d_tensor, augmentation_params["change_brightness"])

            if "area_per_pixel:sax" in result:
                raise NotImplementedError()

            put_in_the_middle(result[tag][index], patient_4d_tensor, True)

        elif tag.startswith("sliced:data:sax:locations"):
            pass  # will be filled in by the next one
        elif tag.startswith("sliced:data:sax:is_not_padded"):
            pass  # will be filled in by the next one
        elif tag.startswith("sliced:data:sax"):
            # step 1: sort (data, metadata_tag) with slice_location_finder
            slice_locations, sorted_indices, sorted_distances = slice_location_finder({i: metadata for i, metadata in enumerate(metadata_tag)})

            data = [data[idx] for idx in sorted_indices]
            metadata_tag = [metadata_tag[idx] for idx in sorted_indices]
            slice_locations = np.array([slice_locations[idx]["relative_position"] for idx in sorted_indices])
            # Centre locations around the midpoint of the slice stack.
            slice_locations = slice_locations - (slice_locations[-1] + slice_locations[0])/2.0

            data = [
                clean_images([slicedata], metadata=metadata, cleaning_processes=cleaning_processes)[0]
                for slicedata, metadata in zip(data, metadata_tag)]

            # Augment and extract patches
            shift_centers = [(None, None)] * len(data)
            if getattr(config(), 'use_hough_roi', False):
                shift_centers = [m["hough_roi"] for m in metadata_tag]

            patient_3d_tensors = [
                normscale_resize_and_augment_function(
                    [slicedata], output_shape=desired_shape[-2:],
                    augment=augmentation_params,
                    pixel_spacing=metadata["PixelSpacing"],
                    shift_center=shift_center[::-1])[0]
                for slicedata, metadata, shift_center in zip(data, metadata_tag, shift_centers)]

            if augmentation_params is not None:
                zoom_factor = augmentation_params["zoom_x"] * augmentation_params["zoom_y"]
            else:
                zoom_factor = 1.0

            # Clean data further
            patient_3d_tensors = [
                clean_images([patient_3d_tensor], metadata=metadata, cleaning_processes=cleaning_processes_post)[0]
                for patient_3d_tensor, metadata in zip(patient_3d_tensors, metadata_tag)]

            patient_4d_tensor = _make_4d_tensor(patient_3d_tensors)

            if augmentation_params and not augmentation_params.get("change_brightness", 0) == 0:
                patient_4d_tensor = augment_brightness(patient_4d_tensor, augmentation_params["change_brightness"])

            # Augment sax order
            if augmentation_params and augmentation_params.get("flip_sax", 0) > 0.5:
                patient_4d_tensor = patient_4d_tensor[::-1]
                slice_locations = slice_locations[::-1]

            # Put data (images and metadata) in right location
            put_in_the_middle(result[tag][index], patient_4d_tensor, True)

            if "sliced:data:sax:locations" in result:
                # eps keeps real locations distinguishable from zero padding.
                eps_location = 1e-7
                is_padded = np.array([False]*len(result["sliced:data:sax:locations"][index]))
                put_in_the_middle(result["sliced:data:sax:locations"][index], slice_locations + eps_location, True, is_padded)

            if "sliced:data:sax:distances" in result:
                eps_location = 1e-7
                sorted_distances.append(0.0)  # is easier for correct padding
                is_padded = np.array([False]*len(result["sliced:data:sax:distances"][index]))
                put_in_the_middle(result["sliced:data:sax:distances"][index], np.array(sorted_distances) + eps_location, True, is_padded)

            if "sliced:data:sax:is_not_padded" in result:
                result["sliced:data:sax:is_not_padded"][index] = np.logical_not(is_padded)

        elif tag.startswith("sliced:data:chanzoom:2ch"):
            # step 1: sort (data, metadata_tag) with slice_location_finder
            slice_locations, sorted_indices, sorted_distances = slice_location_finder({i: metadata for i, metadata in enumerate(metadata_tag[2])})

            top_slice_metadata = metadata_tag[2][sorted_indices[0]]
            bottom_slice_metadata = metadata_tag[2][sorted_indices[-1]]

            ch2_metadata = metadata_tag[1]
            ch4_metadata = metadata_tag[0]

            trf_2ch, trf_4ch = get_chan_transformations(
                ch2_metadata=ch2_metadata,
                ch4_metadata=ch4_metadata,
                top_point_metadata = top_slice_metadata,
                bottom_point_metadata = bottom_slice_metadata,
                output_width=desired_shape[-1]
                )

            ch4_3d_patient_tensor, ch2_3d_patient_tensor = [], []
            ch4_data = data[0]
            ch2_data = data[1]
            # If one channel view is missing, substitute the other so both
            # output tensors are always filled.
            if ch4_data is None and ch2_data is not None:
                ch4_data = ch2_data
                ch4_metadata = ch2_metadata
            if ch2_data is None and ch4_data is not None:
                ch2_data = ch4_data
                ch2_metadata = ch4_metadata

            for ch, ch_result, transform, metadata in [(ch4_data, ch4_3d_patient_tensor, trf_4ch, ch4_metadata),
                                                       (ch2_data, ch2_3d_patient_tensor, trf_2ch, ch2_metadata)]:
                tform_shift_center, tform_shift_uncenter = build_center_uncenter_transforms(desired_shape[-2:])
                # NOTE(review): zoom_factor is temporarily reused here for the
                # per-channel normalising zoom; it is overwritten again below.
                zoom_factor = np.sqrt(np.abs(np.linalg.det(transform.params[:2,:2])) * np.prod(metadata["PixelSpacing"]))
                normalise_zoom_transform = build_augmentation_transform(zoom_x=zoom_factor, zoom_y=zoom_factor)
                if augmentation_params:
                    augment_tform = build_augmentation_transform(**augmentation_params)
                    total_tform = tform_shift_uncenter + augment_tform + normalise_zoom_transform + tform_shift_center + transform
                else:
                    total_tform = tform_shift_uncenter + normalise_zoom_transform + tform_shift_center + transform
                ch_result[:] = [fast_warp(c, total_tform, output_shape=desired_shape[-2:]) for c in ch]
                # print "zoom factor:", zoom_factor

            if augmentation_params is not None:
                zoom_factor = augmentation_params["zoom_x"] * augmentation_params["zoom_y"]
            else:
                zoom_factor = 1.0

            # Clean data further
            ch4_3d_patient_tensor = clean_images(np.array([ch4_3d_patient_tensor]), metadata=ch4_metadata, cleaning_processes=cleaning_processes_post)[0]
            ch2_3d_patient_tensor = clean_images(np.array([ch2_3d_patient_tensor]), metadata=ch2_metadata, cleaning_processes=cleaning_processes_post)[0]

            # Put data (images and metadata) in right location
            put_in_the_middle(result["sliced:data:chanzoom:2ch"][index], ch2_3d_patient_tensor, True)
            put_in_the_middle(result["sliced:data:chanzoom:4ch"][index], ch4_3d_patient_tensor, True)

        elif tag.startswith("sliced:data:shape"):
            raise NotImplementedError()

        elif tag.startswith("sliced:data"):
            # put time dimension first, then axis dimension
            data = clean_images(patient_data[tag], metadata=metadata_tag)
            # NOTE(review): `augmentation_parameters` is not defined in this
            # function (the sampled dict is `augmentation_params`); this
            # branch would raise NameError if ever reached — confirm.
            patient_4d_tensor, zoom_ratios = resize_and_augment(data, output_shape=desired_shape[-2:], augment=augmentation_parameters)
            if "area_per_pixel:sax" in result:
                result["area_per_pixel:sax"][index] = zoom_ratios[0] * np.prod(metadata_tag[0]["PixelSpacing"])

            if "noswitch" not in tag:
                patient_4d_tensor = np.swapaxes(patient_4d_tensor,1,0)

            put_in_the_middle(result[tag][index], patient_4d_tensor)

        elif tag.startswith("sliced:meta:all"):
            # TODO: this probably doesn't work very well yet
            result[tag][index] = patient_data[tag]

        elif tag.startswith("sliced:meta:PatientSex"):
            # Encode sex as -1 (male) / +1 (female/other).
            result[tag][index][0] = -1. if patient_data[tag]=='M' else 1.

        elif tag.startswith("sliced:meta:PatientAge"):
            # DICOM age strings look like "045Y": 3-digit number + unit letter.
            number, letter = patient_data[tag][:3], patient_data[tag][-1]
            letter_rescale_factors = {'D': 365.25, 'W': 52.1429, 'M': 12., 'Y': 1.}
            result[tag][index][0] = float(patient_data[tag][:3]) / letter_rescale_factors[letter]

    if augmentation_params and zoom_factor:
        label_correction_function = lambda x: x * zoom_factor
        classification_correction_function = lambda x: utils.zoom_array(x, 1./zoom_factor)
        return label_correction_function, classification_correction_function
    else:
        return lambda x: x, lambda x: x
def preprocess_with_augmentation(patient_data, result, index, augment=True, metadata=None, testaug=False):
    """
    Load the resulting data, augment it if needed, and put it in result at the correct index

    :param patient_data: dict of tag -> data for one patient.
    :param result: dict of tag -> preallocated output arrays.
    :param index: slot in each result array to fill.
    :param augment: when True, sample augmentation parameters once and apply
        them to all data of this patient.
    :param metadata: dict of tag -> metadata matching patient_data.
    :param testaug: accepted for interface parity; not used in this function.
    :return: None (result is filled in place).
    """
    if augment:
        augmentation_parameters = sample_augmentation_parameters()
    else:
        augmentation_parameters = None

    for tag, data in patient_data.iteritems():
        metadata_tag = metadata[tag]
        desired_shape = result[tag][index].shape
        # try to fit data into the desired shape
        if tag.startswith("sliced:data:singleslice"):
            cleaning_processes = getattr(config(), 'cleaning_processes', [])
            data = clean_images(
                [patient_data[tag]], metadata=metadata_tag,
                cleaning_processes=cleaning_processes)
            # NOTE(review): the trailing [0] indexes the return value BEFORE
            # tuple-unpacking, unlike the call in the branch below — confirm
            # resize_and_augment's return shape makes this intentional.
            patient_4d_tensor, zoom_ratios = resize_and_augment(data, output_shape=desired_shape[-2:], augment=augmentation_parameters)[0]
            if "area_per_pixel:sax" in result:
                result["area_per_pixel:sax"][index] = zoom_ratios[0] * np.prod(metadata_tag["PixelSpacing"])
            put_in_the_middle(result[tag][index], patient_4d_tensor)
        elif tag.startswith("sliced:data"):
            # put time dimension first, then axis dimension
            data = clean_images(patient_data[tag], metadata=metadata_tag)
            patient_4d_tensor, zoom_ratios = resize_and_augment(data, output_shape=desired_shape[-2:], augment=augmentation_parameters)
            if "area_per_pixel:sax" in result:
                result["area_per_pixel:sax"][index] = zoom_ratios[0] * np.prod(metadata_tag[0]["PixelSpacing"])
            if "noswitch" not in tag:
                patient_4d_tensor = np.swapaxes(patient_4d_tensor,1,0)
            put_in_the_middle(result[tag][index], patient_4d_tensor)
        # NOTE(review): "sliced:data:shape" also matches the elif above, so
        # the assignment below overwrites what that branch wrote — confirm
        # this overwrite is intended.
        if tag.startswith("sliced:data:shape"):
            result[tag][index] = patient_data[tag]
        if tag.startswith("sliced:meta:"):
            # TODO: this probably doesn't work very well yet
            result[tag][index] = patient_data[tag]
    return
# Non-augmenting variant used at validation/test time.
preprocess = partial(preprocess_with_augmentation, augment=False)
def clean_images(data, metadata, cleaning_processes=()):
    """
    clean up 4d-tensor of imdata consistently (fix contrast, move upside up, etc...)

    Runs every cleaning step over the data in order; each step receives the
    output of the previous one.

    :param data: image data passed through the pipeline.
    :param metadata: metadata handed unchanged to every cleaning step.
    :param cleaning_processes: iterable of callables f(data, metadata) -> data.
        Defaults to an empty tuple (no-op): some callers in this module
        invoke clean_images(data, metadata=...) without this argument, which
        previously raised TypeError.
    :return: the fully cleaned data.
    """
    for process in cleaning_processes:
        data = process(data, metadata)
    return data
def normalize_contrast(imdata, metadata=None, percentiles=(5.0,95.0)):
    """
    Linearly rescale all images so that the given low/high percentiles of the
    POOLED pixel values map to 0 and 1, clipping everything outside.

    :param imdata: sequence of numpy image arrays; entries are replaced in place.
    :param metadata: unused; kept for cleaning-process signature compatibility.
    :param percentiles: (low, high) percentiles defining the intensity window.
    :return: imdata, with every image rescaled and clipped to [0, 1].
    """
    lp, hp = percentiles
    # Pool all pixels so every image gets the same intensity window.
    flat_data = np.concatenate([i.flatten() for i in imdata]).flatten()
    high = np.percentile(flat_data, hp)
    low = np.percentile(flat_data, lp)
    # Fixed: range() instead of the py2-only xrange() (NameError on Python 3).
    for i in range(len(imdata)):
        image = imdata[i]
        image = 1.0 * (image - low) / (high - low)
        image = np.clip(image, 0.0, 1.0)
        imdata[i] = image
    return imdata
def normalize_contrast_zmuv(imdata, metadata=None, z=2):
    """
    Normalise POOLED pixel intensities to zero mean, then map +-z standard
    deviations onto [0, 1] around 0.5, clipping everything outside.

    :param imdata: sequence of numpy image arrays; entries are replaced in place.
    :param metadata: unused; kept for cleaning-process signature compatibility.
    :param z: number of standard deviations mapped to the half-range.
    :return: imdata, with every image rescaled and clipped to [0, 1].
    """
    flat_data = np.concatenate([i.flatten() for i in imdata]).flatten()
    mean = np.mean(flat_data)
    std = np.std(flat_data)
    # Fixed: range() instead of the py2-only xrange() (NameError on Python 3).
    for i in range(len(imdata)):
        image = imdata[i]
        image = ((image - mean) / (2 * std * z) + 0.5)
        image = np.clip(image, -0.0, 1.0)
        imdata[i] = image
    return imdata
def set_upside_up(data, metadata=None):
    """Reorient every slice in *data* upside up (see set_upside_up_slice)."""
    return [set_upside_up_slice(dslice, metadata) for dslice in data]
# Marker stored in the metadata once its hough_roi has been reoriented,
# so repeated calls do not transform the ROI twice.
_TAG_ROI_UPSIDEUP = 'ROI_UPSIDEUP'

def set_upside_up_slice(dslice, metadata=None, do_flip=False):
    """
    Reorient one (time, h, w) slice using ImageOrientationPatient.

    Transposes the in-plane axes when the second direction-cosine row aligns
    better with the patient y axis, and (only when *do_flip* is True) mirrors
    the image so both in-plane directions point positively. The metadata's
    "hough_roi" is transformed the same way, exactly once (guarded by the
    ROI_UPSIDEUP tag), and the tag is written into the metadata.
    """
    cosines = np.array(metadata["ImageOrientationPatient"]).reshape((2, 3))
    # dir_a comes from row 1, dir_b from row 0 — keep this order.
    dir_a = cosines[1, :] / np.linalg.norm(cosines[1, :])
    dir_b = cosines[0, :] / np.linalg.norm(cosines[0, :])
    x_axis = np.array([1, 0, 0])
    y_axis = np.array([0, 1, 0])

    if abs(np.dot(y_axis, dir_a)) >= abs(np.dot(y_axis, dir_b)):
        # dir_a aligns better with patient-y: swap the in-plane axes.
        oriented = np.transpose(dslice, (0, 2, 1))
        roi = list(metadata["hough_roi"][::-1])
        dir_a, dir_b = dir_b, dir_a
    else:
        oriented = dslice
        roi = list(metadata["hough_roi"])

    if do_flip and np.dot(y_axis, dir_a) < 0:
        # Flip vertically
        oriented = oriented[:, ::-1, :]
        if roi[0]:
            roi[0] = 1 - roi[0]

    if do_flip and np.dot(x_axis, dir_b) < 0:
        # Flip horizontally
        oriented = oriented[:, :, ::-1]
        if roi[1]:
            roi[1] = 1 - roi[1]

    if _TAG_ROI_UPSIDEUP not in metadata:
        metadata["hough_roi"] = tuple(roi)
        metadata[_TAG_ROI_UPSIDEUP] = True

    return oriented
def slice_location_finder(metadata_dict):
    """
    Order slices along the axis orthogonal to their common plane.

    :param metadata_dict: dict with arbitrary keys, and metadata values
        (needs ImageOrientationPatient, ImagePositionPatient, PixelSpacing,
        Rows, Columns).
    :return: (datadict, sorted_indices, sorted_distances) where datadict maps
        each key to derived geometry including "relative_position" and
        "middle_pixel_position", sorted_indices lists the keys ordered by
        relative position, and sorted_distances holds the plane-to-plane
        distance between consecutive slices (len(sorted_indices) - 1 entries).

    Fixed: uses .items()/plain dict iteration/range() instead of the
    py2-only .iteritems()/.iterkeys()/xrange() (NameError/AttributeError on
    Python 3; identical behaviour on Python 2).
    """
    datadict = dict()
    for key, metadata in metadata_dict.items():
        datadict[key] = {
            "orientation": [float(i) for i in metadata["ImageOrientationPatient"]],
            "position": [float(i) for i in metadata["ImagePositionPatient"]],
            "pixel_spacing": [float(i) for i in metadata["PixelSpacing"]],
            "rows": int(metadata["Rows"]),
            "columns": int(metadata["Columns"]),
        }

    for key, data in datadict.items():
        # calculate patient-space position of the middle pixel
        F = np.array(data["orientation"]).reshape((2, 3))
        pixel_spacing = data["pixel_spacing"]
        i, j = data["columns"] / 2.0, data["rows"] / 2.0  # reversed order, as per http://nipy.org/nibabel/dicom/dicom_orientation.html
        im_pos = np.array([[i * pixel_spacing[0], j * pixel_spacing[1]]], dtype='float32')
        pos = np.array(data["position"]).reshape((1, 3))
        position = np.dot(im_pos, F) + pos
        data["middle_pixel_position"] = position[0, :]

    # find the keys of the 2 points furthest away from each other
    if len(datadict) <= 1:
        for key, data in datadict.items():
            data["relative_position"] = 0.0
    else:
        max_dist = -1.0
        max_dist_keys = []
        for key1, data1 in datadict.items():
            for key2, data2 in datadict.items():
                if key1 == key2:
                    continue
                p1 = data1["middle_pixel_position"]
                p2 = data2["middle_pixel_position"]
                distance = np.sqrt(np.sum((p1 - p2) ** 2))
                if distance > max_dist:
                    max_dist_keys = [key1, key2]
                    max_dist = distance
        # project the others on the line between these 2 points
        # sort the keys, so the order is more or less the same as they were
        max_dist_keys.sort()
        p_ref1 = datadict[max_dist_keys[0]]["middle_pixel_position"]
        p_ref2 = datadict[max_dist_keys[1]]["middle_pixel_position"]
        v1 = p_ref2 - p_ref1
        v1 = v1 / np.linalg.norm(v1)
        for key, data in datadict.items():
            v2 = data["middle_pixel_position"] - p_ref1
            data["relative_position"] = np.inner(v1, v2)

    sorted_indices = sorted(datadict, key=lambda x: datadict[x]["relative_position"])

    sorted_distances = []
    for i in range(len(sorted_indices) - 1):
        res = []
        # Average the point-to-plane distance measured from both sides, since
        # neighbouring slices may not be perfectly parallel.
        for d1, d2 in [(datadict[sorted_indices[i]], datadict[sorted_indices[i + 1]]),
                       (datadict[sorted_indices[i + 1]], datadict[sorted_indices[i]])]:
            F = np.array(d1["orientation"]).reshape((2, 3))
            n = np.cross(F[0, :], F[1, :])
            n = n / np.sqrt(np.sum(n * n))
            p = d2["middle_pixel_position"] - d1["position"]
            res.append(np.abs(np.sum(n * p)))
        sorted_distances.append(np.mean(res))

    return datadict, sorted_indices, sorted_distances
def orthogonal_projection_on_slice(percentual_coordinate, source_metadata, target_metadata):
    """
    Map a fractional coordinate on the source slice to a fractional
    coordinate on the target slice, going through patient space.

    The point travels through a chain of homogeneous-coordinate transforms:
    fractional -> pixels -> millimetres -> patient space (source side), then
    patient space -> millimetres -> pixels -> fractional (target side). The
    projection onto the target plane drops the out-of-plane component (the
    matrices' third row is zero), i.e. it is orthogonal.

    :param percentual_coordinate: (frac_0, frac_1) coordinate on the source slice.
    :param source_metadata: dict with Rows, Columns, PixelSpacing,
        ImageOrientationPatient, ImagePositionPatient of the source slice.
    :param target_metadata: same fields for the target slice.
    :return: length-2 array: fractional coordinate on the target slice.
    """
    # Homogeneous column vector [c0, c1, 0, 1].
    point = np.array([[percentual_coordinate[0]],
                      [percentual_coordinate[1]],
                      [0],
                      [1]])
    # Fractions -> pixel indices on the source slice.
    image_size = [source_metadata["Rows"], source_metadata["Columns"]]
    point = np.dot(np.array( [[image_size[0],0,0,0],
                              [0,image_size[1],0,0],
                              [0,0,0,0],
                              [0,0,0,1]]), point)
    # Pixels -> millimetres.
    pixel_spacing = source_metadata["PixelSpacing"]
    point = np.dot(np.array( [[pixel_spacing[0],0,0,0],
                              [0,pixel_spacing[1],0,0],
                              [0,0,0,0],
                              [0,0,0,1]]), point)
    # Millimetres -> patient space; note the [::-1] row swap of the
    # direction-cosine matrix before it is embedded in the 4x4 transform.
    Fa = np.array(source_metadata["ImageOrientationPatient"]).reshape( (2,3) )[::-1,:]
    posa = source_metadata["ImagePositionPatient"]
    point = np.dot(np.array( [[Fa[0,0],Fa[1,0],0,posa[0]],
                              [Fa[0,1],Fa[1,1],0,posa[1]],
                              [Fa[0,2],Fa[1,2],0,posa[2]],
                              [0,0,0,1]]), point)
    # Translate into the target slice's frame.
    posb = target_metadata["ImagePositionPatient"]
    point = np.dot(np.array( [[1,0,0,-posb[0]],
                              [0,1,0,-posb[1]],
                              [0,0,1,-posb[2]],
                              [0,0,0,1]]), point)
    # Project onto the target plane axes (normalised direction cosines);
    # the zero third row discards the out-of-plane component.
    Fb = np.array(target_metadata["ImageOrientationPatient"]).reshape( (2,3) )[::-1,:]
    ff0 = np.sqrt(np.sum(Fb[0,:]*Fb[0,:]))
    ff1 = np.sqrt(np.sum(Fb[1,:]*Fb[1,:]))
    point = np.dot(np.array( [[Fb[0,0]/ff0,Fb[0,1]/ff0,Fb[0,2]/ff0,0],
                              [Fb[1,0]/ff1,Fb[1,1]/ff1,Fb[1,2]/ff1,0],
                              [0,0,0,0],
                              [0,0,0,1]]), point)
    # Millimetres -> pixels on the target slice.
    pixel_spacing = target_metadata["PixelSpacing"]
    point = np.dot(np.array( [[1./pixel_spacing[0],0,0,0],
                              [0,1./pixel_spacing[1],0,0],
                              [0,0,0,0],
                              [0,0,0,1]]), point)
    # Pixels -> fractions on the target slice.
    image_size = [target_metadata["Rows"], target_metadata["Columns"]]
    point = np.dot(np.array( [[1./image_size[0],0,0,0],
                              [0,1./image_size[1],0,0],
                              [0,0,0,0],
                              [0,0,0,1]]), point)
    return point[:2,0]  # percentual coordinate as well
def patient_coor_from_slice(percentual_coordinate, source_metadata):
    """Convert a percentual (row, col) coordinate on a DICOM slice into
    3-D patient coordinates (millimetres), using homogeneous transforms."""
    hpt = np.array([[percentual_coordinate[0]],
                    [percentual_coordinate[1]],
                    [0],
                    [1]])
    # percentual -> pixel indices
    n_rows, n_cols = source_metadata["Rows"], source_metadata["Columns"]
    hpt = np.array([[n_rows, 0, 0, 0],
                    [0, n_cols, 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    # pixel indices -> millimetres
    spacing = source_metadata["PixelSpacing"]
    hpt = np.array([[spacing[0], 0, 0, 0],
                    [0, spacing[1], 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    # slice plane -> patient space (row direction first, hence [::-1])
    F = np.array(source_metadata["ImageOrientationPatient"]).reshape((2, 3))[::-1, :]
    origin = source_metadata["ImagePositionPatient"]
    hpt = np.array([[F[0, 0], F[1, 0], 0, origin[0]],
                    [F[0, 1], F[1, 1], 0, origin[1]],
                    [F[0, 2], F[1, 2], 0, origin[2]],
                    [0, 0, 0, 1]]) @ hpt
    return hpt[:3, 0]  # patient coordinate
def point_projection_on_slice(point, target_metadata):
    """Orthogonally project a 3-D patient-space point (mm) onto a DICOM
    slice and return its coordinate on that slice in pixel units
    (mm offsets divided by PixelSpacing; not normalised by Rows/Columns)."""
    hpt = np.array([[point[0]],
                    [point[1]],
                    [point[2]],
                    [1]])
    # translate so the slice origin sits at (0, 0, 0)
    origin = target_metadata["ImagePositionPatient"]
    hpt = np.array([[1, 0, 0, -origin[0]],
                    [0, 1, 0, -origin[1]],
                    [0, 0, 1, -origin[2]],
                    [0, 0, 0, 1]]) @ hpt
    # mm offsets along the normalised in-plane axes; the out-of-plane
    # component is discarded (orthogonal projection)
    F = np.array(target_metadata["ImageOrientationPatient"]).reshape((2, 3))[::-1, :]
    norm0 = np.sqrt(np.sum(F[0, :] * F[0, :]))
    norm1 = np.sqrt(np.sum(F[1, :] * F[1, :]))
    hpt = np.array([[F[0, 0] / norm0, F[0, 1] / norm0, F[0, 2] / norm0, 0],
                    [F[1, 0] / norm1, F[1, 1] / norm1, F[1, 2] / norm1, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    # millimetres -> pixel indices
    spacing = target_metadata["PixelSpacing"]
    hpt = np.array([[1. / spacing[0], 0, 0, 0],
                    [0, 1. / spacing[1], 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    return hpt[:2, 0]
def _projective_fix_for_slice(metadata, top_point, bottom_point, output_width):
    """Fit the affine map sending a slice's projected top/bottom landmarks to
    (0, output_width/2) and (output_width, output_width/2), with a third point
    perpendicular to the segment pinned at (0, 0), and return its inverse
    wrapped as a skimage ProjectiveTransform."""
    top = point_projection_on_slice(top_point, metadata)
    bottom = point_projection_on_slice(bottom_point, metadata)
    # vector perpendicular to the top->bottom segment
    n = np.array([bottom[1] - top[1], top[0] - bottom[0]])
    third = top + n/2
    # Six linear equations pinning the images of the three points under the
    # affine map; rows alternate between x- and y-output equations.
    A = np.array([[top[0], top[1], 1, 0, 0, 0],
                  [0, 0, 0, top[0], top[1], 1],
                  [bottom[0], bottom[1], 1, 0, 0, 0],
                  [0, 0, 0, bottom[0], bottom[1], 1],
                  [third[0], third[1], 1, 0, 0, 0],
                  [0, 0, 0, third[0], third[1], 1]])
    b = np.array([0, 0.5*output_width, output_width, 0.5*output_width, 0, 0])
    sc, rnorm, rank, s = np.linalg.lstsq(A, b)
    # these need to be mixed up a little, because we have non-standard x-y-order
    tform_matrix = np.linalg.inv(np.array([[sc[4], sc[3], sc[5]],
                                           [sc[1], sc[0], sc[2]],
                                           [0, 0, 1]]))
    return skimage.transform.ProjectiveTransform(matrix=tform_matrix)


def get_chan_transformations(ch2_metadata=None,
                             ch4_metadata=None,
                             top_point_metadata=None,
                             bottom_point_metadata=None,
                             output_width=100):
    """Build projective transforms warping the 2-chamber and 4-chamber slices
    into a common output_width x output_width frame whose vertical axis runs
    from the heart's top landmark to its bottom landmark.

    :param ch2_metadata: metadata dict for the 2-chamber slice (needs
        ImageOrientationPatient / ImagePositionPatient); may be None when
        ch4_metadata is given, in which case the 4ch slice is reused.
    :param ch4_metadata: same, for the 4-chamber slice; may be None when
        ch2_metadata is given.
    :param top_point_metadata: slice metadata whose "hough_roi" (percentual
        coordinates) marks the top landmark.
    :param bottom_point_metadata: same, for the bottom landmark.
    :param output_width: side length of the target frame in pixels.
    :returns: tuple (ch2_transform, ch4_transform) of
        skimage.transform.ProjectiveTransform instances.
    :raises ValueError: if both channel metadata arguments are None.
    """
    has_both_chans = False
    if ch2_metadata is None and ch4_metadata is None:
        # Fixed: the original raised a bare string, which is itself a
        # TypeError in Python 3.
        raise ValueError("Need at least one of these slices")
    elif ch2_metadata is not None and ch4_metadata is None:
        ch4_metadata = ch2_metadata
    elif ch4_metadata is not None and ch2_metadata is None:
        ch2_metadata = ch4_metadata
    else:
        has_both_chans = True
    # Plane equations n.x = b for both slices (row direction first).
    F2 = np.array(ch2_metadata["ImageOrientationPatient"]).reshape( (2,3) )[::-1,:]
    F4 = np.array(ch4_metadata["ImageOrientationPatient"]).reshape( (2,3) )[::-1,:]
    n2 = np.cross(F2[0,:], F2[1,:])
    n4 = np.cross(F4[0,:], F4[1,:])
    b2 = np.sum(n2 * np.array(ch2_metadata["ImagePositionPatient"]))
    b4 = np.sum(n4 * np.array(ch4_metadata["ImagePositionPatient"]))
    # find top and bottom of my view (in patient coordinates)
    top_point = patient_coor_from_slice(top_point_metadata["hough_roi"], top_point_metadata)
    bottom_point = patient_coor_from_slice(bottom_point_metadata["hough_roi"], bottom_point_metadata)
    # if it has both chan's: middle line is the common line!
    if has_both_chans:
        F5 = np.cross(n2, n4)  # direction of the slices' intersection line
        # Least-squares point P satisfying both plane equations.
        A = np.array([n2, n4])
        b = np.array([b2, b4])
        P, rnorm, rank, s = np.linalg.lstsq(A, b)
        # Snap each landmark onto the line P + sc*F5.
        # NOTE(review): sc is fit against the absolute landmark rather than
        # (landmark - P); kept as in the original.
        A = np.array([F5]).T
        b = np.array(top_point)
        sc, rnorm, rank, s = np.linalg.lstsq(A, b)
        top_point = sc[0] * F5 + P
        A = np.array([F5]).T
        b = np.array(bottom_point)
        sc, rnorm, rank, s = np.linalg.lstsq(A, b)
        bottom_point = sc[0] * F5 + P
    # Fit the per-channel affine alignments (identical procedure for both).
    ch2_form_fix = _projective_fix_for_slice(ch2_metadata, top_point, bottom_point, output_width)
    ch4_form_fix = _projective_fix_for_slice(ch4_metadata, top_point, bottom_point, output_width)
    return ch2_form_fix, ch4_form_fix
def augment_brightness(patient_tensor, brightness_adjustment):
    """Shift the tensor's brightness by a fraction of its mean intensity,
    clamping the result to the valid [0, 1] range."""
    offset = brightness_adjustment * np.mean(patient_tensor)
    return np.clip(patient_tensor + offset, 0, 1)
def orthogonal_projection_on_slice(percentual_coordinate, source_metadata, target_metadata):
    """Project a percentual (row, col) coordinate from one DICOM slice onto
    another, returning a percentual coordinate on the target slice.

    The point is lifted from percentual source coordinates into patient
    space (mm) via the source slice geometry, then dropped orthogonally
    into the target slice frame and rescaled to percentual coordinates.
    Every step is a 4x4 homogeneous transform applied in sequence.
    """
    # homogeneous column vector (row, col, 0, 1)
    hpt = np.array([[percentual_coordinate[0]],
                    [percentual_coordinate[1]],
                    [0],
                    [1]])
    # percentual -> pixel indices on the source slice
    src_rows, src_cols = source_metadata["Rows"], source_metadata["Columns"]
    hpt = np.array([[src_rows, 0, 0, 0],
                    [0, src_cols, 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    # pixel indices -> millimetres
    src_ps = source_metadata["PixelSpacing"]
    hpt = np.array([[src_ps[0], 0, 0, 0],
                    [0, src_ps[1], 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    # slice plane -> patient coordinates (row direction first, hence [::-1])
    F_src = np.array(source_metadata["ImageOrientationPatient"]).reshape((2, 3))[::-1, :]
    src_pos = source_metadata["ImagePositionPatient"]
    hpt = np.array([[F_src[0, 0], F_src[1, 0], 0, src_pos[0]],
                    [F_src[0, 1], F_src[1, 1], 0, src_pos[1]],
                    [F_src[0, 2], F_src[1, 2], 0, src_pos[2]],
                    [0, 0, 0, 1]]) @ hpt
    # translate so the target slice origin sits at (0, 0, 0)
    tgt_pos = target_metadata["ImagePositionPatient"]
    hpt = np.array([[1, 0, 0, -tgt_pos[0]],
                    [0, 1, 0, -tgt_pos[1]],
                    [0, 0, 1, -tgt_pos[2]],
                    [0, 0, 0, 1]]) @ hpt
    # patient space -> mm offsets along the (normalised) target in-plane axes;
    # the out-of-plane component is dropped (orthogonal projection)
    F_tgt = np.array(target_metadata["ImageOrientationPatient"]).reshape((2, 3))[::-1, :]
    len0 = np.sqrt(np.sum(F_tgt[0, :] * F_tgt[0, :]))
    len1 = np.sqrt(np.sum(F_tgt[1, :] * F_tgt[1, :]))
    hpt = np.array([[F_tgt[0, 0] / len0, F_tgt[0, 1] / len0, F_tgt[0, 2] / len0, 0],
                    [F_tgt[1, 0] / len1, F_tgt[1, 1] / len1, F_tgt[1, 2] / len1, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    # millimetres -> pixel indices on the target slice
    tgt_ps = target_metadata["PixelSpacing"]
    hpt = np.array([[1. / tgt_ps[0], 0, 0, 0],
                    [0, 1. / tgt_ps[1], 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    # pixel indices -> percentual coordinates
    tgt_rows, tgt_cols = target_metadata["Rows"], target_metadata["Columns"]
    hpt = np.array([[1. / tgt_rows, 0, 0, 0],
                    [0, 1. / tgt_cols, 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    return hpt[:2, 0]  # percentual coordinate as well
def patient_coor_from_slice(percentual_coordinate, source_metadata):
    """Convert a percentual (row, col) coordinate on a DICOM slice into
    3-D patient coordinates (millimetres), using homogeneous transforms."""
    hpt = np.array([[percentual_coordinate[0]],
                    [percentual_coordinate[1]],
                    [0],
                    [1]])
    # percentual -> pixel indices
    n_rows, n_cols = source_metadata["Rows"], source_metadata["Columns"]
    hpt = np.array([[n_rows, 0, 0, 0],
                    [0, n_cols, 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    # pixel indices -> millimetres
    spacing = source_metadata["PixelSpacing"]
    hpt = np.array([[spacing[0], 0, 0, 0],
                    [0, spacing[1], 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    # slice plane -> patient space (row direction first, hence [::-1])
    F = np.array(source_metadata["ImageOrientationPatient"]).reshape((2, 3))[::-1, :]
    origin = source_metadata["ImagePositionPatient"]
    hpt = np.array([[F[0, 0], F[1, 0], 0, origin[0]],
                    [F[0, 1], F[1, 1], 0, origin[1]],
                    [F[0, 2], F[1, 2], 0, origin[2]],
                    [0, 0, 0, 1]]) @ hpt
    return hpt[:3, 0]  # patient coordinate
def point_projection_on_slice(point, target_metadata):
    """Orthogonally project a 3-D patient-space point (mm) onto a DICOM
    slice and return its coordinate on that slice in pixel units
    (mm offsets divided by PixelSpacing; not normalised by Rows/Columns)."""
    hpt = np.array([[point[0]],
                    [point[1]],
                    [point[2]],
                    [1]])
    # translate so the slice origin sits at (0, 0, 0)
    origin = target_metadata["ImagePositionPatient"]
    hpt = np.array([[1, 0, 0, -origin[0]],
                    [0, 1, 0, -origin[1]],
                    [0, 0, 1, -origin[2]],
                    [0, 0, 0, 1]]) @ hpt
    # mm offsets along the normalised in-plane axes; the out-of-plane
    # component is discarded (orthogonal projection)
    F = np.array(target_metadata["ImageOrientationPatient"]).reshape((2, 3))[::-1, :]
    norm0 = np.sqrt(np.sum(F[0, :] * F[0, :]))
    norm1 = np.sqrt(np.sum(F[1, :] * F[1, :]))
    hpt = np.array([[F[0, 0] / norm0, F[0, 1] / norm0, F[0, 2] / norm0, 0],
                    [F[1, 0] / norm1, F[1, 1] / norm1, F[1, 2] / norm1, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    # millimetres -> pixel indices
    spacing = target_metadata["PixelSpacing"]
    hpt = np.array([[1. / spacing[0], 0, 0, 0],
                    [0, 1. / spacing[1], 0, 0],
                    [0, 0, 0, 0],
                    [0, 0, 0, 1]]) @ hpt
    return hpt[:2, 0]
def _projective_fix_for_slice(metadata, top_point, bottom_point, output_width):
    """Fit the affine map sending a slice's projected top/bottom landmarks to
    (0, output_width/2) and (output_width, output_width/2), with a third point
    perpendicular to the segment pinned at (0, 0), and return its inverse
    wrapped as a skimage ProjectiveTransform."""
    top = point_projection_on_slice(top_point, metadata)
    bottom = point_projection_on_slice(bottom_point, metadata)
    # vector perpendicular to the top->bottom segment
    n = np.array([bottom[1] - top[1], top[0] - bottom[0]])
    third = top + n/2
    # Six linear equations pinning the images of the three points under the
    # affine map; rows alternate between x- and y-output equations.
    A = np.array([[top[0], top[1], 1, 0, 0, 0],
                  [0, 0, 0, top[0], top[1], 1],
                  [bottom[0], bottom[1], 1, 0, 0, 0],
                  [0, 0, 0, bottom[0], bottom[1], 1],
                  [third[0], third[1], 1, 0, 0, 0],
                  [0, 0, 0, third[0], third[1], 1]])
    b = np.array([0, 0.5*output_width, output_width, 0.5*output_width, 0, 0])
    sc, rnorm, rank, s = np.linalg.lstsq(A, b)
    # these need to be mixed up a little, because we have non-standard x-y-order
    tform_matrix = np.linalg.inv(np.array([[sc[4], sc[3], sc[5]],
                                           [sc[1], sc[0], sc[2]],
                                           [0, 0, 1]]))
    return skimage.transform.ProjectiveTransform(matrix=tform_matrix)


def get_chan_transformations(ch2_metadata=None,
                             ch4_metadata=None,
                             top_point_metadata=None,
                             bottom_point_metadata=None,
                             output_width=100):
    """Build projective transforms warping the 2-chamber and 4-chamber slices
    into a common output_width x output_width frame whose vertical axis runs
    from the heart's top landmark to its bottom landmark.

    :param ch2_metadata: metadata dict for the 2-chamber slice (needs
        ImageOrientationPatient / ImagePositionPatient); may be None when
        ch4_metadata is given, in which case the 4ch slice is reused.
    :param ch4_metadata: same, for the 4-chamber slice; may be None when
        ch2_metadata is given.
    :param top_point_metadata: slice metadata whose "hough_roi" (percentual
        coordinates) marks the top landmark.
    :param bottom_point_metadata: same, for the bottom landmark.
    :param output_width: side length of the target frame in pixels.
    :returns: tuple (ch2_transform, ch4_transform) of
        skimage.transform.ProjectiveTransform instances.
    :raises ValueError: if both channel metadata arguments are None.
    """
    has_both_chans = False
    if ch2_metadata is None and ch4_metadata is None:
        # Fixed: the original raised a bare string, which is itself a
        # TypeError in Python 3.
        raise ValueError("Need at least one of these slices")
    elif ch2_metadata is not None and ch4_metadata is None:
        ch4_metadata = ch2_metadata
    elif ch4_metadata is not None and ch2_metadata is None:
        ch2_metadata = ch4_metadata
    else:
        has_both_chans = True
    # Plane equations n.x = b for both slices (row direction first).
    F2 = np.array(ch2_metadata["ImageOrientationPatient"]).reshape( (2,3) )[::-1,:]
    F4 = np.array(ch4_metadata["ImageOrientationPatient"]).reshape( (2,3) )[::-1,:]
    n2 = np.cross(F2[0,:], F2[1,:])
    n4 = np.cross(F4[0,:], F4[1,:])
    b2 = np.sum(n2 * np.array(ch2_metadata["ImagePositionPatient"]))
    b4 = np.sum(n4 * np.array(ch4_metadata["ImagePositionPatient"]))
    # find top and bottom of my view (in patient coordinates)
    top_point = patient_coor_from_slice(top_point_metadata["hough_roi"], top_point_metadata)
    bottom_point = patient_coor_from_slice(bottom_point_metadata["hough_roi"], bottom_point_metadata)
    # if it has both chan's: middle line is the common line!
    if has_both_chans:
        F5 = np.cross(n2, n4)  # direction of the slices' intersection line
        # Least-squares point P satisfying both plane equations.
        A = np.array([n2, n4])
        b = np.array([b2, b4])
        P, rnorm, rank, s = np.linalg.lstsq(A, b)
        # Snap each landmark onto the line P + sc*F5.
        # NOTE(review): sc is fit against the absolute landmark rather than
        # (landmark - P); kept as in the original.
        A = np.array([F5]).T
        b = np.array(top_point)
        sc, rnorm, rank, s = np.linalg.lstsq(A, b)
        top_point = sc[0] * F5 + P
        A = np.array([F5]).T
        b = np.array(bottom_point)
        sc, rnorm, rank, s = np.linalg.lstsq(A, b)
        bottom_point = sc[0] * F5 + P
    # Fit the per-channel affine alignments (identical procedure for both).
    ch2_form_fix = _projective_fix_for_slice(ch2_metadata, top_point, bottom_point, output_width)
    ch4_form_fix = _projective_fix_for_slice(ch4_metadata, top_point, bottom_point, output_width)
    return ch2_form_fix, ch4_form_fix
def augment_brightness(patient_tensor, brightness_adjustment):
    """Shift the tensor's brightness by a fraction of its mean intensity,
    clamping the result to the valid [0, 1] range."""
    offset = brightness_adjustment * np.mean(patient_tensor)
    return np.clip(patient_tensor + offset, 0, 1)
|
317070/kaggle-heart
|
preprocess.py
|
Python
|
mit
| 47,808
|
# -*- coding: utf-8 -*-
# @Author: Gillett Hernandez
# @Date: 2017-11-28 21:37:36
# @Last Modified by: Gillett Hernandez
# @Last Modified time: 2017-12-01 12:35:27
from euler_funcs import really_large_prime_sieve, basic_large_prime_sieve, large_prime_sieve, get_primes
from math import log
import os
# since the numbers are composite, primes above N//2 need not be considered
# thus the sieve only needs to go to N//2
# thus cuts down on runtime by a lot
def invert(L):
    """Swap a list's values and positions: invert([7, 5, 3]) == {7: 0, 5: 1, 3: 2}.

    Duplicate values keep the index of their last occurrence."""
    return dict(zip(L, range(len(L))))
def solve(N, primes):
    """Count semiprimes (numbers with exactly two prime factors, counted with
    multiplicity) not exceeding N.

    The iteration is over the *larger* prime factor p: each p contributes the
    number of primes q <= min(p, N/p).  For p <= sqrt(N) that is simply i + 1
    (all primes up to p); for larger p, ``idx`` tracks the index of the
    largest prime <= N/p, and only ever moves left because N/p shrinks as p
    grows.  (Project Euler 187 asks for semiprimes *below* N; the two counts
    agree unless N itself is a semiprime.)

    :param N: upper bound (inclusive).
    :param primes: sorted list of primes covering at least [2, N//2].
    :return: the semiprime count.
    """
    c = 0
    mark = False  # becomes True once p exceeds sqrt(N)
    idx = len(primes) - 1
    try:
        for i, p in enumerate(primes):
            # Threshold shortcuts, checked from the largest bound down so each
            # branch is reachable (the original tested N//3 first, which made
            # the N//2 break dead code and over-counted primes above N//2).
            if p > N // 2:
                # even 2*p exceeds N: no semiprime has p as its larger factor
                break
            elif p > N // 3:
                # only q = 2 fits
                c += 1
                continue
            elif p > N // 4:
                # only q in {2, 3} fit
                c += 2
                continue
            if i % 100 == 0:
                print(p)  # progress output, kept from the original
            if p > N**0.5:
                mark = True
            if not mark:
                # p <= sqrt(N): every prime q <= p satisfies q*p <= N
                # NOTE(review): the message fires while mark is *not* set;
                # wording looks inverted but is kept for output parity.
                print("mark triggered")
                c += i + 1
            else:
                Np = N / p
                # largest prime <= N/p; idx only moves left as N/p shrinks
                while primes[idx] > Np and idx > 0:
                    idx -= 1
                c += idx + 1
    except Exception:
        # Diagnostic dump, then re-raise: the original bare except returned a
        # silently-wrong partial count.
        print({key: value for key, value in locals().items()
               if key != "primes" and key != "inverted"})
        raise
    return c
def test(N):
    """Smoke-test helper: sieve primes up to N//2, report how many were
    found, then print the semiprime count for N."""
    primes = get_primes(limit=N // 2)
    count = len(primes)
    print(f"number of primes = {count}")
    print(solve(N, primes))
def main():
    """Sieve (or load a cached list of) primes up to N//2, then print the
    semiprime count for N = 10**8 (Project Euler problem 187)."""
    N = 100_000_000
    cache_path = f"primes_to_{N//2}.txt"
    if not os.path.exists(cache_path):
        # first run: sieve once and cache one prime per line
        sieved = basic_large_prime_sieve(N // 2)
        with open(cache_path, "w") as fd:
            for p in sieved:
                fd.write("{}\n".format(p))
    with open(cache_path, 'r') as fd:
        primes = [int(e) for e in fd.read().strip().split("\n")]
    print(len(primes))
    print(solve(N, primes))
# Script entry point; the commented calls are smaller sanity checks against
# known answers, kept for reference — main() runs the full N = 10**8 problem.
if __name__ == '__main__':
    # test(30)
    # test(100)
    # test(1000)
    # test(25000)
    # test(10000000)
    main()
|
gillett-hernandez/project-euler
|
Python/problem_187.py
|
Python
|
mit
| 3,139
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.